Dataset schema (column: type, observed range):

- repo: string (856 distinct values)
- pull_number: int64 (3 to 127k)
- instance_id: string (length 12 to 58)
- issue_numbers: sequence of strings (length 1 to 5)
- base_commit: string (length 40)
- patch: string (length 67 to 1.54M)
- test_patch: string (length 0 to 107M)
- problem_statement: string (length 3 to 307k)
- hints_text: string (length 0 to 908k)
- created_at: timestamp[s]
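For orientation, a minimal sketch of iterating over rows with this schema via the Hugging Face `datasets` library. The dataset path below is a placeholder assumption; this dump does not name one.

```python
from datasets import load_dataset

# Hypothetical dataset path; substitute the real identifier for this dump.
ds = load_dataset("org/rtd-pr-instances", split="train")

for row in ds.select(range(3)):
    # Each row pairs a PR's code patch with its issue text and test patch.
    print(row["instance_id"], row["base_commit"][:8], len(row["patch"]))
```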
repo: readthedocs/readthedocs.org
pull_number: 3501
instance_id: readthedocs__readthedocs.org-3501
issue_numbers: ["1788"]
base_commit: 76635e64188e94a9fa7c92828d7c91fa916fef23
diff --git a/readthedocs/builds/models.py b/readthedocs/builds/models.py --- a/readthedocs/builds/models.py +++ b/readthedocs/builds/models.py @@ -21,8 +21,7 @@ from readthedocs.core.utils import broadcast from readthedocs.projects.constants import ( - BITBUCKET_REGEXS, BITBUCKET_URL, GITHUB_REGEXS, GITHUB_URL, GITLAB_REGEXS, - GITLAB_URL, PRIVACY_CHOICES, PRIVATE) + BITBUCKET_URL, GITHUB_URL, GITLAB_URL, PRIVACY_CHOICES, PRIVATE) from readthedocs.projects.models import APIProject, Project from .constants import ( @@ -30,6 +29,9 @@ NON_REPOSITORY_VERSIONS, STABLE, TAG, VERSION_TYPES) from .managers import VersionManager from .querysets import BuildQuerySet, RelatedBuildQuerySet, VersionQuerySet +from .utils import ( + get_bitbucket_username_repo, get_github_username_repo, + get_gitlab_username_repo) from .version_slug import VersionSlugField DEFAULT_VERSION_PRIVACY_LEVEL = getattr( @@ -277,12 +279,8 @@ def get_github_url( elif action == 'edit': action_string = 'edit' - for regex in GITHUB_REGEXS: - match = regex.search(repo_url) - if match: - user, repo = match.groups() - break - else: + user, repo = get_github_username_repo(repo_url) + if not user and not repo: return '' repo = repo.rstrip('/') @@ -315,12 +313,8 @@ def get_gitlab_url( elif action == 'edit': action_string = 'edit' - for regex in GITLAB_REGEXS: - match = regex.search(repo_url) - if match: - user, repo = match.groups() - break - else: + user, repo = get_gitlab_username_repo(repo_url) + if not user and not repo: return '' repo = repo.rstrip('/') @@ -341,12 +335,8 @@ def get_bitbucket_url(self, docroot, filename, source_suffix='.rst'): if not docroot: return '' - for regex in BITBUCKET_REGEXS: - match = regex.search(repo_url) - if match: - user, repo = match.groups() - break - else: + user, repo = get_bitbucket_username_repo(repo_url) + if not user and not repo: return '' repo = repo.rstrip('/') diff --git a/readthedocs/builds/utils.py b/readthedocs/builds/utils.py --- a/readthedocs/builds/utils.py +++ b/readthedocs/builds/utils.py @@ -4,32 +4,13 @@ from __future__ import ( absolute_import, division, print_function, unicode_literals) -import re - -GH_REGEXS = [ - re.compile('github.com/(.+)/(.+)(?:\.git){1}'), - re.compile('github.com/(.+)/(.+)'), - re.compile('github.com:(.+)/(.+).git'), -] - -BB_REGEXS = [ - re.compile('bitbucket.org/(.+)/(.+)/'), - re.compile('bitbucket.org/(.+)/(.+)'), - re.compile('bitbucket.org:(.+)/(.+)\.git'), -] - -# TODO: I think this can be different than `gitlab.com` -# self.adapter.provider_base_url -GL_REGEXS = [ - re.compile('gitlab.com/(.+)/(.+)(?:\.git){1}'), - re.compile('gitlab.com/(.+)/(.+)'), - re.compile('gitlab.com:(.+)/(.+)\.git'), -] +from readthedocs.projects.constants import ( + BITBUCKET_REGEXS, GITHUB_REGEXS, GITLAB_REGEXS) def get_github_username_repo(url): if 'github' in url: - for regex in GH_REGEXS: + for regex in GITHUB_REGEXS: match = regex.search(url) if match: return match.groups() @@ -38,7 +19,7 @@ def get_github_username_repo(url): def get_bitbucket_username_repo(url=None): if 'bitbucket' in url: - for regex in BB_REGEXS: + for regex in BITBUCKET_REGEXS: match = regex.search(url) if match: return match.groups() @@ -47,7 +28,7 @@ def get_bitbucket_username_repo(url=None): def get_gitlab_username_repo(url=None): if 'gitlab' in url: - for regex in GL_REGEXS: + for regex in GITLAB_REGEXS: match = regex.search(url) if match: return match.groups() diff --git a/readthedocs/projects/constants.py b/readthedocs/projects/constants.py --- a/readthedocs/projects/constants.py +++ b/readthedocs/projects/constants.py @@ -292,19 +292,20 @@ PROJECT_SLUG_REGEX = '(?:[-\w]+)' GITHUB_REGEXS = [ - re.compile('github.com/(.+)/(.+)(?:\.git){1}'), + re.compile('github.com/(.+)/(.+)(?:\.git){1}$'), re.compile('github.com/(.+)/(.+)'), - re.compile('github.com:(.+)/(.+).git'), + re.compile('github.com:(.+)/(.+)\.git$'), ] BITBUCKET_REGEXS = [ - re.compile('bitbucket.org/(.+)/(.+).git'), + re.compile('@bitbucket.org/(.+)/(.+)\.git$'), re.compile('bitbucket.org/(.+)/(.+)/'), re.compile('bitbucket.org/(.+)/(.+)'), + re.compile('bitbucket.org:(.+)/(.+)\.git$'), ] GITLAB_REGEXS = [ - re.compile('gitlab.com/(.+)/(.+)(?:\.git){1}'), + re.compile('gitlab.com/(.+)/(.+)(?:\.git){1}$'), re.compile('gitlab.com/(.+)/(.+)'), - re.compile('gitlab.com:(.+)/(.+).git'), + re.compile('gitlab.com:(.+)/(.+)\.git$'), ] GITHUB_URL = ( 'https://github.com/{user}/{repo}/'
diff --git a/readthedocs/rtd_tests/tests/test_repo_parsing.py b/readthedocs/rtd_tests/tests/test_repo_parsing.py --- a/readthedocs/rtd_tests/tests/test_repo_parsing.py +++ b/readthedocs/rtd_tests/tests/test_repo_parsing.py @@ -22,9 +22,28 @@ def test_github(self): self.pip.repo = 'https://github.com/user/repo/' self.assertEqual(self.version.get_github_url(docroot='/docs/', filename='file'), 'https://github.com/user/repo/blob/master/docs/file.rst') + self.pip.repo = 'https://github.com/user/repo.github.io' + self.assertEqual(self.version.get_github_url(docroot='/docs/', filename='file'), 'https://github.com/user/repo.github.io/blob/master/docs/file.rst') + + self.pip.repo = 'https://github.com/user/repo.github.io/' + self.assertEqual(self.version.get_github_url(docroot='/docs/', filename='file'), 'https://github.com/user/repo.github.io/blob/master/docs/file.rst') + self.pip.repo = 'https://github.com/user/repo.git' self.assertEqual(self.version.get_github_url(docroot='/docs/', filename='file'), 'https://github.com/user/repo/blob/master/docs/file.rst') + self.pip.repo = 'https://github.com/user/repo.github.io.git' + self.assertEqual(self.version.get_github_url(docroot='/docs/', filename='file'), 'https://github.com/user/repo.github.io/blob/master/docs/file.rst') + + self.pip.repo = 'https://github.com/user/repo.git.git' + self.assertEqual(self.version.get_github_url(docroot='/docs/', filename='file'), 'https://github.com/user/repo.git/blob/master/docs/file.rst') + + def test_github_ssh(self): + self.pip.repo = '[email protected]:user/repo.git' + self.assertEqual(self.version.get_github_url(docroot='/docs/', filename='file'), 'https://github.com/user/repo/blob/master/docs/file.rst') + + self.pip.repo = '[email protected]:user/repo.github.io.git' + self.assertEqual(self.version.get_github_url(docroot='/docs/', filename='file'), 'https://github.com/user/repo.github.io/blob/master/docs/file.rst') + def test_gitlab(self): self.pip.repo = 'https://gitlab.com/user/repo' self.assertEqual(self.version.get_gitlab_url(docroot='/foo/bar/', filename='file'), 'https://gitlab.com/user/repo/blob/master/foo/bar/file.rst') @@ -32,9 +51,28 @@ def test_gitlab(self): self.pip.repo = 'https://gitlab.com/user/repo/' self.assertEqual(self.version.get_gitlab_url(docroot='/foo/bar/', filename='file'), 'https://gitlab.com/user/repo/blob/master/foo/bar/file.rst') + self.pip.repo = 'https://gitlab.com/user/repo.gitlab.io' + self.assertEqual(self.version.get_gitlab_url(docroot='/foo/bar/', filename='file'), 'https://gitlab.com/user/repo.gitlab.io/blob/master/foo/bar/file.rst') + + self.pip.repo = 'https://gitlab.com/user/repo.gitlab.io/' + self.assertEqual(self.version.get_gitlab_url(docroot='/foo/bar/', filename='file'), 'https://gitlab.com/user/repo.gitlab.io/blob/master/foo/bar/file.rst') + self.pip.repo = 'https://gitlab.com/user/repo.git' self.assertEqual(self.version.get_gitlab_url(docroot='/foo/bar/', filename='file'), 'https://gitlab.com/user/repo/blob/master/foo/bar/file.rst') + self.pip.repo = 'https://gitlab.com/user/repo.gitlab.io.git' + self.assertEqual(self.version.get_gitlab_url(docroot='/foo/bar/', filename='file'), 'https://gitlab.com/user/repo.gitlab.io/blob/master/foo/bar/file.rst') + + self.pip.repo = 'https://gitlab.com/user/repo.git.git' + self.assertEqual(self.version.get_gitlab_url(docroot='/foo/bar/', filename='file'), 'https://gitlab.com/user/repo.git/blob/master/foo/bar/file.rst') + + def test_gitlab_ssh(self): + self.pip.repo = '[email protected]:user/repo.git' + self.assertEqual(self.version.get_gitlab_url(docroot='/foo/bar/', filename='file'), 'https://gitlab.com/user/repo/blob/master/foo/bar/file.rst') + + self.pip.repo = '[email protected]:user/repo.gitlab.io.git' + self.assertEqual(self.version.get_gitlab_url(docroot='/foo/bar/', filename='file'), 'https://gitlab.com/user/repo.gitlab.io/blob/master/foo/bar/file.rst') + def test_bitbucket(self): self.pip.repo = 'https://bitbucket.org/user/repo' self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo/src/master/foo/bar/file.rst') @@ -42,6 +80,31 @@ def test_bitbucket(self): self.pip.repo = 'https://bitbucket.org/user/repo/' self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo/src/master/foo/bar/file.rst') + self.pip.repo = 'https://bitbucket.org/user/repo.gitbucket.io' + self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo.gitbucket.io/src/master/foo/bar/file.rst') + + self.pip.repo = 'https://bitbucket.org/user/repo.gitbucket.io/' + self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo.gitbucket.io/src/master/foo/bar/file.rst') + self.pip.repo = 'https://bitbucket.org/user/repo.git' + self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo.git/src/master/foo/bar/file.rst') + + self.pip.repo = 'https://bitbucket.org/user/repo.gitbucket.io.git' + self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo.gitbucket.io.git/src/master/foo/bar/file.rst') + + self.pip.repo = 'https://bitbucket.org/user/repo.git.git' + self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo.git.git/src/master/foo/bar/file.rst') + + def test_bitbucket_https(self): + self.pip.repo = 'https://[email protected]/user/repo.git' + self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo/src/master/foo/bar/file.rst') + + self.pip.repo = 'https://[email protected]/user/repo.gitbucket.io.git' + self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo.gitbucket.io/src/master/foo/bar/file.rst') + + def test_bitbucket_ssh(self): + self.pip.repo = '[email protected]:user/repo.git' self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo/src/master/foo/bar/file.rst') + self.pip.repo = '[email protected]:user/repo.gitbucket.io.git' + self.assertEqual(self.version.get_bitbucket_url(docroot='/foo/bar/', filename='file'), 'https://bitbucket.org/user/repo.gitbucket.io/src/master/foo/bar/file.rst')
GitHub edit link is aggressively stripped

Hi, I'm trying to override the value of `github_repo`, which is used to create the 'edit on GitHub' link on the docs site. In my case this value isn't the default (which appears to be that of the RTD project name). I've set `html_context` in my `conf.py` to override `github_repo`, however I see in the build logs that RTD seems to extend this and blow away my changes:

``` python
# my config
html_context = {
    'github_repo': "esp.github.io",
}
```

... later in the same script ...

``` python
###########################################################################
#          auto-created readthedocs.org specific configuration            #
###########################################################################
#
# The following code was added during an automated build on readthedocs.org
# It is auto created and injected for every build. The result is based on the
# conf.py.tmpl file found in the readthedocs.org codebase:
# https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/doc_builder/templates/doc_builder/conf.py.tmpl
#
```

... later in the same script ...

``` python
# Add project information to the template context.
context = {
    'using_theme': using_rtd_theme,
    'html_theme': html_theme,
    'current_version': "latest",
    'MEDIA_URL': "https://media.readthedocs.org/",
    'PRODUCTION_DOMAIN': "readthedocs.org",
    'versions': [
        ("latest", "/en/latest/"),
    ],
    'downloads': [
        ("pdf", "//readthedocs.org/projects/esp/downloads/pdf/latest/"),
        ("htmlzip", "//readthedocs.org/projects/esp/downloads/htmlzip/latest/"),
        ("epub", "//readthedocs.org/projects/esp/downloads/epub/latest/"),
    ],
    'slug': 'esp',
    'name': u'Evented State Processor (ESP)',
    'rtd_language': u'en',
    'canonical_url': 'http://esp.readthedocs.org/en/latest/',
    'analytics_code': '',
    'single_version': False,
    'conf_py_path': '/./',
    'api_host': 'https://readthedocs.org/',
    'github_user': 'esp',
    'github_repo': 'esp',
    'github_version': 'master',
    'display_github': True,
    'bitbucket_user': 'None',
    'bitbucket_repo': 'None',
    'bitbucket_version': 'master',
    'display_bitbucket': False,
    'READTHEDOCS': True,
    'using_theme': (html_theme == "default"),
    'new_theme': (html_theme == "sphinx_rtd_theme"),
    'source_suffix': SUFFIX,
    'user_analytics_code': '',
    'global_analytics_code': 'UA-17997319-1',
    'commit': '98303330',
}
if 'html_context' in globals():
    html_context.update(context)
else:
    html_context = context
```

I need to override this value as I'm using RTD to host a single docs site for 2 projects ([1](https://github.com/esp/esp-js), [2](https://github.com/esp/esp-net)). I need `github_repo` to be `esp.github.io` so it correctly points to the [docs src](https://github.com/esp/esp.github.io).

I think the fix in rtfd's [conf.py](https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/doc_builder/templates/doc_builder/conf.py.tmpl#L152-L155) may be something like this (assuming it's safe to replace html_context):

``` python
if 'html_context' in globals():
    context.update(html_context)
html_context = context
```

Possibly related to #743. Appreciate any help or suggestions on how to fix this link. Keith
The github repo that we link to is the github repo that is set up with the project. I'm curious why this needs to be changed. If your source repo isn't correct, can't you change it on the project?

Hi again, sorry for the very late reply.

> The github repo that we link to is the github repo that is set up with the project. I'm curious why this needs to be changed. If your source repo isn't correct, can't you change it on the project?

I don't know if this is working for me. Both the repository url and project homepage (under the RTFD admin) are set to my [documentation repository](https://github.com/esp/esp.github.io) on github; I'd expect the link to point to that. However, when I created the RTFD project I used a different name, simply 'esp'. The reason my RTFD project name differs from my repository is that my RTFD site is really for my organisation https://github.com/esp, not that organisation's sub repositories. In my case all the sub repos are just implementations of a similar pattern that can be discussed in one documentation site. It appears that the 'edit on GitHub' link is actually inferred from the RTFD project name, not the admin settings that point to the docs source repo. I hope this makes sense :)

It seems what might be happening here is that the `.github.io` is being removed from your repository name? The github URL is generated using your repo path, which looks correct. Seems this is a bug rather than a project misconfiguration.

Thanks for coming back. Not sure what I'll do on my end for now, but at least the issue is captured.

I was able to reproduce this. The issue is in the regex that checks the repo url; the `.github` part is the problem. While a patch is provided, you can use this url form: `https://github.com/{user}/{user}.github.io.git`. Note the final `.git`.
created_at: 2018-01-11T00:11:28
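To see why anchoring these regexes matters, a standalone sketch using the updated patterns from the patch above. The old, unanchored first pattern could match the `.git` inside `.github.io`, truncating the repo name; with `$` anchors, only a trailing `.git` is stripped.

```python
import re

# The GITHUB_REGEXS list as updated in the patch above.
GITHUB_REGEXS = [
    re.compile(r'github.com/(.+)/(.+)(?:\.git){1}$'),
    re.compile(r'github.com/(.+)/(.+)'),
    re.compile(r'github.com:(.+)/(.+)\.git$'),
]

def get_github_username_repo(url):
    # Try each pattern in order; first match wins.
    for regex in GITHUB_REGEXS:
        match = regex.search(url)
        if match:
            return match.groups()
    return (None, None)

print(get_github_username_repo('https://github.com/esp/esp.github.io'))
# -> ('esp', 'esp.github.io'); the old unanchored regex returned ('esp', 'esp')
print(get_github_username_repo('[email protected]:user/repo.github.io.git'))
# -> ('user', 'repo.github.io'); only the trailing '.git' is stripped
```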
repo: readthedocs/readthedocs.org
pull_number: 3504
instance_id: readthedocs__readthedocs.org-3504
issue_numbers: ["2357"]
base_commit: a79534bc331aa0d1bbc64e9869c8618f931acc54
diff --git a/readthedocs/restapi/views/footer_views.py b/readthedocs/restapi/views/footer_views.py --- a/readthedocs/restapi/views/footer_views.py +++ b/readthedocs/restapi/views/footer_views.py @@ -43,7 +43,7 @@ def get_version_compare_data(project, base_version=None): } if highest_version_obj: ret_val['url'] = highest_version_obj.get_absolute_url() - ret_val['slug'] = (highest_version_obj.slug,) + ret_val['slug'] = highest_version_obj.slug if base_version and base_version.slug != LATEST: try: base_version_comparable = parse_version_failsafe(
diff --git a/readthedocs/rtd_tests/tests/test_footer.py b/readthedocs/rtd_tests/tests/test_footer.py --- a/readthedocs/rtd_tests/tests/test_footer.py +++ b/readthedocs/rtd_tests/tests/test_footer.py @@ -104,7 +104,7 @@ def test_highest_version_from_stable(self): valid_data = { 'project': 'Version 0.8.1 of Pip (19)', 'url': '/dashboard/pip/version/0.8.1/', - 'slug': ('0.8.1',), + 'slug': '0.8.1', 'version': '0.8.1', 'is_highest': True, } @@ -116,7 +116,7 @@ def test_highest_version_from_lower(self): valid_data = { 'project': 'Version 0.8.1 of Pip (19)', 'url': '/dashboard/pip/version/0.8.1/', - 'slug': ('0.8.1',), + 'slug': '0.8.1', 'version': '0.8.1', 'is_highest': False, } @@ -129,7 +129,7 @@ def test_highest_version_from_latest(self): valid_data = { 'project': 'Version 0.8.1 of Pip (19)', 'url': '/dashboard/pip/version/0.8.1/', - 'slug': ('0.8.1',), + 'slug': '0.8.1', 'version': '0.8.1', 'is_highest': True, } @@ -157,7 +157,7 @@ def test_highest_version_over_branches(self): valid_data = { 'project': 'Version 1.0.0 of Pip ({})'.format(version.pk), 'url': '/dashboard/pip/version/1.0.0/', - 'slug': ('1.0.0',), + 'slug': '1.0.0', 'version': '1.0.0', 'is_highest': False, } @@ -171,7 +171,7 @@ def test_highest_version_without_tags(self): valid_data = { 'project': 'Version 0.8.1 of Pip (19)', 'url': '/dashboard/pip/version/0.8.1/', - 'slug': ('0.8.1',), + 'slug': '0.8.1', 'version': '0.8.1', 'is_highest': True, } @@ -182,7 +182,7 @@ def test_highest_version_without_tags(self): valid_data = { 'project': 'Version 0.8.1 of Pip (19)', 'url': '/dashboard/pip/version/0.8.1/', - 'slug': ('0.8.1',), + 'slug': '0.8.1', 'version': '0.8.1', 'is_highest': False, } @@ -199,7 +199,7 @@ def test_highest_version_without_tags(self): valid_data = { 'project': 'Version 2.0.0 of Pip ({})'.format(version.pk), 'url': '/dashboard/pip/version/2.0.0/', - 'slug': ('2.0.0',), + 'slug': '2.0.0', 'version': '2.0.0', 'is_highest': False, }
Old version banner uses a version number format present nowhere else

## Details

- Project URL: https://github.com/dfhack/dfhack/
- Build URL: http://dfhack.readthedocs.io/en/v0.40.24-r5/ and http://dfhack.readthedocs.io/en/0.42.06-r1/
- Read the Docs username: lethosor

## Expected result

The banner on documentation pages for old versions of our project should refer to the newest version using the same version number format used elsewhere; in this case, "0.43.03-r1".

## Actual result

The banner refers to "0.43.3-post1", which isn't used anywhere else, as far as I can tell.

## Additional Information

It would be great if this banner could use the version numbers that our project uses instead of the ones with "post1" at the end. I realize the scheme we use is a bit unconventional, but it's tied to another project which uses the "0.43.03" format that we don't really have control over.

I made a tentative patch at https://github.com/lethosor/readthedocs.org/commit/7d84130471885905a3f663324af602b7be1f7f64, although I haven't tested it enough to be confident that it'll work for other projects. In particular, I'm not sure in what situations (if any) `slugs` could contain multiple items.
I think this is not a bug in RTD, but how the `packaging` module works: https://pypi.python.org/pypi/packaging

```
>>> from packaging.version import Version
>>> v = '0.43.03-r1'
>>> Version(v)
<Version('0.43.3.post1')>
>>>
```

This is explained at https://www.python.org/dev/peps/pep-0440/#post-release-spelling

Sorry for not responding earlier - I just got a notification for this about 10 minutes ago. Anyway, I think you're correct about where that version number is coming from, but it's inconsistent with how everything else (that I can see) is displayed. For instance, on http://dfhack.readthedocs.io/en/stable/, clicking on the version selector in the lower right corner ("v: stable") gives a list of choices such as "0.43.03-r1", "0.43.05-r1", etc., which link to pages with those version numbers in the URLs as well (e.g. http://dfhack.readthedocs.io/en/0.43.05-r1/).

Yeah, I understand your point. So, I think RTD should decide one way or another, but not both.
created_at: 2018-01-11T18:15:52
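The one-character fix above is easy to reproduce in isolation: a trailing comma inside parentheses creates a one-element tuple, so the footer API was returning `('0.8.1',)` where the consumer expected the string `'0.8.1'`. A minimal sketch:

```python
slug = '0.8.1'

as_tuple = (slug,)   # trailing comma: a 1-tuple, not a string
as_string = slug     # what the footer API should expose

print('/en/{}/'.format(as_tuple))   # /en/('0.8.1',)/  -- broken URL
print('/en/{}/'.format(as_string))  # /en/0.8.1/
```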
repo: readthedocs/readthedocs.org
pull_number: 3525
instance_id: readthedocs__readthedocs.org-3525
issue_numbers: ["1917"]
base_commit: 5030ec45b6f0560770fb7b742a3e668022201f59
diff --git a/readthedocs/doc_builder/backends/mkdocs.py b/readthedocs/doc_builder/backends/mkdocs.py --- a/readthedocs/doc_builder/backends/mkdocs.py +++ b/readthedocs/doc_builder/backends/mkdocs.py @@ -111,11 +111,16 @@ def append_conf(self, **__): '%scss/readthedocs-doc-embed.css' % static_url, ]) - docs_path = os.path.join(self.root_path, docs_dir) + # The docs path is relative to the location + # of the mkdocs configuration file. + docs_path = os.path.join( + os.path.dirname(self.yaml_file), + docs_dir + ) # RTD javascript writing rtd_data = self.generate_rtd_data( - docs_dir=docs_dir, + docs_dir=os.path.relpath(docs_path, self.root_path), mkdocs_config=user_config ) with open(os.path.join(docs_path, 'readthedocs-data.js'), 'w') as f:
diff --git a/readthedocs/rtd_tests/tests/test_doc_builder.py b/readthedocs/rtd_tests/tests/test_doc_builder.py --- a/readthedocs/rtd_tests/tests/test_doc_builder.py +++ b/readthedocs/rtd_tests/tests/test_doc_builder.py @@ -335,3 +335,36 @@ def test_dont_override_theme(self, checkout_path, run): config['theme_dir'], 'not-readthedocs' ) + + @patch('readthedocs.doc_builder.backends.mkdocs.BaseMkdocs.generate_rtd_data') + @patch('readthedocs.doc_builder.base.BaseBuilder.run') + @patch('readthedocs.projects.models.Project.checkout_path') + def test_write_js_data_docs_dir(self, checkout_path, run, generate_rtd_data): + tmpdir = tempfile.mkdtemp() + os.mkdir(os.path.join(tmpdir, 'docs')) + yaml_file = os.path.join(tmpdir, 'mkdocs.yml') + yaml.safe_dump( + { + 'site_name': 'mkdocs', + 'docs_dir': 'docs', + }, + open(yaml_file, 'w') + ) + checkout_path.return_value = tmpdir + generate_rtd_data.return_value = '' + + python_env = Virtualenv( + version=self.version, + build_env=self.build_env, + config=None, + ) + self.searchbuilder = MkdocsHTML( + build_env=self.build_env, + python_env=python_env, + ) + self.searchbuilder.append_conf() + + generate_rtd_data.assert_called_with( + docs_dir='docs', + mkdocs_config=mock.ANY + )
Flyout links incorrect on Mkdocs projects

GitHub links in the sidebar footer contain an extraneous path. For example, on http://tracks.readthedocs.org/en/latest/upgrading/ the "View" link points to https://github.com/TracksApp/tracks/blob/master/home/docs/checkouts/readthedocs.org/user_builds/tracks/checkouts/latest/doc/Upgrading.md.
#1480 was closed even though the root cause seems to be the same. Yet, the flyout links are still incorrect.

I was trying to fix this and believe I found the cause; I was able to get a working version for a MkDocs based project. More info here: https://github.com/rtfd/readthedocs.org/commit/98b5d34316917e0bdb26404c6f3816108ce7f42e

@agjohnson any thoughts on my comment in https://github.com/rtfd/readthedocs.org/commit/98b5d34316917e0bdb26404c6f3816108ce7f42e? I have tested the change locally using the directions here: https://docs.readthedocs.org/en/latest/install.html#installing-read-the-docs and it works for MkDocs and Sphinx based projects. @ericholscher do you have any thoughts on this? I have a fix that is tested, so I can create the PR. Want to get this fixed as we just went live with our ReadTheDocs integration.

@sahilTakiar Definitely send the PR over!

The same here. Steps to reproduce:

1. Go to [this page](http://docs.drupalvm.com/en/latest/).
2. Click _Read the Docs_ at the bottom, then View or Edit.

The links are like:

```
https://github.com/geerlingguy/drupal-vm/blob/master/home/docs/checkouts/readthedocs.org/user_builds/drupal-vm/checkouts/latest/docs/index.md
https://github.com/geerlingguy/drupal-vm/edit/master/home/docs/checkouts/readthedocs.org/user_builds/drupal-vm/checkouts/latest/docs/index.md
```

which aren't valid.

Same here: http://mathquill.readthedocs.io/en/latest/

#2066 was merged hours ago, so this should now be closed, I guess.

Thanks @marcelstoer. I just rebuilt and the issue persists, so I don't believe this should be closed until the change is deployed.

Just deployed this, so it should hopefully be fixed.

It appears to be. Thanks @sahilTakiar and @ericholscher!

It works for the OP's project at http://tracks.readthedocs.org but it doesn't work for any other project listed here in the comments: http://docs.drupalvm.com, http://mathquill.readthedocs.io and http://nodemcu.readthedocs.io (mine) still have these odd ".../docs/checkouts/readthedocs.org/..." URLs. What's the difference between them? I just built mine again to be sure.

Same for me. The view and edit URLs have the same '.../docs/checkouts/readthedocs.org/...' in them. I thought maybe it was related to having ```edit_uri``` set in ```mkdocs.yml```, but removing it and rebuilding seems to make no difference. @marcelstoer doesn't even have it set in their mkdocs.yml.

- https://psraw.readthedocs.io/en/latest/
- https://psmsgraph.readthedocs.io/en/latest/
- https://autodocumentsexample.readthedocs.io/en/latest/

`edit_uri` is a feature that [appeared in mkdocs 0.16](http://www.mkdocs.org/about/release-notes/#version-016-2016-11-04) and it would help in solving this, I think. But it seems RTFD is still using mkdocs 0.15 according to my [build logs](https://readthedocs.org/projects/phpspreadsheet/builds/5380219/). Is an upgrade to mkdocs 0.16 planned? Or is it something that could be contributed by someone relatively easily?
created_at: 2018-01-17T04:03:00
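The heart of the fix above is path arithmetic: `docs_dir` is relative to the directory containing `mkdocs.yml`, not to the checkout root, and the value written into the JS data must then be re-relativized to that root. A sketch under assumed paths:

```python
import os

root_path = '/user_builds/tracks/checkouts/latest'        # assumed checkout root
yaml_file = os.path.join(root_path, 'doc', 'mkdocs.yml')  # config in a subdir
docs_dir = 'src'  # docs_dir from mkdocs.yml, relative to the config file

# Resolve docs_dir against the mkdocs.yml location...
docs_path = os.path.join(os.path.dirname(yaml_file), docs_dir)
print(docs_path)  # /user_builds/tracks/checkouts/latest/doc/src

# ...then hand the RTD data generator a path relative to the checkout root.
print(os.path.relpath(docs_path, root_path))  # doc/src
```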
repo: readthedocs/readthedocs.org
pull_number: 3543
instance_id: readthedocs__readthedocs.org-3543
issue_numbers: ["3493"]
base_commit: b242b414d8cc6924c93088aef32b0fdd6263575c
diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py --- a/readthedocs/projects/tasks.py +++ b/readthedocs/projects/tasks.py @@ -828,6 +828,16 @@ def remove_orphan_symlinks(): os.unlink(orphan_domain_path) [email protected](queue='web') +def broadcast_remove_orphan_symlinks(): + """ + Broadcast the task ``remove_orphan_symlinks`` to all our web servers. + + This task is executed by CELERY BEAT. + """ + broadcast(type='web', task=remove_orphan_symlinks, args=[]) + + @app.task(queue='web') def symlink_subproject(project_pk): project = Project.objects.get(pk=project_pk) diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -247,7 +247,7 @@ def USE_PROMOS(self): # noqa CELERYBEAT_SCHEDULE = { # Ran every hour on minute 30 'hourly-remove-orphan-symlinks': { - 'task': 'readthedocs.projects.tasks.remove_orphan_symlinks', + 'task': 'readthedocs.projects.tasks.broadcast_remove_orphan_symlinks', 'schedule': crontab(minute=30), 'options': {'queue': 'web'}, },
diff --git a/readthedocs/rtd_tests/tests/test_project_symlinks.py b/readthedocs/rtd_tests/tests/test_project_symlinks.py --- a/readthedocs/rtd_tests/tests/test_project_symlinks.py +++ b/readthedocs/rtd_tests/tests/test_project_symlinks.py @@ -14,7 +14,7 @@ from readthedocs.builds.models import Version from readthedocs.projects.models import Project, Domain -from readthedocs.projects.tasks import symlink_project, remove_orphan_symlinks +from readthedocs.projects.tasks import broadcast_remove_orphan_symlinks, remove_orphan_symlinks, symlink_project from readthedocs.core.symlink import PublicSymlink, PrivateSymlink @@ -238,6 +238,16 @@ def test_symlink_remove_orphan_symlinks(self): self.assertFilesystem(filesystem) + def test_broadcast_remove_orphan_symlinks(self): + """Broadcast orphan symlinks is called with the proper attributes.""" + with mock.patch('readthedocs.projects.tasks.broadcast') as broadcast: + broadcast_remove_orphan_symlinks() + + broadcast.assert_called_with( + type='web', + task=remove_orphan_symlinks, + args=[], + ) def test_symlink_cname_dont_link_missing_domains(self): """Domains should be relinked after deletion"""
Remove old symlink when Domain is changed

I started getting random 404s at https://docs.pytest.org/en/latest/ and we found they have configured `doc.pytest.org` as their domain, so the new web server was failing to serve the files.

At some point, they must have configured `docs.pytest.org` under RTD and then changed it to just `doc`; otherwise RTD would know nothing about the `docs` domain in the old web servers.

To fix this, once the Domain is changed we need to perform a cleanup of the old symlinks in the web servers. This can be done by a task that catches all the various symlink leftovers we have and syncs them.

* https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/projects/views/private.py#L678-L701
* https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/projects/forms.py#L546

> As a reference, they are mixing `doc` and `docs` in their repo: https://github.com/pytest-dev/pytest/blob/0a15edd5732578f6cc40e28770f0fc87dfa5ced9/doc/en/announce/release-3.2.0.rst
We should do two things:

* Change the logic of the Domain Form or View to trigger a delete symlink task
* Have a background task that runs every hour, checks for invalid symlinks, and deletes them if found

The change should be immediate when users act, but it's nice to have something in the background doing cleanup. It also catches a number of cases the Views & Forms don't catch (e.g. if we edit it in the Admin, CLI, ORM `update()`, etc.).

It looks like our symlink domain logic already accounts for deletions, we just aren't calling it: https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/projects/tasks.py#L775-L801

I'm not really sure how the `Domain` works under RTD, but I see there is a `count` attribute (number of times this domain has been hit) in the model that won't make sense anymore if we modify/edit the domain object. Taking a look at the code, it doesn't seem to have been designed to allow the _edit_ operation; it looks more like _delete_ and _create_ a new one. In this case, the `delete` method will remove the old symlinks properly, since it calls the proper task:

https://github.com/rtfd/readthedocs.org/blob/1af444173481df9a7b16e015fc6b12abe8155e7e/readthedocs/projects/models.py#L1014-L1018

Regarding the background task, what's the proper way to do this? How do we search for orphan domains?

1. list all the symlinks under `(Public/Private)Symlink.CNAME_ROOT`
1. use each of them to query the database and check that the Domain object exists
1. if it doesn't exist, remove the symlink

Sounds good?

To modify the Domain Form or View, we would need to modify the `symlink_domain` task to accept a `domain_name` instead of a `domain_pk`, since the domain name that we want to remove no longer exists in the database, only in the filesystem. So, at the moment we don't have a way to remove a "domain name" that isn't a "domain object" in the database. I see two possible solutions:

1. do not allow _edit_ at all and force the user to delete and create a new one
1. broadcast the periodic task immediately when the domain is modified (kind of overkill)

I think that 1) makes more sense after reading the code.
created_at: 2018-01-23T20:25:49
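The shape of the fix above, reduced to its essentials: Celery beat enqueues a task only once, so the scheduled entry now points at a thin wrapper that fans the real cleanup task out to every web host. A sketch of the schedule entry (task path and queue name are taken from the patch; `crontab` is Celery's):

```python
from celery.schedules import crontab

CELERYBEAT_SCHEDULE = {
    # Ran hourly on minute 30; the wrapper broadcasts remove_orphan_symlinks
    # so that each web server prunes its own filesystem.
    'hourly-remove-orphan-symlinks': {
        'task': 'readthedocs.projects.tasks.broadcast_remove_orphan_symlinks',
        'schedule': crontab(minute=30),
        'options': {'queue': 'web'},
    },
}
```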
repo: readthedocs/readthedocs.org
pull_number: 3544
instance_id: readthedocs__readthedocs.org-3544
issue_numbers: ["872"]
base_commit: 1af444173481df9a7b16e015fc6b12abe8155e7e
diff --git a/readthedocs/restapi/views/model_views.py b/readthedocs/restapi/views/model_views.py --- a/readthedocs/restapi/views/model_views.py +++ b/readthedocs/restapi/views/model_views.py @@ -205,6 +205,13 @@ class BuildViewSetBase(UserSelectViewSet): admin_serializer_class = BuildAdminSerializer model = Build + def get_queryset(self): + query = super(BuildViewSetBase, self).get_queryset() + commit = self.request.query_params.get('commit', None) + if commit is not None: + query = query.filter(commit=commit) + return query + class BuildViewSet(SettingsOverrideObject):
diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -152,6 +152,22 @@ def test_make_build_commands(self): self.assertEqual(build['commands'][0]['run_time'], 5) self.assertEqual(build['commands'][0]['description'], 'foo') + def test_build_filter_by_commit(self): + """ + Create a build with commit + Should return the list of builds according to the + commit query params + """ + get(Build, project_id=1, version_id=1, builder='foo', commit='test') + get(Build, project_id=2, version_id=1, builder='foo', commit='other') + client = APIClient() + api_user = get(User, staff=False, password='test') + client.force_authenticate(user=api_user) + resp = client.get('/api/v2/build/', {'commit': 'test'}, format='json') + self.assertEqual(resp.status_code, 200) + build = resp.data + self.assertEqual(len(build['results']), 1) + class APITests(TestCase): fixtures = ['eric.json', 'test_data.json']
Provide an API to query the build status by commit

In order to do a check before release that everything is ok, I would like to have a way to obtain the current build status for a given commit. So, in addition to:

```
GET /api/v1/build/{id}/
```

also have this:

```
GET /api/v1/commit/{sha1}/
```

or

```
GET /api/v1/{user}/{project}/commit/{sha1}/
```

Is this possible right now?
Neat. We currently don't record the commit hash for builds. This is a good idea, and would allow that kind of API.

Any news on this? Any way I can help?

New (simpler?) idea: is it possible to insert the commit hash in the sphinx html? If it can be done already, then I just need to download the page and check that it matches my latest hash.

This should be part of newly built documentation with our theme already; we pass the commit hash in via the API.

We are storing the commit hash on the build object now, but we don't expose the commit as a first-class endpoint in the api. You can find similar API endpoints here: https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/restapi/views/core_views.py Adding a `build_status` endpoint to that file with a test would be a good place to start.

This looks like it might have some overlap with #2251, which also adds commit ids to builds.

@agjohnson This "good first issue" is quite old. Can we improve the solution for fixing it, to encourage contribution? Same with https://github.com/rtfd/readthedocs.org/issues/1045.

Would it be possible to get more up to date information on this issue? I'm interested to have a look. Thanks.

> Would it be possible to get more up to date information on this issue? I'm interested to have a look.

Thanks for your interest. I think the original aspect of the issue is still valid: "build an API endpoint that receives a project and a commit hash and returns the status (failed/success) of that build."

The `Build` object should have the commit hash: https://github.com/rtfd/readthedocs.org/blob/1af444173481df9a7b16e015fc6b12abe8155e7e/readthedocs/builds/models.py#L430-L431

So, I think we have all the information to create that endpoint. This could live next to the others in this file: https://github.com/rtfd/readthedocs.org/blob/1af444173481df9a7b16e015fc6b12abe8155e7e/readthedocs/restapi/views/model_views.py

@Alig1493 let me know if you are able to start with that info or not. Please keep us updated on this :) Thanks!

Will the api endpoint be under v1 or v2?

v2 is the right place. v1 is deprecated and shouldn't be used.

@humitos There is already an `api/v2/build/<id>` endpoint, so what should be the endpoint for this?

Yes. What I understood from the original report is to have the ability that, given the sha and the project, you can know the status of that build. Right now a build id is needed, and there's no way to get it from a sha. Makes sense?

@humitos I understand. I wanted to explain that there is already an endpoint for `api/v2/build/<id>`, so adding another endpoint like `api/v2/build/<commit hash>` would confuse users. So I was wondering what the API endpoint should look like. Can you please clarify?
created_at: 2018-01-23T21:07:01
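With the filter above deployed, a pre-release check for a commit's build status is a single GET with a `commit` query parameter. A sketch using `requests`; the response fields shown are assumptions based on the Build model rather than a documented contract:

```python
import requests

resp = requests.get(
    'https://readthedocs.org/api/v2/build/',
    params={'commit': '76635e64188e94a9fa7c92828d7c91fa916fef23'},
)
resp.raise_for_status()
for build in resp.json()['results']:
    # 'state' and 'success' are assumed serializer fields; hedge with .get().
    print(build.get('id'), build.get('state'), build.get('success'))
```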
repo: readthedocs/readthedocs.org
pull_number: 3545
instance_id: readthedocs__readthedocs.org-3545
issue_numbers: ["1989"]
base_commit: 1af444173481df9a7b16e015fc6b12abe8155e7e
diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -111,7 +111,7 @@ def parse_tags(self, data): if row == []: continue commit_hash, name = row - clean_name = name.split('/')[-1] + clean_name = name.replace('refs/tags/', '') vcs_tags.append(VCSVersion(self, commit_hash, clean_name)) return vcs_tags @@ -148,15 +148,12 @@ def parse_branches(self, data): if branch: branch = branch[0] if branch.startswith('origin/'): - cut_len = len('origin/') - slug = branch[cut_len:].replace('/', '-') - if slug in ['HEAD']: + verbose_name = branch.replace('origin/', '') + if verbose_name in ['HEAD']: continue - clean_branches.append(VCSVersion(self, branch, slug)) + clean_branches.append(VCSVersion(self, branch, verbose_name)) else: - # Believe this is dead code. - slug = branch.replace('/', '-') - clean_branches.append(VCSVersion(self, branch, slug)) + clean_branches.append(VCSVersion(self, branch, branch)) return clean_branches @property
diff --git a/readthedocs/rtd_tests/tests/test_backend.py b/readthedocs/rtd_tests/tests/test_backend.py --- a/readthedocs/rtd_tests/tests/test_backend.py +++ b/readthedocs/rtd_tests/tests/test_backend.py @@ -33,15 +33,17 @@ def test_parse_branches(self): origin/HEAD -> origin/master origin/master origin/release/2.0.0 + origin/release/foo/bar """ expected_ids = [ ('develop', 'develop'), ('master', 'master'), - ('release/2.0.0', 'release-2.0.0'), + ('release/2.0.0', 'release/2.0.0'), ('origin/2.0.X', '2.0.X'), ('origin/master', 'master'), - ('origin/release/2.0.0', 'release-2.0.0') + ('origin/release/2.0.0', 'release/2.0.0'), + ('origin/release/foo/bar', 'release/foo/bar'), ] given_ids = [(x.identifier, x.verbose_name) for x in self.project.vcs_repo().parse_branches(data)] @@ -60,6 +62,7 @@ def test_parse_git_tags(self): a63a2de628a3ce89034b7d1a5ca5e8159534eef0 refs/tags/2.1.0.beta2 c7fc3d16ed9dc0b19f0d27583ca661a64562d21e refs/tags/2.1.0.rc1 edc0a2d02a0cc8eae8b67a3a275f65cd126c05b1 refs/tags/2.1.0.rc2 + 274a5a8c988a804e40da098f59ec6c8f0378fe34 refs/tags/release/foobar """ expected_tags = [ ('3b32886c8d3cb815df3793b3937b2e91d0fb00f1', '2.0.0'), @@ -68,6 +71,7 @@ def test_parse_git_tags(self): ('a63a2de628a3ce89034b7d1a5ca5e8159534eef0', '2.1.0.beta2'), ('c7fc3d16ed9dc0b19f0d27583ca661a64562d21e', '2.1.0.rc1'), ('edc0a2d02a0cc8eae8b67a3a275f65cd126c05b1', '2.1.0.rc2'), + ('274a5a8c988a804e40da098f59ec6c8f0378fe34', 'release/foobar'), ] given_ids = [(x.identifier, x.verbose_name) for x in
Git backend transforming tags/branches incorrectly

I had thought that the refactor to version slugs had unified our slugging for VCS versions, but it seems the Git backend is still performing transforms on these names. Is there a reason for this? For instance, a repository with a tag of "release/foobar" becomes just "foobar" at: https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/vcs_support/backends/git.py#L113

Shouldn't the `VersionSlugField` be handling this now?
@agjohnson following your example, what should be the correct generated slug, `release-foobar`?
created_at: 2018-01-23T21:29:02
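The essence of the fix above is prefix-stripping instead of suffix-splitting: removing only the known `refs/tags/` prefix preserves slashes inside tag names like `release/foobar`. A standalone sketch, simplified from the backend's `parse_tags`:

```python
def parse_tags(data):
    vcs_tags = []
    for line in data.strip().splitlines():
        row = line.split(None, 1)  # '<sha> refs/tags/<name>'
        if len(row) != 2:
            continue
        commit_hash, name = row
        # Strip only the known prefix; '/' inside the tag name survives.
        clean_name = name.replace('refs/tags/', '')
        vcs_tags.append((commit_hash, clean_name))
    return vcs_tags

data = '274a5a8c988a804e40da098f59ec6c8f0378fe34 refs/tags/release/foobar'
print(parse_tags(data))
# [('274a5a8c988a804e40da098f59ec6c8f0378fe34', 'release/foobar')]
```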
repo: readthedocs/readthedocs.org
pull_number: 3548
instance_id: readthedocs__readthedocs.org-3548
issue_numbers: ["3337"]
base_commit: e923c0cdf7547e63b4d2a9ab2b5e69b527bb482b
diff --git a/readthedocs/restapi/serializers.py b/readthedocs/restapi/serializers.py --- a/readthedocs/restapi/serializers.py +++ b/readthedocs/restapi/serializers.py @@ -3,6 +3,8 @@ from __future__ import absolute_import from builtins import object + +from allauth.socialaccount.models import SocialAccount from rest_framework import serializers from readthedocs.builds.models import Build, BuildCommandResult, Version @@ -157,3 +159,27 @@ def get_matches(self, obj): request = self.context['request'] if request.user is not None and request.user.is_authenticated(): return obj.matches(request.user) + + +class ProviderSerializer(serializers.Serializer): + + id = serializers.CharField(max_length=20) + name = serializers.CharField(max_length=20) + + +class SocialAccountSerializer(serializers.ModelSerializer): + + username = serializers.SerializerMethodField() + avatar_url = serializers.URLField(source='get_avatar_url') + provider = ProviderSerializer(source='get_provider') + + class Meta(object): + model = SocialAccount + exclude = ('extra_data',) + + def get_username(self, obj): + return ( + obj.extra_data.get('username') or + obj.extra_data.get('login') + # FIXME: which one is GitLab? + ) diff --git a/readthedocs/restapi/urls.py b/readthedocs/restapi/urls.py --- a/readthedocs/restapi/urls.py +++ b/readthedocs/restapi/urls.py @@ -17,7 +17,8 @@ ProjectViewSet, NotificationViewSet, VersionViewSet, DomainViewSet, RemoteOrganizationViewSet, - RemoteRepositoryViewSet) + RemoteRepositoryViewSet, + SocialAccountViewSet) router = routers.DefaultRouter() router.register(r'build', BuildViewSet, base_name='build') @@ -30,6 +31,8 @@ r'remote/org', RemoteOrganizationViewSet, base_name='remoteorganization') router.register( r'remote/repo', RemoteRepositoryViewSet, base_name='remoterepository') +router.register( + r'remote/account', SocialAccountViewSet, base_name='remoteaccount') router.register(r'comments', CommentViewSet, base_name="comments") urlpatterns = [ diff --git a/readthedocs/restapi/views/model_views.py b/readthedocs/restapi/views/model_views.py --- a/readthedocs/restapi/views/model_views.py +++ b/readthedocs/restapi/views/model_views.py @@ -6,6 +6,7 @@ import logging +from allauth.socialaccount.models import SocialAccount from django.shortcuts import get_object_or_404 from rest_framework import decorators, permissions, status, viewsets from rest_framework.decorators import detail_route @@ -28,7 +29,7 @@ BuildAdminSerializer, BuildCommandSerializer, BuildSerializer, DomainSerializer, ProjectAdminSerializer, ProjectSerializer, RemoteOrganizationSerializer, RemoteRepositorySerializer, - VersionAdminSerializer, VersionSerializer) + SocialAccountSerializer, VersionAdminSerializer, VersionSerializer) log = logging.getLogger(__name__) @@ -275,8 +276,26 @@ def get_queryset(self): org = self.request.query_params.get('org', None) if org is not None: query = query.filter(organization__pk=org) + + own = self.request.query_params.get('own', None) + if own is not None: + query = query.filter( + account__provider=own, + organization=None, + ) + query = query.filter( account__provider__in=[ service.adapter.provider_id for service in registry ]) return query + + +class SocialAccountViewSet(viewsets.ReadOnlyModelViewSet): + permission_classes = [IsOwner] + renderer_classes = (JSONRenderer,) + serializer_class = SocialAccountSerializer + model = SocialAccount + + def get_queryset(self): + return self.model.objects.filter(user=self.request.user.pk)
diff --git a/readthedocs/rtd_tests/tests/test_privacy_urls.py b/readthedocs/rtd_tests/tests/test_privacy_urls.py --- a/readthedocs/rtd_tests/tests/test_privacy_urls.py +++ b/readthedocs/rtd_tests/tests/test_privacy_urls.py @@ -2,6 +2,7 @@ from __future__ import print_function import re +from allauth.socialaccount.models import SocialAccount from builtins import object from django.contrib.admindocs.views import extract_views_from_urlpatterns from django.test import TestCase @@ -295,6 +296,7 @@ def setUp(self): self.domain = get(Domain, url='http://docs.foobar.com', project=self.pip) self.comment = get(DocumentComment, node__project=self.pip) self.snapshot = get(NodeSnapshot, node=self.comment.node) + self.social_account = get(SocialAccount) self.remote_org = get(RemoteOrganization) self.remote_repo = get(RemoteRepository, organization=self.remote_org) self.integration = get(Integration, project=self.pip, provider_data='') @@ -314,6 +316,7 @@ def setUp(self): 'footer_html': {'data': {'project': 'pip', 'version': 'latest', 'page': 'index'}}, 'remoteorganization-detail': {'pk': self.remote_org.pk}, 'remoterepository-detail': {'pk': self.remote_repo.pk}, + 'remoteaccount-detail': {'pk': self.social_account.pk}, 'api_webhook': {'integration_pk': self.integration.pk}, } self.response_data = { @@ -337,6 +340,7 @@ def setUp(self): 'api_webhook_generic': {'status_code': 403}, 'remoteorganization-detail': {'status_code': 404}, 'remoterepository-detail': {'status_code': 404}, + 'remoteaccount-detail': {'status_code': 404}, }
Filter by my own repositories when importing

When a GitHub account is connected and you want to import a project, it's kind of "annoying" (don't know the proper word) to look for your own. If you belong to a couple of organizations and they have a lot of projects, you have to click Next many times before you reach the project you want to import.

![captura de pantalla_2017-11-30_12-47-21](https://user-images.githubusercontent.com/244656/33446006-b5783462-d5cc-11e7-9f0b-042a92736ed3.png)

I'm just posting this here to be considered in case it's something easy to do, but it would be zero priority for me.
created_at: 2018-01-25T16:50:19
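With the `own` parameter above in place, the import view can request only the user's personal repositories by passing the allauth provider id. A sketch of the call; the authentication scheme and the `full_name` field are assumptions about the existing serializer:

```python
import requests

resp = requests.get(
    'https://readthedocs.org/api/v2/remote/repo/',
    params={'own': 'github'},  # provider id; organization repos are excluded
    headers={'Authorization': 'Token <api-token>'},  # placeholder credential
)
resp.raise_for_status()
for repo in resp.json()['results']:
    print(repo.get('full_name'))
```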
repo: readthedocs/readthedocs.org
pull_number: 3556
instance_id: readthedocs__readthedocs.org-3556
issue_numbers: ["3553"]
base_commit: b12e1885dc7f6da6b9719054e3654019b1e0fe65
diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py --- a/readthedocs/doc_builder/environments.py +++ b/readthedocs/doc_builder/environments.py @@ -13,6 +13,7 @@ from datetime import datetime from readthedocs.core.utils import slugify +from django.conf import settings from django.utils.translation import ugettext_lazy as _, ugettext_noop from docker import Client from docker.utils import create_host_config @@ -662,6 +663,42 @@ def get_client(self): ) ) + def get_container_host_config(self): + """ + Create the ``host_config`` settings for the container. + + It mainly generates the proper path bindings between the Docker + container and the Host by mounting them with the proper permissions. + Besides, it mounts the ``GLOBAL_PIP_CACHE`` if it's set and we are under + ``DEBUG``. + + The object returned is passed to Docker function + ``client.create_container``. + """ + binds = { + SPHINX_TEMPLATE_DIR: { + 'bind': SPHINX_TEMPLATE_DIR, + 'mode': 'ro', + }, + MKDOCS_TEMPLATE_DIR: { + 'bind': MKDOCS_TEMPLATE_DIR, + 'mode': 'ro', + }, + self.project.doc_path: { + 'bind': self.project.doc_path, + 'mode': 'rw', + }, + } + + if getattr(settings, 'GLOBAL_PIP_CACHE', False) and settings.DEBUG: + binds.update({ + self.project.pip_cache_path: { + 'bind': self.project.pip_cache_path, + 'mode': 'rw', + } + }) + return create_host_config(binds=binds) + @property def container_id(self): """Return id of container if it is valid.""" @@ -715,20 +752,7 @@ def create_container(self): exit=DOCKER_TIMEOUT_EXIT_CODE)), name=self.container_id, hostname=self.container_id, - host_config=create_host_config(binds={ - SPHINX_TEMPLATE_DIR: { - 'bind': SPHINX_TEMPLATE_DIR, - 'mode': 'ro' - }, - MKDOCS_TEMPLATE_DIR: { - 'bind': MKDOCS_TEMPLATE_DIR, - 'mode': 'ro' - }, - self.project.doc_path: { - 'bind': self.project.doc_path, - 'mode': 'rw' - }, - }), + host_config=self.get_container_host_config(), detach=True, environment=self.environment, mem_limit=self.container_mem_limit, diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -449,7 +449,7 @@ def checkout_path(self, version=LATEST): @property def pip_cache_path(self): """Path to pip cache.""" - if getattr(settings, 'GLOBAL_PIP_CACHE', False): + if getattr(settings, 'GLOBAL_PIP_CACHE', False) and settings.DEBUG: return settings.GLOBAL_PIP_CACHE return os.path.join(self.doc_path, '.cache', 'pip')
Support devpi inside docker containers

Devpi[1] is a great tool to help with intermittent connections. While I am traveling (airports/airplanes/spotty cell service), I find myself wanting a local pypi server. Devpi allows for online and offline usage, so normal day-to-day usage on solid internet connections should seed a cache that can be used offline.

The problem blocking this from use is that the docker container would need to be created with additional options, forwarding the host (or another docker container hosting devpi). One option around this might be to create a signal that can be caught before running the docker command to create the container, and allow for alteration of the dictionary that we pass to the docker API. This would allow for a custom RTD plugin that would add a link to a devpi container, or an additional forwarded port/etc.
created_at: 2018-01-26T14:45:54
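A sketch of the development settings this change enables: with `DEBUG` on and `GLOBAL_PIP_CACHE` set, every build container bind-mounts one shared pip cache read-write, which is what makes an offline devpi/pip-cache workflow practical. The path is an assumption:

```python
# Local development settings (e.g. a local_settings.py override).
DEBUG = True

# Shared on-disk pip cache, bind-mounted read-write into each build
# container by DockerEnvironment.get_container_host_config().
GLOBAL_PIP_CACHE = '/home/docs/pip-cache'  # assumed host path
```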
repo: readthedocs/readthedocs.org
pull_number: 3559
instance_id: readthedocs__readthedocs.org-3559
issue_numbers: ["3251"]
base_commit: b12e1885dc7f6da6b9719054e3654019b1e0fe65
diff --git a/readthedocs/restapi/utils.py b/readthedocs/restapi/utils.py --- a/readthedocs/restapi/utils.py +++ b/readthedocs/restapi/utils.py @@ -1,9 +1,14 @@ +# -*- coding: utf-8 -*- """Utility functions that are used by both views and celery tasks.""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + import hashlib import logging +from rest_framework.pagination import PageNumberPagination + from readthedocs.builds.constants import NON_REPOSITORY_VERSIONS from readthedocs.builds.models import Version from readthedocs.search.indexes import PageIndex, ProjectIndex, SectionIndex @@ -31,14 +36,17 @@ def sync_versions(project, versions, type): # pylint: disable=redefined-builtin else: # Update slug with new identifier Version.objects.filter( - project=project, verbose_name=version_name - ).update( - identifier=version_id, - type=type, - machine=False, + project=project, verbose_name=version_name).update( + identifier=version_id, + type=type, + machine=False, + ) # noqa + + log.info( + '(Sync Versions) Updated Version: [%s=%s] ', + version['verbose_name'], + version['identifier'], ) - log.info("(Sync Versions) Updated Version: [%s=%s] ", - version['verbose_name'], version['identifier']) else: # New Version created_version = Version.objects.create( @@ -49,7 +57,7 @@ def sync_versions(project, versions, type): # pylint: disable=redefined-builtin ) added.add(created_version.slug) if added: - log.info("(Sync Versions) Added Versions: [%s] ", ' '.join(added)) + log.info('(Sync Versions) Added Versions: [%s] ', ' '.join(added)) return added @@ -70,14 +78,15 @@ def delete_versions(project, version_data): if to_delete_qs.count(): ret_val = {obj.slug for obj in to_delete_qs} - log.info("(Sync Versions) Deleted Versions: [%s]", ' '.join(ret_val)) + log.info('(Sync Versions) Deleted Versions: [%s]', ' '.join(ret_val)) to_delete_qs.delete() return ret_val return set() -def index_search_request(version, page_list, commit, project_scale, page_scale, - section=True, delete=True): +def index_search_request( + version, page_list, commit, project_scale, page_scale, section=True, + delete=True): """ Update search indexes with build output JSON. @@ -89,21 +98,25 @@ def index_search_request(version, page_list, commit, project_scale, page_scale, project = version.project log_msg = ' '.join([page['path'] for page in page_list]) - log.info("Updating search index: project=%s pages=[%s]", - project.slug, log_msg) + log.info( + 'Updating search index: project=%s pages=[%s]', + project.slug, + log_msg, + ) project_obj = ProjectIndex() - project_obj.index_document(data={ - 'id': project.pk, - 'name': project.name, - 'slug': project.slug, - 'description': project.description, - 'lang': project.language, - 'author': [user.username for user in project.users.all()], - 'url': project.get_absolute_url(), - 'tags': None, - 'weight': project_scale, - }) + project_obj.index_document( + data={ + 'id': project.pk, + 'name': project.name, + 'slug': project.slug, + 'description': project.description, + 'lang': project.language, + 'author': [user.username for user in project.users.all()], + 'url': project.get_absolute_url(), + 'tags': None, + 'weight': project_scale, + }) page_obj = PageIndex() section_obj = SectionIndex() @@ -112,7 +125,7 @@ def index_search_request(version, page_list, commit, project_scale, page_scale, routes = [project.slug] routes.extend([p.parent.slug for p in project.superprojects.all()]) for page in page_list: - log.debug("Indexing page: %s:%s", project.slug, page['path']) + log.debug('Indexing page: %s:%s', project.slug, page['path']) to_hash = '-'.join([project.slug, version.slug, page['path']]) page_id = hashlib.md5(to_hash.encode('utf-8')).hexdigest() index_list.append({ @@ -129,8 +142,12 @@ }) if section: for sect in page['sections']: - id_to_hash = '-'.join([project.slug, version.slug, - page['path'], sect['id']]) + id_to_hash = '-'.join([ + project.slug, + version.slug, + page['path'], + sect['id'], + ]) section_index_list.append({ 'id': (hashlib.md5(id_to_hash.encode('utf-8')).hexdigest()), 'project': project.slug, @@ -142,28 +159,52 @@ 'weight': page_scale, }) for route in routes: - section_obj.bulk_index(section_index_list, parent=page_id, - routing=route) + section_obj.bulk_index( + section_index_list, + parent=page_id, + routing=route, + ) for route in routes: page_obj.bulk_index(index_list, parent=project.slug, routing=route) if delete: - log.info("Deleting files not in commit: %s", commit) + log.info('Deleting files not in commit: %s', commit) # TODO: AK Make sure this works delete_query = { - "query": { - "bool": { - "must": [ - {"term": {"project": project.slug, }}, - {"term": {"version": version.slug, }}, + 'query': { + 'bool': { + 'must': [ + { + 'term': { + 'project': project.slug, + }, + }, + { + 'term': { + 'version': version.slug, + }, + }, ], - "must_not": { - "term": { - "commit": commit - } - } - } - } + 'must_not': { + 'term': { + 'commit': commit, + }, + }, + }, + }, } page_obj.delete_document(body=delete_query) + + +class RemoteOrganizationPagination(PageNumberPagination): + page_size = 25 + + +class RemoteProjectPagination(PageNumberPagination): + page_size = 15 + + +class ProjectPagination(PageNumberPagination): + page_size = 100 + max_page_size = 1000 diff --git a/readthedocs/restapi/views/model_views.py b/readthedocs/restapi/views/model_views.py --- a/readthedocs/restapi/views/model_views.py +++ b/readthedocs/restapi/views/model_views.py @@ -1,33 +1,34 @@ +# -*- coding: utf-8 -*- """Endpoints for listing Projects, Versions, Builds, etc.""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + import logging from django.shortcuts import get_object_or_404 -from rest_framework import decorators, permissions, viewsets, status +from rest_framework import decorators, permissions, status, viewsets from rest_framework.decorators import detail_route from rest_framework.renderers import JSONRenderer from rest_framework.response import Response -from readthedocs.builds.constants import BRANCH -from readthedocs.builds.constants import TAG +from readthedocs.builds.constants import BRANCH, TAG from readthedocs.builds.models import Build, BuildCommandResult, Version from readthedocs.core.utils import trigger_build from readthedocs.core.utils.extend import SettingsOverrideObject -from readthedocs.oauth.services import GitHubService, registry from readthedocs.oauth.models import RemoteOrganization, RemoteRepository -from readthedocs.projects.models import Project, EmailHook, Domain +from readthedocs.oauth.services import GitHubService, registry +from readthedocs.projects.models import Domain, EmailHook, Project from readthedocs.projects.version_handling import determine_stable_version -from ..permissions import (APIPermission, APIRestrictedPermission, - RelatedProjectIsOwner, IsOwner) -from ..serializers import (BuildSerializer, BuildAdminSerializer, - BuildCommandSerializer, - ProjectSerializer, ProjectAdminSerializer, - VersionSerializer, VersionAdminSerializer, - DomainSerializer, RemoteOrganizationSerializer, - RemoteRepositorySerializer) from .. import utils as api_utils +from ..permissions import ( + APIPermission, APIRestrictedPermission, IsOwner, RelatedProjectIsOwner) +from ..serializers import ( + BuildAdminSerializer, BuildCommandSerializer, BuildSerializer, + DomainSerializer, ProjectAdminSerializer, ProjectSerializer, + RemoteOrganizationSerializer, RemoteRepositorySerializer, + VersionAdminSerializer, VersionSerializer) log = logging.getLogger(__name__) @@ -44,7 +45,8 @@ class UserSelectViewSet(viewsets.ModelViewSet): def get_serializer_class(self): try: - if self.request.user.is_staff and self.admin_serializer_class is not None: + if (self.request.user.is_staff and + self.admin_serializer_class is not None): return self.admin_serializer_class except AttributeError: pass @@ -57,31 +59,34 @@ def get_queryset(self): class ProjectViewSet(UserSelectViewSet): - """List, filter, etc. Projects.""" + """List, filter, etc, Projects.""" permission_classes = [APIPermission] renderer_classes = (JSONRenderer,) serializer_class = ProjectSerializer admin_serializer_class = ProjectAdminSerializer model = Project - paginate_by = 100 - paginate_by_param = 'page_size' - max_paginate_by = 1000 + pagination_class = api_utils.ProjectPagination @decorators.detail_route() def valid_versions(self, request, **kwargs): """Maintain state of versions that are wanted.""" project = get_object_or_404( Project.objects.api(request.user), pk=kwargs['pk']) - if not project.num_major or not project.num_minor or not project.num_point: + if (not project.num_major or not project.num_minor or + not project.num_point): return Response( - {'error': 'Project does not support point version control'}, - status=status.HTTP_400_BAD_REQUEST) + { + 'error': 'Project does not support point version control', + }, + status=status.HTTP_400_BAD_REQUEST, + ) version_strings = project.supported_versions() # Disable making old versions inactive for now. # project.versions.exclude(verbose_name__in=version_strings).update(active=False) - project.versions.filter( - verbose_name__in=version_strings).update(active=True) + project.versions.filter(verbose_name__in=version_strings).update( + active=True, + ) return Response({ 'flat': version_strings, }) @@ -90,7 +95,7 @@ def valid_versions(self, request, **kwargs): def translations(self, *_, **__): translations = self.get_object().translations.all() return Response({ - 'translations': ProjectSerializer(translations, many=True).data + 'translations': ProjectSerializer(translations, many=True).data, }) @detail_route() @@ -100,7 +105,7 @@ def subprojects(self, request, **kwargs): rels = project.subprojects.all() children = [rel.child for rel in rels] return Response({ - 'subprojects': ProjectSerializer(children, many=True).data + 'subprojects': ProjectSerializer(children, many=True).data, }) @detail_route() @@ -109,7 +114,7 @@ def active_versions(self, request, **kwargs): Project.objects.api(request.user), pk=kwargs['pk']) versions = project.versions.filter(active=True) return Response({ - 'versions': VersionSerializer(versions, many=True).data + 'versions': VersionSerializer(versions, many=True).data, }) @decorators.detail_route(permission_classes=[permissions.IsAdminUser]) @@ -118,7 +123,7 @@ def token(self, request, **kwargs): Project.objects.api(request.user), pk=kwargs['pk']) token = GitHubService.get_token_for_project(project, force_local=True) return Response({ - 'token': token + 'token': token, }) @decorators.detail_route() @@ -126,16 +131,18 @@ def canonical_url(self, request, **kwargs): project = get_object_or_404( Project.objects.api(request.user), pk=kwargs['pk']) return Response({ - 'url': project.get_docs_url() + 'url': project.get_docs_url(), }) - @decorators.detail_route(permission_classes=[permissions.IsAdminUser], methods=['post']) + @decorators.detail_route( + permission_classes=[permissions.IsAdminUser], methods=['post']) def sync_versions(self, request, **kwargs): # noqa: D205 """ - Sync the version data in the repo (on the build server) with what we - have in the database. + Sync the version data in the repo (on the build server). - Returns the identifiers for the versions that have been deleted. + Version data in the repo is synced with what we have in the database. + + :returns: the identifiers for the versions that have been deleted. """ project = get_object_or_404( Project.objects.api(request.user), pk=kwargs['pk']) @@ -162,22 +169,27 @@ def sync_versions(self, request, **kwargs): # noqa: D205 added_versions.update(ret_set) deleted_versions = api_utils.delete_versions(project, data) except Exception as e: - log.exception("Sync Versions Error: %s", e.message) - return Response({'error': e.message}, status=status.HTTP_400_BAD_REQUEST) + log.exception('Sync Versions Error: %s', e.message) + return Response( + { + 'error': e.message, + }, + status=status.HTTP_400_BAD_REQUEST, + ) promoted_version = project.update_stable_version() if promoted_version: new_stable = project.get_stable_version() log.info( - "Triggering new stable build: {project}:{version}".format( + 'Triggering new stable build: {project}:{version}'.format( project=project.slug, - version=new_stable.identifier)) + version=new_stable.identifier, + )) trigger_build(project=project, version=new_stable) # Marking the tag that is considered the new stable version as # active and building it if it was just added. - if ( - activate_new_stable and + if (activate_new_stable and promoted_version.slug in added_versions): promoted_version.active = True promoted_version.save() @@ -241,12 +253,14 @@ class RemoteOrganizationViewSet(viewsets.ReadOnlyModelViewSet): renderer_classes = (JSONRenderer,) serializer_class = RemoteOrganizationSerializer model = RemoteOrganization - paginate_by = 25 + pagination_class = api_utils.RemoteOrganizationPagination def get_queryset(self): - return (self.model.objects.api(self.request.user) - .filter(account__provider__in=[service.adapter.provider_id - for service in registry])) + return ( + self.model.objects.api(self.request.user).filter( + account__provider__in=[ + service.adapter.provider_id for service in registry + ])) class RemoteRepositoryViewSet(viewsets.ReadOnlyModelViewSet): @@ -254,15 +268,15 @@ class RemoteRepositoryViewSet(viewsets.ReadOnlyModelViewSet): renderer_classes = (JSONRenderer,) serializer_class = RemoteRepositorySerializer model = RemoteRepository + pagination_class = api_utils.RemoteProjectPagination def get_queryset(self): query = self.model.objects.api(self.request.user) org = self.request.query_params.get('org', None) if org is not None: query = query.filter(organization__pk=org) - query = query.filter(account__provider__in=[service.adapter.provider_id - for service in registry]) + query = query.filter( + account__provider__in=[ + service.adapter.provider_id for service in registry + ]) return query - - def get_paginate_by(self): - return self.request.query_params.get('page_size', 25)
diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -265,6 +265,43 @@ def test_project_features_multiple_projects(self): self.assertIn('features', resp.data) self.assertEqual(resp.data['features'], [feature.feature_id]) + def test_project_pagination(self): + for _ in range(100): + get(Project) + + resp = self.client.get('/api/v2/project/') + self.assertEqual(resp.status_code, 200) + self.assertEqual(len(resp.data['results']), 100) # page_size + self.assertIn('?page=2', resp.data['next']) + + def test_remote_repository_pagination(self): + account = get(SocialAccount, provider='github') + user = get(User, socialaccount_set=[account]) + for _ in range(20): + get(RemoteRepository, users=[user], account=account) + + client = APIClient() + client.force_authenticate(user=user) + + resp = client.get('/api/v2/remote/repo/') + self.assertEqual(resp.status_code, 200) + self.assertEqual(len(resp.data['results']), 15) # page_size + self.assertIn('?page=2', resp.data['next']) + + def test_remote_organization_pagination(self): + account = get(SocialAccount, provider='github') + user = get(User, socialaccount_set=[account]) + for _ in range(30): + get(RemoteOrganization, users=[user], account=account) + + client = APIClient() + client.force_authenticate(user=user) + + resp = client.get('/api/v2/remote/org/') + self.assertEqual(resp.status_code, 200) + self.assertEqual(len(resp.data['results']), 25) # page_size + self.assertIn('?page=2', resp.data['next']) + class APIImportTests(TestCase):
Import UI not showing all organizations

I suspect I recently went over 10 organizations in my GitHub account. Now when I see my list of organizations, I'm missing an organization for which I'm an owner (/yougov):

![image](https://user-images.githubusercontent.com/308610/32730628-b82901de-c855-11e7-8713-270c3d081ce7.png)

I suspect there's either some pagination going on with the organizations or there's a hard-coded limit of 10. In either case, I seem to be unable to filter by that organization.
This sounds like you might be right; this is likely a bug. This endpoint is paginated by 25: https://github.com/rtfd/readthedocs.org/blob/6fd828216fede4a64259c5d5f199f8e4fd5163e1/readthedocs/restapi/views/model_views.py#L244 Not sure why you are getting only 10. On the other hand, I didn't find another possible filter/pagination setting in the https://github.com/rtfd/readthedocs.org/blob/6fd828216fede4a64259c5d5f199f8e4fd5163e1/readthedocs/projects/static-src/projects/js/import.js code :/
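If the root cause is indeed the page size, one option (a minimal sketch, not the code that shipped; the merged patch above only pins `page_size = 25` in a `PageNumberPagination` subclass) would be to also let the client raise the limit from the import UI:

```python
from rest_framework.pagination import PageNumberPagination


class RemoteOrganizationPagination(PageNumberPagination):
    page_size = 25                        # default page size
    page_size_query_param = 'page_size'   # allow the client to request more
    max_page_size = 100                   # hard upper bound
```

The JS in `import.js` could then request `?page_size=100`, or follow the paginator's `next` links, instead of truncating the list.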
2018-01-27T19:32:38
readthedocs/readthedocs.org
3,571
readthedocs__readthedocs.org-3571
[ "3449" ]
df8079a18ed9c15e9c85f0646e3e9169e725b310
diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py --- a/readthedocs/projects/forms.py +++ b/readthedocs/projects/forms.py @@ -270,6 +270,13 @@ def clean_parent(self): _('Subproject nesting is not supported')) return self.project + def clean_child(self): + child = self.cleaned_data['child'] + if child == self.project: + raise forms.ValidationError( + _('A project can not be a subproject of itself')) + return child + def get_subproject_queryset(self): """ Return scrubbed subproject choice queryset. @@ -280,7 +287,8 @@ def get_subproject_queryset(self): queryset = ( Project.objects.for_admin_user(self.user) .exclude(subprojects__isnull=False) - .exclude(superprojects__isnull=False)) + .exclude(superprojects__isnull=False) + .exclude(pk=self.project.pk)) return queryset
diff --git a/readthedocs/rtd_tests/tests/test_subprojects.py b/readthedocs/rtd_tests/tests/test_subprojects.py --- a/readthedocs/rtd_tests/tests/test_subprojects.py +++ b/readthedocs/rtd_tests/tests/test_subprojects.py @@ -143,6 +143,21 @@ def test_excludes_existing_subprojects(self): [''], ) + def test_exclude_self_project_as_subproject(self): + user = fixture.get(User) + project = fixture.get(Project, users=[user]) + + form = ProjectRelationshipForm( + {'child': project.pk}, + project=project, + user=user + ) + self.assertFalse(form.is_valid()) + self.assertNotIn( + project.id, + [proj_id for (proj_id, __) in form.fields['child'].choices] + ) + @override_settings(PUBLIC_DOMAIN='readthedocs.org') class ResolverBase(TestCase):
Don't allow to create a subproject of the project itself

## Expected Result

When creating a subproject, the current project should not be listed as a child and should not be allowed to become a subproject of itself.

## Actual Result

The current project is listed as a child project; if someone chooses it, the relationship can't be easily deleted. The current workaround is manually entering the edit page of the subproject (https://readthedocs.org/dashboard/{project}/subprojects/{subproject}/edit/).

See #3173
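Illustrative usage of the fix (mirroring the patch and test above): with both the queryset exclusion and `clean_child()` in place, posting the project's own pk is rejected, and the project never appears among the choices:

```python
# Sketch, assuming `project` and `user` exist as in the test above.
form = ProjectRelationshipForm(
    {'child': project.pk},
    project=project,
    user=user,
)
assert not form.is_valid()
assert project.pk not in [pk for pk, _ in form.fields['child'].choices]
```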
2018-02-02T00:51:35
readthedocs/readthedocs.org
3,581
readthedocs__readthedocs.org-3581
[ "3070" ]
8dad7c824cb08c8b0c08b86536c625f16e1b4e0b
diff --git a/readthedocs/projects/migrations/0038_change-default-python-interpreter.py b/readthedocs/projects/migrations/0038_change-default-python-interpreter.py new file mode 100644 --- /dev/null +++ b/readthedocs/projects/migrations/0038_change-default-python-interpreter.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.18 on 2019-02-04 16:49 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('projects', '0037_add_htmlfile'), + ] + + operations = [ + migrations.AlterField( + model_name='project', + name='python_interpreter', + field=models.CharField(choices=[('python', 'CPython 2.x'), ('python3', 'CPython 3.x')], default='python3', help_text='The Python interpreter used to create the virtual environment.', max_length=20, verbose_name='Python Interpreter'), + ), + ] diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -303,7 +303,7 @@ class Project(models.Model): _('Python Interpreter'), max_length=20, choices=constants.PYTHON_CHOICES, - default='python', + default='python3', help_text=_( 'The Python interpreter used to create the virtual ' 'environment.',
diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -1468,7 +1468,7 @@ def test_get_version_by_id(self): 'language': 'en', 'name': 'Pip', 'programming_language': 'words', - 'python_interpreter': 'python', + 'python_interpreter': 'python3', 'repo': 'https://github.com/pypa/pip', 'repo_type': 'git', 'requirements_file': None, diff --git a/readthedocs/rtd_tests/tests/test_config_integration.py b/readthedocs/rtd_tests/tests/test_config_integration.py --- a/readthedocs/rtd_tests/tests/test_config_integration.py +++ b/readthedocs/rtd_tests/tests/test_config_integration.py @@ -97,7 +97,7 @@ def test_python_supported_versions_default_image_1_0(self, load_config): ], 'use_system_packages': self.project.use_system_packages, 'requirements_file': self.project.requirements_file, - 'python_version': 2, + 'python_version': 3, 'sphinx_configuration': mock.ANY, 'build_image': 'readthedocs/build:1.0', 'doctype': self.project.documentation_type, @@ -112,7 +112,7 @@ def test_python_supported_versions_default_image_1_0(self, load_config): path=mock.ANY, env_config=expected_env_config, ) - self.assertEqual(config.python.version, 2) + self.assertEqual(config.python.version, 3) @mock.patch('readthedocs.doc_builder.config.load_config') def test_python_supported_versions_image_1_0(self, load_config): @@ -151,8 +151,8 @@ def test_python_supported_versions_image_latest(self, load_config): def test_python_default_version(self, load_config): load_config.side_effect = create_load() config = load_yaml_config(self.version) - self.assertEqual(config.python.version, 2) - self.assertEqual(config.python_interpreter, 'python2.7') + self.assertEqual(config.python.version, 3) + self.assertEqual(config.python_interpreter, 'python3.7') @mock.patch('readthedocs.doc_builder.config.load_config') def test_python_set_python_version_on_project(self, load_config):
Change Python environment default to 3

Python 2 is going to stop being maintained in less than 3 years. We should switch our default Python version for projects to Python 3, as this should significantly increase the number of Python 3 projects being built on RTD. It's just a small settings change, but putting it here for discussion.
The code to be changed is here: https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/projects/models.py#L200
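Note the new default only affects projects created after the change; existing rows keep `'python'`. If maintainers ever wanted to flip existing projects too (the linked PR does not do this), a hypothetical data migration could look like:

```python
from django.db import migrations


def forwards(apps, schema_editor):
    # Hypothetical: move every existing project from CPython 2 to CPython 3.
    Project = apps.get_model('projects', 'Project')
    Project.objects.filter(python_interpreter='python').update(
        python_interpreter='python3',
    )


class Migration(migrations.Migration):
    dependencies = [('projects', '0038_change-default-python-interpreter')]
    operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]
```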
2018-02-06T22:08:40
readthedocs/readthedocs.org
3,593
readthedocs__readthedocs.org-3593
[ "2431" ]
5a3938580dd156138c9b8cace6148d3c4e970a93
diff --git a/readthedocs/redirects/models.py b/readthedocs/redirects/models.py --- a/readthedocs/redirects/models.py +++ b/readthedocs/redirects/models.py @@ -78,13 +78,30 @@ class Meta(object): ordering = ('-update_dt',) def __str__(self): - if self.redirect_type == 'prefix': - return ugettext('Prefix Redirect:') + ' %s ->' % self.from_url - elif self.redirect_type == 'page': - return ugettext('Page Redirect:') + ' %s -> %s' % ( - self.from_url, - self.to_url) - return ugettext('Redirect: %s' % self.get_redirect_type_display()) + redirect_text = '{type}: {from_to_url}' + if self.redirect_type in ['prefix', 'page', 'exact']: + return redirect_text.format( + type=self.get_redirect_type_display(), + from_to_url=self.get_from_to_url_display() + ) + return ugettext('Redirect: {}'.format( + self.get_redirect_type_display()) + ) + + def get_from_to_url_display(self): + if self.redirect_type in ['prefix', 'page', 'exact']: + from_url = self.from_url + to_url = self.to_url + if self.redirect_type == 'prefix': + to_url = '/{lang}/{version}/'.format( + lang=self.project.language, + version=self.project.default_version + ) + return '{from_url} -> {to_url}'.format( + from_url=from_url, + to_url=to_url + ) + return '' def get_full_path(self, filename, language=None, version_slug=None): """
List of redirects doesn't say which URLs are being redirected (if exact redirects)

## Details

- Project URL: https://readthedocs.org/projects/bigchaindb/
- List of project redirects (URL): https://readthedocs.org/dashboard/bigchaindb/redirects/

## Expected Result

When I make a bunch of redirects, I would expect the list of exact redirects to say which URL is being redirected (and maybe where to). Maybe the list would be simple, but I'd be able to click on each item to see the details (such as the URL which gets redirected and where).

## Actual Result

The actual list of redirects doesn't say anything about each one, other than that it's an "Exact Redirect" (as seen in the image below). Also, I can't click on a redirect to find out more information about it. What if I want to remove one (but only one) redirect? How can I pick the one to remove?

<hr>

![list_of_redirects](https://cloud.githubusercontent.com/assets/1210951/18984645/06cac79a-86f4-11e6-9d5b-7854c4959615.png)
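With the `__str__`/`get_from_to_url_display` change in the patch above, the dashboard entry for an exact redirect becomes self-describing. The values here are illustrative:

```python
redirect = Redirect(
    project=project,
    redirect_type='exact',
    from_url='/old/install.html',
    to_url='/en/latest/installation.html',
)
str(redirect)
# -> 'Exact Redirect: /old/install.html -> /en/latest/installation.html'
```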
2018-02-11T01:10:18
readthedocs/readthedocs.org
3,631
readthedocs__readthedocs.org-3631
[ "2246" ]
c709316473a54d173f4df05dacc7bb47f839f534
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -27,6 +27,7 @@ 'sphinxcontrib.httpdomain', 'djangodocs', 'doc_extensions', + 'sphinx_tabs.tabs', ] templates_path = ['_templates']
Clarify set up text when using project name The project name is currently `pip` but the text should clarify why we're using `pip` -- 'we'll use a test project of `pip` blah blah' refs #2240
2018-02-18T03:08:07
readthedocs/readthedocs.org
3,641
readthedocs__readthedocs.org-3641
[ "3637" ]
648e6fd4b64e46add0aced992e5479349a799f64
diff --git a/readthedocs/urls.py b/readthedocs/urls.py --- a/readthedocs/urls.py +++ b/readthedocs/urls.py @@ -34,6 +34,8 @@ url(r'^$', HomepageView.as_view(), name='homepage'), url(r'^support/', SupportView.as_view(), name='support'), url(r'^security/', TemplateView.as_view(template_name='security.html')), + url(r'^.well-known/security.txt', + TemplateView.as_view(template_name='security.txt', content_type='text/plain')), ] rtd_urls = [
Improve security contact webpage

We need to improve our documentation about how a user who finds a security issue can contact us to report the vulnerability in a confidential way. This page should be clear about how to report the issue, how to submit a patch (without making it public), and what to do to receive feedback / discuss the solution.

There is a page already, but it's too sparse: https://readthedocs.org/security/
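A quick sanity check for the `/.well-known/security.txt` route added in the patch above (a sketch using Django's test client; the template body itself isn't part of the patch):

```python
from django.test import Client

client = Client()
resp = client.get('/.well-known/security.txt')
assert resp.status_code == 200
assert resp['Content-Type'].startswith('text/plain')
```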
2018-02-19T23:47:40
readthedocs/readthedocs.org
3,649
readthedocs__readthedocs.org-3649
[ "3396" ]
6317e064c2c5e7b27c7866ca0b5daf14e56e5677
diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -326,8 +326,26 @@ def save(self, *args, **kwargs): # pylint: disable=arguments-differ log.exception('failed to sync supported versions') try: if not first_save: - broadcast(type='app', task=tasks.symlink_project, - args=[self.pk],) + log.info( + 'Re-symlinking project and subprojects: project=%s', + self.slug, + ) + broadcast( + type='app', + task=tasks.symlink_project, + args=[self.pk], + ) + log.info( + 'Re-symlinking superprojects: project=%s', + self.slug, + ) + for superproject in self.superprojects.all(): + broadcast( + type='app', + task=tasks.symlink_project, + args=[superproject.pk], + ) + except Exception: log.exception('failed to symlink project') try:
diff --git a/readthedocs/rtd_tests/tests/test_project_symlinks.py b/readthedocs/rtd_tests/tests/test_project_symlinks.py --- a/readthedocs/rtd_tests/tests/test_project_symlinks.py +++ b/readthedocs/rtd_tests/tests/test_project_symlinks.py @@ -5,16 +5,16 @@ import os import shutil import tempfile -import collections -from functools import wraps import mock from django.conf import settings +from django.core.urlresolvers import reverse from django.test import TestCase, override_settings from django_dynamic_fixture import get from readthedocs.builds.models import Version from readthedocs.projects.models import Project, Domain +from readthedocs.projects.tasks import symlink_project from readthedocs.core.symlink import PublicSymlink, PrivateSymlink @@ -908,3 +908,202 @@ def test_symlink_no_error(self): self.symlink.run() except: self.fail('Symlink run raised an exception on unicode slug') + + def test_symlink_broadcast_calls_on_project_save(self): + """ + Test calls to ``readthedocs.core.utils.broadcast`` on Project.save(). + + When a Project is saved, we need to check that we are calling + ``broadcast`` utility with the proper task and arguments to re-symlink + them. + """ + with mock.patch('readthedocs.projects.models.broadcast') as broadcast: + project = get(Project) + # skipped on first save + broadcast.assert_not_called() + + broadcast.reset_mock() + project.description = 'New description' + project.save() + # called once for this project itself + broadcast.assert_any_calls( + type='app', + task=symlink_project, + args=[project.pk], + ) + + broadcast.reset_mock() + subproject = get(Project) + # skipped on first save + broadcast.assert_not_called() + + project.add_subproject(subproject) + # subproject.save() is not called + broadcast.assert_not_called() + + subproject.description = 'New subproject description' + subproject.save() + # subproject symlinks + broadcast.assert_any_calls( + type='app', + task=symlink_project, + args=[subproject.pk], + ) + # superproject symlinks + broadcast.assert_any_calls( + type='app', + task=symlink_project, + args=[project.pk], + ) + + +@override_settings() +class TestPublicPrivateSymlink(TempSiterootCase, TestCase): + + def setUp(self): + super(TestPublicPrivateSymlink, self).setUp() + from django.contrib.auth.models import User + + self.user = get(User) + self.project = get( + Project, name='project', slug='project', privacy_level='public', + users=[self.user], main_language_project=None) + self.project.versions.update(privacy_level='public') + self.project.save() + + self.subproject = get( + Project, name='subproject', slug='subproject', privacy_level='public', + users=[self.user], main_language_project=None) + self.subproject.versions.update(privacy_level='public') + self.subproject.save() + + def test_change_subproject_privacy(self): + """ + Change subproject's ``privacy_level`` creates proper symlinks. + + When the ``privacy_level`` changes in the subprojects, we need to + re-symlink the superproject also to keep in sync its symlink under the + private/public roots. 
+ """ + filesystem_before = { + 'private_cname_project': {}, + 'private_cname_root': {}, + 'private_web_root': { + 'project': { + 'en': {}, + }, + 'subproject': { + 'en': {}, + }, + }, + 'public_cname_project': {}, + 'public_cname_root': {}, + 'public_web_root': { + 'project': { + 'en': { + 'latest': { + 'type': 'link', + 'target': 'user_builds/project/rtd-builds/latest', + }, + }, + 'projects': { + 'subproject': { + 'type': 'link', + 'target': 'public_web_root/subproject', + }, + }, + }, + 'subproject': { + 'en': { + 'latest': { + 'type': 'link', + 'target': 'user_builds/subproject/rtd-builds/latest', + }, + }, + }, + }, + } + + filesystem_after = { + 'private_cname_project': {}, + 'private_cname_root': {}, + 'private_web_root': { + 'project': { + 'en': {}, + 'projects': { + 'subproject': { + 'type': 'link', + 'target': 'private_web_root/subproject', + }, + }, + }, + 'subproject': { + 'en': { + 'latest': { + 'type': 'link', + 'target': 'user_builds/subproject/rtd-builds/latest', + }, + }, + }, + }, + 'public_cname_project': {}, + 'public_cname_root': {}, + 'public_web_root': { + 'project': { + 'en': { + 'latest': { + 'type': 'link', + 'target': 'user_builds/project/rtd-builds/latest', + }, + }, + 'projects': {}, + }, + 'subproject': { + 'en': {}, + }, + }, + } + + self.assertEqual(self.project.subprojects.all().count(), 0) + self.assertEqual(self.subproject.superprojects.all().count(), 0) + self.project.add_subproject(self.subproject) + self.assertEqual(self.project.subprojects.all().count(), 1) + self.assertEqual(self.subproject.superprojects.all().count(), 1) + + self.assertTrue(self.project.versions.first().active) + self.assertTrue(self.subproject.versions.first().active) + symlink_project(self.project.pk) + + self.assertFilesystem(filesystem_before) + + self.client.force_login(self.user) + self.client.post( + reverse('project_version_detail', + kwargs={ + 'project_slug': self.subproject.slug, + 'version_slug': self.subproject.versions.first().slug, + }), + data={'privacy_level': 'private', 'active': True}, + ) + + self.assertEqual(self.subproject.versions.first().privacy_level, 'private') + self.assertTrue(self.subproject.versions.first().active) + + self.client.post( + reverse('projects_advanced', + kwargs={ + 'project_slug': self.subproject.slug, + }), + data={ + # Required defaults + 'python_interpreter': 'python', + 'default_version': 'latest', + + 'privacy_level': 'private', + }, + ) + + self.assertTrue(self.subproject.versions.first().active) + self.subproject.refresh_from_db() + self.assertEqual(self.subproject.privacy_level, 'private') + self.assertFilesystem(filesystem_after) diff --git a/requirements/testing.txt b/requirements/testing.txt --- a/requirements/testing.txt +++ b/requirements/testing.txt @@ -9,3 +9,4 @@ Mercurial==4.4.2 # local debugging tools pdbpp +datadiff
Subproject is not updated on save/build

When changing a subproject of a superproject, for instance updating the privacy level of the project, symlinks for the superproject are not updated. This might be a regression.

## To reproduce

* Create a project A, privacy level public
* Create a project B, privacy level private
* Assign project B as a subproject of project A
* Project B URLs should 404
* Alter project B privacy level to public
* Project B URLs still 404

## Expected Result

* Project B URLs shouldn't 404 after updating to public privacy level

## Resolving

We should either trigger a save of the project's superproject, or explicitly call a task for re-symlinking the project, in the following cases:

* When a subproject is deleted
* When a subproject privacy level is changed
* When a subproject version privacy level is changed (?? not sure here, might need testing on this as well)
I was able to replicate this: no symlinks under `A/projects` are created. When project A is rebuilt, the symlink is created. Also, when removing the subproject, the symlink is not erased. Again, rebuilding project A fixes this.

Great, I'm glad this is reproducible. So, we should either trigger a save of the superproject in these cases, or explicitly call a task to re-symlink the superproject. The second option definitely works, but perhaps a full save of the project makes more sense technically. I'll update the description with more info.

I followed the steps to reproduce it mentioned here and was able to reproduce it. The proposed solution (calling `A.save()`) does work, and I can access http://localhost:8000/docs/a/projects/B/en/latest/

```
In [10]: a = Project.objects.get(slug='a')

In [11]: a.save()
[21/Feb/2018 14:08:34] readthedocs.core.symlink:93[2712]: INFO (Build) [a:] Symlinking subproject: b -> b
[21/Feb/2018 14:08:34] readthedocs.core.symlink:93[2712]: INFO (Build) [a:] Symlinking subproject: B -> b
[21/Feb/2018 14:08:34] readthedocs.core.symlink:93[2712]: INFO (Build) [a:] Symlinking Version: Version latest of A (1173)
[21/Feb/2018 14:08:34] celery.app.trace:123[2712]: INFO Task readthedocs.projects.tasks.symlink_project[fff11308-f0c1-4639-aeab-dea3541f2ad0] succeeded in 0.033074999002565164s: None
[21/Feb/2018 14:08:34] readthedocs.projects.tasks:980[2712]: INFO (Build) [a:] Updating static metadata
[21/Feb/2018 14:08:34] celery.app.trace:123[2712]: INFO Task readthedocs.projects.tasks.update_static_metadata[7f9b4e54-0399-43da-ab66-11b271848c15] succeeded in 0.010115133998624515s: None
```

I'm trying to write a test case that makes this fail first, but I don't know how to manage symlinks from the tests and check the documentation URLs yet.
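On the testing question above: symlinks can be asserted directly with the standard library, no RTD helpers required (a sketch; the paths are illustrative):

```python
import os


def assert_symlink(path, expected_target):
    # e.g. path='public_web_root/a/projects/b',
    #      expected_target='public_web_root/b'
    assert os.path.islink(path), '%s is not a symlink' % path
    assert os.readlink(path) == expected_target
```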
2018-02-22T01:56:07
readthedocs/readthedocs.org
3,657
readthedocs__readthedocs.org-3657
[ "3655" ]
5e46cbdc962db40d31ff964d4e6125119811a845
diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py --- a/readthedocs/projects/tasks.py +++ b/readthedocs/projects/tasks.py @@ -151,6 +151,15 @@ def sync_repo(self): except Exception: log.exception('Unknown Sync Versions Exception') + # TODO this is duplicated in the classes below, and this should be + # refactored out anyways, as calling from the method removes the original + # caller from logging. + def _log(self, msg): + log.info(LOG_TEMPLATE + .format(project=self.project.slug, + version=self.version.slug, + msg=msg)) + class SyncRepositoryTask(SyncRepositoryMixin, Task):
AttributeError: 'SyncRepositoryTask' object has no attribute '_log'

https://sentry.io/read-the-docs/readthedocs-org/issues/471122664/

```
AttributeError: 'SyncRepositoryTask' object has no attribute '_log'
  File "readthedocs/projects/tasks.py", line 175, in run
    self.sync_repo()
  File "readthedocs/projects/tasks.py", line 111, in sync_repo
    self._log(

An unhandled exception was raised during VCS syncing
```
2018-02-22T23:54:10
readthedocs/readthedocs.org
3,661
readthedocs__readthedocs.org-3661
[ "3627" ]
0a4a287767af0d20fbc0ecea8079ad9efd7adb1e
diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -55,6 +55,10 @@ def repo_exists(self): code, _, _ = self.run('git', 'status', record=False) return code == 0 + def submodules_exists(self): + code, out, _ = self.run('git', 'submodule', 'status', record=False) + return code == 0 and bool(out) + def fetch(self): code, _, _ = self.run('git', 'fetch', '--tags', '--prune') if code != 0: @@ -187,9 +191,10 @@ def checkout(self, identifier=None): self.run('git', 'clean', '-d', '-f', '-f') # Update submodules - self.run('git', 'submodule', 'sync') - self.run('git', 'submodule', 'update', - '--init', '--recursive', '--force') + if self.submodules_exists(): + self.run('git', 'submodule', 'sync') + self.run('git', 'submodule', 'update', + '--init', '--recursive', '--force') return code, out, err
diff --git a/readthedocs/rtd_tests/tests/test_backend.py b/readthedocs/rtd_tests/tests/test_backend.py --- a/readthedocs/rtd_tests/tests/test_backend.py +++ b/readthedocs/rtd_tests/tests/test_backend.py @@ -78,6 +78,16 @@ def test_parse_git_tags(self): self.project.vcs_repo().parse_tags(data)] self.assertEqual(expected_tags, given_ids) + def test_check_for_submodules(self): + repo = self.project.vcs_repo() + + repo.checkout() + self.assertFalse(repo.submodules_exists()) + + # The submodule branch contains one submodule + repo.checkout('submodule') + self.assertTrue(repo.submodules_exists()) + class TestHgBackend(RTDTestCase): def setUp(self): diff --git a/readthedocs/rtd_tests/utils.py b/readthedocs/rtd_tests/utils.py --- a/readthedocs/rtd_tests/utils.py +++ b/readthedocs/rtd_tests/utils.py @@ -34,6 +34,13 @@ def make_test_git(): log.info(check_output(['git', 'init'] + [directory], env=env)) log.info(check_output(['git', 'add', '.'], env=env)) log.info(check_output(['git', 'commit', '-m"init"'], env=env)) + # Add repo itself as submodule + log.info(check_output(['git', 'checkout', '-b', 'submodule'], env=env)) + log.info(check_output(['git', 'submodule', 'add', '-b', 'master', './', 'submodule'], env=env)) + log.info(check_output(['git', 'add', '.'], env=env)) + log.info(check_output(['git', 'commit', '-m"Add submodule"'], env=env)) + # Checkout to master branch again + log.info(check_output(['git', 'checkout', 'master'], env=env)) chdir(path) return directory
Conditionally do git submodule operations

We shouldn't be doing submodule operations on repositories that don't have submodules. We can check for this by determining if a repository has a `.gitmodules` file at the top level, and if not, we can skip these commands.

Raised in #3520 as we're storing all these as extraneous commands now.
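The `.gitmodules` heuristic suggested here is a one-liner (sketch below); the merged patch above asks git directly instead, via `git submodule status`, which also covers edge cases such as an empty `.gitmodules` file:

```python
import os


def has_submodules(checkout_path):
    # Skip `git submodule sync/update` when the repo declares no submodules.
    return os.path.exists(os.path.join(checkout_path, '.gitmodules'))
```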
2018-02-23T04:44:04
readthedocs/readthedocs.org
3,666
readthedocs__readthedocs.org-3666
[ "3664" ]
e25acfa5d13bfaf8ed9338ae1f82d72d55ce62e9
diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -71,7 +71,7 @@ def INSTALLED_APPS(self): # noqa 'django.contrib.humanize', # third party apps - 'linaro_django_pagination', + 'dj_pagination', 'taggit', 'guardian', 'django_gravatar', @@ -131,7 +131,7 @@ def USE_PROMOS(self): # noqa 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', - 'linaro_django_pagination.middleware.PaginationMiddleware', + 'dj_pagination.middleware.PaginationMiddleware', 'readthedocs.core.middleware.SubdomainMiddleware', 'readthedocs.core.middleware.SingleVersionMiddleware', 'corsheaders.middleware.CorsMiddleware',
Upgrade django-pagination

We are using a fork of the unmaintained `django-pagination` (https://github.com/zyga/django-pagination). On the other hand, I found another fork that seems more actively maintained at https://github.com/pydanny/dj-pagination

I opened an issue on the third-party repository at https://github.com/zyga/django-pagination/issues/42

Ref #2435
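The swap is mostly a rename of dotted paths in settings (a sketch mirroring the patch above):

```python
INSTALLED_APPS = [
    # ...
    'dj_pagination',  # was 'linaro_django_pagination'
]

MIDDLEWARE_CLASSES = [
    # ...
    'dj_pagination.middleware.PaginationMiddleware',
    # was 'linaro_django_pagination.middleware.PaginationMiddleware'
]
```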
2018-02-23T17:39:03
readthedocs/readthedocs.org
3,683
readthedocs__readthedocs.org-3683
[ "3539" ]
af867c1d85a157b7106734fe45a0fabde9385083
diff --git a/readthedocs/builds/views.py b/readthedocs/builds/views.py --- a/readthedocs/builds/views.py +++ b/readthedocs/builds/views.py @@ -3,6 +3,7 @@ """Views for builds app.""" import logging +import textwrap from django.contrib import messages from django.contrib.auth.decorators import login_required @@ -15,7 +16,10 @@ from django.urls import reverse from django.utils.decorators import method_decorator from django.views.generic import DetailView, ListView +from requests.utils import quote +from urllib.parse import urlparse +from readthedocs.doc_builder.exceptions import BuildEnvironmentError from readthedocs.builds.models import Build, Version from readthedocs.core.permissions import AdminPermission from readthedocs.core.utils import trigger_build @@ -104,6 +108,49 @@ class BuildDetail(BuildBase, DetailView): def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context['project'] = self.project + + build = self.get_object() + if build.error != BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(build_id=build.pk): + # Do not suggest to open an issue if the error is not generic + return context + + scheme = ( + 'https://github.com/rtfd/readthedocs.org/issues/new' + '?title={title}{build_id}' + '&body={body}' + ) + + # TODO: we could use ``.github/ISSUE_TEMPLATE.md`` here, but we would + # need to add some variables to it which could impact in the UX when + # filling an issue from the web + body = """ + ## Details: + + * Project URL: https://readthedocs.org/projects/{project_slug}/ + * Build URL(if applicable): https://readthedocs.org{build_path} + * Read the Docs username(if applicable): {username} + + ## Expected Result + + *A description of what you wanted to happen* + + ## Actual Result + + *A description of what actually happened*""".format( + project_slug=self.project, + build_path=self.request.path, + username=self.request.user, + ) + + scheme_dict = { + 'title': quote('Build error with build id #'), + 'build_id': context['build'].id, + 'body': quote(textwrap.dedent(body)), + } + + issue_url = scheme.format(**scheme_dict) + issue_url = urlparse(issue_url).geturl() + context['issue_url'] = issue_url return context
Link to open an issue from a failed Build with pre-filled details

Several reported issues are missing information in the details section. To solve this, I was thinking of generating a link inside this message that could contain all the information pre-filled:

![captura de pantalla_2018-01-22_15-07-15](https://user-images.githubusercontent.com/244656/35242041-6ed4e288-ff86-11e7-9b89-c69a45505318.png)

I found that GitHub supports _some_ query arguments in the URL, but I didn't find a way to use those keys/args inside the template itself.

https://help.github.com/articles/about-automation-for-issues-and-pull-requests-with-query-parameters/

I think it would be awesome if the user could just click a link and have the username, project URL, etc. pre-filled automatically.
Could you not have the link generated with that information in the body tag?

Yeah, _maybe_. The template is 227 chars long; if we add the project and build link plus the username, I'm not sure we wouldn't end up with a URL too long to be standard. I know there is a limit, and I think it depends on the browser also... https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers

We could do that since the limit is around 2000 chars.

Neat idea. We need to make sure this wouldn't show on the .com though, as we don't want GitHub issues for those issues.

What about putting this information in the header of the raw build log (see #3585)? Then the user can just reference that link or copy/paste the raw log into the issue.

Hi @humitos, may I work on this?

@bansalnitish It doesn't look like @humitos has opened a PR for this; he's only specified a need for it. I'd say go ahead! :)

Thanks @RichardLitt!

Hi @humitos, can you please help me reproduce this issue?

@bansalnitish hey! What do you need? The idea is to add a link on the build page (shown when it fails) that opens an issue in this GitHub project. (There is a link to the GH docs for generating that link in the description.)

I got it @humitos, thanks! I will be submitting a PR for this soon!
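A sketch of the URL-building approach the merged view above takes (the helper name and exact body template here are illustrative): percent-encode the pre-filled body with `quote()` and keep the result well under the ~2000-character limit discussed above:

```python
from urllib.parse import quote


def issue_url(project_slug, build_path, username):
    body = (
        '## Details:\n\n'
        '* Project URL: https://readthedocs.org/projects/{}/\n'
        '* Build URL: https://readthedocs.org{}\n'
        '* Read the Docs username: {}\n'.format(
            project_slug, build_path, username)
    )
    return (
        'https://github.com/rtfd/readthedocs.org/issues/new'
        '?title={}&body={}'.format(quote('Build error'), quote(body))
    )
```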
2018-02-26T18:14:30
readthedocs/readthedocs.org
3,693
readthedocs__readthedocs.org-3693
[ "3644" ]
09f8282be35c20dcca42c85d3f845bba8b784cfe
diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -122,8 +122,8 @@ def parse_tags(self, data): @property def branches(self): # Only show remote branches - retcode, stdout, _ = self.run('git', 'branch', '-r') - # error (or no tags found) + retcode, stdout, _ = self.run('git', 'branch', '-r', record_as_success=True) + # error (or no branches found) if retcode != 0: return [] return self.parse_branches(stdout)
After build is triggered, state is shown as failed

To reproduce, fire off a new build and make sure to catch the build list page while VCS operations are happening. The build will be in a failure state.

This is a regression where we are setting the state of the build to failed without checking that the build has completed. This might be a byproduct of using multiple environments during the build process.
@davidfischer noted he also saw this in production, which means it might not even be related to the VCS changes.

Mmm... I'm not able to reproduce this in my local instance with the `master` branch. I see the states `Triggered`, `Cloning` and `Installing` without problems. Same behaviour in .org production.

@humitos I also experience this behavior on my local instance:

- Build any project
- Enter the build (failure is shown)
- Wait ~5 seconds, reload the page
- Build has continued and is successful

![screenshot-2018-2-21 bookpy read the docs](https://user-images.githubusercontent.com/4975310/36497585-bcb8b084-1709-11e8-8b30-da6743bc8bf1.png)

![screenshot-2018-2-21 bookpy read the docs 1](https://user-images.githubusercontent.com/4975310/36497591-bff0aa2c-1709-11e8-9ffe-0896f0ec44a1.png)

![screenshot-2018-2-21 bookpy read the docs 2](https://user-images.githubusercontent.com/4975310/36497595-c3492faa-1709-11e8-87d0-f8310f40b488.png)

I haven't seen this in production yet.

@stsewd I get the same results as you once I click on the build, but the build list page is where I see a failure.

![image](https://user-images.githubusercontent.com/1140183/36554941-defc2234-17bd-11e8-81ac-65e2c4a99eba.png)

Another clue: I can reproduce this on the first build of a project, but not afterwards.

@humitos I think the problem is with a project without tags. If I run this command on a project without tags, the exit code is `1` (here the build is marked as a failure).

![error](https://user-images.githubusercontent.com/4975310/36575270-1cf18bba-1818-11e8-9223-d0071a383827.png)

Then in another step the exit code is forced to be 0 (this is when the build goes back to normal).

Another clue: if I build the same project again, the second build doesn't initially show as failing in the build list, but if I quickly select the build, it's in a failed state by the end of the VCS commands:

![image](https://user-images.githubusercontent.com/1140183/36608417-01c1d074-1887-11e8-965d-b5b2006aafd8.png)

This also causes the page to stop loading the build commands from the API, as we halt this process with JS when a build has failed. Reloading the page, I see it failed on the next command.
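The diagnosis above (a non-zero exit from listing branches on a repo without remote branches being recorded as a build failure) is exactly what the patch's `record_as_success=True` flag addresses; condensed from the diff:

```python
# Listing remote branches is informational; an empty result must not
# flip the in-progress build into a failed state.
retcode, stdout, _ = self.run('git', 'branch', '-r', record_as_success=True)
if retcode != 0:
    return []  # error (or no branches found) is not a build failure
return self.parse_branches(stdout)
```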
2018-02-27T15:29:05
readthedocs/readthedocs.org
3,790
readthedocs__readthedocs.org-3790
[ "3778" ]
adb5392c47a1b3fe506524d898dd147b038d4140
diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py --- a/readthedocs/projects/forms.py +++ b/readthedocs/projects/forms.py @@ -229,6 +229,36 @@ class Meta(object): 'tags', ) + def clean_language(self): + language = self.cleaned_data['language'] + project = self.instance + if project: + msg = _( + 'There is already a "{lang}" translation ' + 'for the {proj} project.' + ) + if project.translations.filter(language=language).exists(): + raise forms.ValidationError( + msg.format(lang=language, proj=project.slug) + ) + main_project = project.main_language_project + if main_project: + if main_project.language == language: + raise forms.ValidationError( + msg.format(lang=language, proj=main_project.slug) + ) + siblings = ( + main_project.translations + .filter(language=language) + .exclude(pk=project.pk) + .exists() + ) + if siblings: + raise forms.ValidationError( + msg.format(lang=language, proj=main_project.slug) + ) + return language + class ProjectRelationshipForm(forms.ModelForm):
diff --git a/readthedocs/rtd_tests/tests/test_project.py b/readthedocs/rtd_tests/tests/test_project.py --- a/readthedocs/rtd_tests/tests/test_project.py +++ b/readthedocs/rtd_tests/tests/test_project.py @@ -6,6 +6,7 @@ import json from django.contrib.auth.models import User +from django.forms.models import model_to_dict from django.test import TestCase from django_dynamic_fixture import get from mock import patch @@ -258,6 +259,71 @@ def test_user_cant_delete_other_user_translations(self): self.assertEqual(resp.status_code, 404) self.assertIn(project_b, project_a.translations.all()) + def test_user_cant_change_lang_to_translation_lang(self): + user_a = User.objects.get(username='eric') + project_a = Project.objects.get(slug='read-the-docs') + project_b = get( + Project, users=[user_a], + language='es', main_language_project=None + ) + + project_a.translations.add(project_b) + project_a.save() + + # User tries to change the language + # to the same of the translation + self.client.login(username=user_a.username, password='test') + self.assertIn(project_b, project_a.translations.all()) + self.assertEqual(project_a.language, 'en') + self.assertEqual(project_b.language, 'es') + data = model_to_dict(project_a) + data['language'] = 'es' + resp = self.client.post( + reverse( + 'projects_edit', + args=[project_a.slug] + ), + data=data, + follow=True + ) + self.assertEqual(resp.status_code, 200) + self.assertContains(resp, 'error') + self.assertContains( + resp, + 'There is already a &quot;es&quot; translation ' + 'for the read-the-docs project' + ) + + def test_user_can_change_project_whith_same_lang(self): + user_a = User.objects.get(username='eric') + project_a = Project.objects.get(slug='read-the-docs') + project_b = get( + Project, users=[user_a], + language='es', main_language_project=None + ) + + project_a.translations.add(project_b) + project_a.save() + + # User save the project with no modifications + self.client.login(username=user_a.username, password='test') + self.assertIn(project_b, project_a.translations.all()) + self.assertEqual(project_a.language, 'en') + self.assertEqual(project_b.language, 'es') + data = model_to_dict(project_a) + # Same languge + data['language'] = 'en' + resp = self.client.post( + reverse( + 'projects_edit', + args=[project_a.slug] + ), + data=data, + follow=True + ) + self.assertEqual(resp.status_code, 200) + self.assertNotContains(resp, 'error') + def test_token(self): r = self.client.get('/api/v2/project/6/token/', {}) resp = json.loads(r.content) diff --git a/readthedocs/rtd_tests/tests/test_project_forms.py b/readthedocs/rtd_tests/tests/test_project_forms.py --- a/readthedocs/rtd_tests/tests/test_project_forms.py +++ b/readthedocs/rtd_tests/tests/test_project_forms.py @@ -12,7 +12,7 @@ from readthedocs.projects.exceptions import ProjectSpamError from readthedocs.projects.forms import ( - ProjectBasicsForm, ProjectExtraForm, TranslationForm) + ProjectBasicsForm, ProjectExtraForm, TranslationForm, UpdateProjectForm) from readthedocs.projects.models import Project @@ -260,3 +260,82 @@ def test_not_already_translation(self): 'is already a translation', ''.join(form.errors['project']) ) + + def test_cant_change_language_to_translation_lang(self): + self.project_a_es.translations.add(self.project_b_en) + self.project_a_es.translations.add(self.project_c_br) + self.project_a_es.save() + + # Parent project tries to change lang + form = UpdateProjectForm( + { + 'documentation_type': 'sphinx', + 'language': 'en', + }, + instance=self.project_a_es + ) + 
self.assertFalse(form.is_valid()) + self.assertIn( + 'There is already a "en" translation', + ''.join(form.errors['language']) + ) + + # Translation tries to change lang + form = UpdateProjectForm( + { + 'documentation_type': 'sphinx', + 'language': 'es', + }, + instance=self.project_b_en + ) + self.assertFalse(form.is_valid()) + self.assertIn( + 'There is already a "es" translation', + ''.join(form.errors['language']) + ) + + # Translation tries to change lang + # to the same as its sibling + form = UpdateProjectForm( + { + 'documentation_type': 'sphinx', + 'language': 'br', + }, + instance=self.project_b_en + ) + self.assertFalse(form.is_valid()) + self.assertIn( + 'There is already a "br" translation', + ''.join(form.errors['language']) + ) + + def test_can_change_language_to_self_lang(self): + self.project_a_es.translations.add(self.project_b_en) + self.project_a_es.translations.add(self.project_c_br) + self.project_a_es.save() + + # Parent project tries to change lang + form = UpdateProjectForm( + { + 'repo': 'https://github.com/test/test', + 'repo_type': self.project_a_es.repo_type, + 'name': self.project_a_es.name, + 'documentation_type': 'sphinx', + 'language': 'es', + }, + instance=self.project_a_es + ) + self.assertTrue(form.is_valid()) + + # Translation tries to change lang + form = UpdateProjectForm( + { + 'repo': 'https://github.com/test/test', + 'repo_type': self.project_b_en.repo_type, + 'name': self.project_b_en.name, + 'documentation_type': 'sphinx', + 'language': 'en', + }, + instance=self.project_b_en + ) + self.assertTrue(form.is_valid())
Additional validation when changing the project language

Validate against existing translations when changing the project language, so we don't end up with inconsistencies and two symlinks can't end up pointing to the same project.
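The rule the patch enforces, restated in plain queryset terms (a sketch; the real check lives in `clean_language()` above): a project's new language must not collide with its own translations, its parent, or its parent's other translations:

```python
def language_taken(project, language):
    if project.translations.filter(language=language).exists():
        return True
    parent = project.main_language_project
    if parent:
        return (
            parent.language == language or
            parent.translations.filter(language=language)
                  .exclude(pk=project.pk).exists()
        )
    return False
```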
2018-03-13T21:27:43
readthedocs/readthedocs.org
3,793
readthedocs__readthedocs.org-3793
[ "3744" ]
3306d3aa44156c89d87721aecf10c1d13a9e5df3
diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py --- a/readthedocs/doc_builder/environments.py +++ b/readthedocs/doc_builder/environments.py @@ -623,6 +623,8 @@ def __init__(self, *args, **kwargs): ) if self.config and self.config.build_image: self.container_image = self.config.build_image + if self.project.container_image: + self.container_image = self.project.container_image if self.project.container_mem_limit: self.container_mem_limit = self.project.container_mem_limit if self.project.container_time_limit: @@ -782,6 +784,13 @@ def get_container_host_config(self): }) return create_host_config(binds=binds) + @property + def image_hash(self): + """Return the hash of the Docker image.""" + client = self.get_client() + image_metadata = client.inspect_image(self.container_image) + return image_metadata.get('Id') + @property def container_id(self): """Return id of container if it is valid.""" @@ -824,13 +833,13 @@ def update_build_from_container_state(self): def create_container(self): """Create docker container.""" client = self.get_client() - image = self.container_image - if self.project.container_image: - image = self.project.container_image try: - log.info('Creating Docker container: image=%s', image) + log.info( + 'Creating Docker container: image=%s', + self.container_image, + ) self.container = client.create_container( - image=image, + image=self.container_image, command=('/bin/sh -c "sleep {time}; exit {exit}"' .format(time=self.container_time_limit, exit=DOCKER_TIMEOUT_EXIT_CODE)), diff --git a/readthedocs/doc_builder/python_environments.py b/readthedocs/doc_builder/python_environments.py --- a/readthedocs/doc_builder/python_environments.py +++ b/readthedocs/doc_builder/python_environments.py @@ -131,13 +131,12 @@ def is_obsolete(self): environment_conf = json.load(fpath) env_python_version = environment_conf['python']['version'] env_build_image = environment_conf['build']['image'] + env_build_hash = environment_conf['build']['hash'] except (IOError, TypeError, KeyError, ValueError): log.error('Unable to read/parse readthedocs-environment.json file') return False - # TODO: remove getattr when https://github.com/rtfd/readthedocs.org/pull/3339 got merged - build_image = getattr(self.config, 'build_image', self.version.project.container_image) or DOCKER_IMAGE # noqa - + build_image = self.config.build_image or DOCKER_IMAGE # If the user define the Python version just as a major version # (e.g. ``2`` or ``3``) we won't know exactly which exact version was # used to create the venv but we can still compare it against the new @@ -145,19 +144,19 @@ def is_obsolete(self): return any([ env_python_version != self.config.python_full_version, env_build_image != build_image, + env_build_hash != self.build_env.image_hash, ]) def save_environment_json(self): """Save on disk Python and build image versions used to create the venv.""" - # TODO: remove getattr when https://github.com/rtfd/readthedocs.org/pull/3339 got merged - build_image = getattr(self.config, 'build_image', self.version.project.container_image) or DOCKER_IMAGE # noqa - + build_image = self.config.build_image or DOCKER_IMAGE data = { 'python': { 'version': self.config.python_full_version, }, 'build': { 'image': build_image, + 'hash': self.build_env.image_hash, }, }
diff --git a/readthedocs/rtd_tests/tests/test_doc_building.py b/readthedocs/rtd_tests/tests/test_doc_building.py --- a/readthedocs/rtd_tests/tests/test_doc_building.py +++ b/readthedocs/rtd_tests/tests/test_doc_building.py @@ -9,7 +9,9 @@ absolute_import, division, print_function, unicode_literals) import os.path +import json import re +import tempfile import uuid from builtins import str @@ -837,14 +839,54 @@ def test_command_oom_kill(self): u'Command killed due to excessive memory consumption\n') - - -class TestAutoWipeEnvironment(TestCase): +class AutoWipeEnvironmentBase(object): fixtures = ['test_data'] + build_env_class = None def setUp(self): self.pip = Project.objects.get(slug='pip') self.version = self.pip.versions.get(slug='0.8') + self.build_env = self.build_env_class( + project=self.pip, + version=self.version, + build={'id': DUMMY_BUILD_ID}, + ) + + def test_save_environment_json(self): + config_data = { + 'build': { + 'image': '2.0', + }, + 'python': { + 'version': 2.7, + }, + } + yaml_config = create_load(config_data)()[0] + config = ConfigWrapper(version=self.version, yaml_config=yaml_config) + + python_env = Virtualenv( + version=self.version, + build_env=self.build_env, + config=config, + ) + + with patch( + 'readthedocs.doc_builder.python_environments.PythonEnvironment.environment_json_path', + return_value=tempfile.mktemp(suffix='envjson'), + ): + python_env.save_environment_json() + json_data = json.load(open(python_env.environment_json_path())) + + expected_data = { + 'build': { + 'image': 'readthedocs/build:2.0', + 'hash': 'a1b2c3', + }, + 'python': { + 'version': 2.7, + }, + } + self.assertDictEqual(json_data, expected_data) def test_is_obsolete_without_env_json_file(self): yaml_config = create_load()()[0] @@ -854,7 +896,7 @@ def test_is_obsolete_without_env_json_file(self): exists.return_value = False python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) @@ -868,7 +910,7 @@ def test_is_obsolete_with_invalid_env_json_file(self): exists.return_value = True python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) @@ -888,15 +930,14 @@ def test_is_obsolete_with_json_different_python_version(self): python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) - env_json_data = '{"build": {"image": "readthedocs/build:2.0"}, "python": {"version": 3.5}}' + env_json_data = '{"build": {"image": "readthedocs/build:2.0", "hash": "a1b2c3"}, "python": {"version": 3.5}}' # noqa with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa exists.return_value = True self.assertTrue(python_env.is_obsolete) - @pytest.mark.xfail(reason='build.image is not being considered yet') def test_is_obsolete_with_json_different_build_image(self): config_data = { 'build': { @@ -911,10 +952,10 @@ def test_is_obsolete_with_json_different_build_image(self): python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) - env_json_data = '{"build": {"image": "readthedocs/build:2.0"}, "python": {"version": 2.7}}' + env_json_data = '{"build": {"image": "readthedocs/build:2.0", "hash": "a1b2c3"}, "python": {"version": 2.7}}' # noqa with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa exists.return_value = True 
self.assertTrue(python_env.is_obsolete) @@ -937,10 +978,10 @@ def test_is_obsolete_with_project_different_build_image(self): python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) - env_json_data = '{"build": {"image": "readthedocs/build:2.0"}, "python": {"version": 2.7}}' + env_json_data = '{"build": {"image": "readthedocs/build:2.0", "hash": "a1b2c3"}, "python": {"version": 2.7}}' # noqa with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa exists.return_value = True self.assertTrue(python_env.is_obsolete) @@ -959,10 +1000,55 @@ def test_is_obsolete_with_json_same_data_as_version(self): python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) - env_json_data = '{"build": {"image": "readthedocs/build:2.0"}, "python": {"version": 3.5}}' + env_json_data = '{"build": {"image": "readthedocs/build:2.0", "hash": "a1b2c3"}, "python": {"version": 3.5}}' # noqa with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa exists.return_value = True self.assertFalse(python_env.is_obsolete) + + def test_is_obsolete_with_json_different_build_hash(self): + config_data = { + 'build': { + 'image': '2.0', + }, + 'python': { + 'version': 2.7, + }, + } + yaml_config = create_load(config_data)()[0] + config = ConfigWrapper(version=self.version, yaml_config=yaml_config) + + # Set container_image manually + self.pip.container_image = 'readthedocs/build:2.0' + self.pip.save() + + python_env = Virtualenv( + version=self.version, + build_env=self.build_env, + config=config, + ) + env_json_data = '{"build": {"image": "readthedocs/build:2.0", "hash": "foo"}, "python": {"version": 2.7}}' # noqa + with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa + exists.return_value = True + self.assertTrue(python_env.is_obsolete) + + +@patch( + 'readthedocs.doc_builder.environments.DockerBuildEnvironment.image_hash', + PropertyMock(return_value='a1b2c3'), +) +class AutoWipeDockerBuildEnvironmentTest(AutoWipeEnvironmentBase, TestCase): + build_env_class = DockerBuildEnvironment + + [email protected]( + reason='PythonEnvironment needs to be refactored to do not rely on DockerBuildEnvironment', +) +@patch( + 'readthedocs.doc_builder.environments.DockerBuildEnvironment.image_hash', + PropertyMock(return_value='a1b2c3'), +) +class AutoWipeLocalBuildEnvironmentTest(AutoWipeEnvironmentBase, TestCase): + build_env_class = LocalBuildEnvironment
Track the Docker image hash in readthedocs-environment.json

We are doing some basic checks on Python version changes around our `readthedocs-environment.json` metadata file. We should also drop the build image SHA in there, so that if it ever changes, we know to wipe the build env.

For instance, we're saving here: https://github.com/rtfd/readthedocs.org/blob/7be1b805d59c0683855f05a252850bba09287a20/readthedocs/doc_builder/python_environments.py#L150-L162

On save, we should:

* Check to see if Docker builds are enabled before writing information about the Docker build
* Get the build image name that the build would use
* Query the Docker API for the build image SHA that matches the build image name
* Write that to disk

On build, we should check this file against the current image SHA; if they differ, the image has changed and we should rebuild without the cache.
I agree with this idea. I think we just need to add the `build.hash` option to the JSON file and check it again in the `is_obsolete` method. `docker inspect` does the magic to get the `Id`:

```
$ docker inspect readthedocs/build:2.0 | jq ".[0].Id"
"sha256:9468fcb95b87a9a5cd58c2d37da35983df8c3334a80181fed12b5a04892fffb9"
```

I can take this one if you want and do it.

+1 on keeping any specific data that we can on disk, so we can do sanity checking against it.
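For reference, the same lookup through docker-py (rather than the CLI) would be, roughly:

```python
import docker

client = docker.APIClient()  # talks to the local Docker daemon
print(client.inspect_image('readthedocs/build:2.0')['Id'])
# "sha256:9468fcb95b87a9a5cd58c2d37da35983df8c3334a80181fed12b5a04892fffb9"
```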
2018-03-14T01:42:29
readthedocs/readthedocs.org
3,831
readthedocs__readthedocs.org-3831
[ "3644" ]
ed5a1bdea2eda89ef48f5bf5dd895bfbb32aec26
diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py --- a/readthedocs/doc_builder/environments.py +++ b/readthedocs/doc_builder/environments.py @@ -183,21 +183,19 @@ def get_command(self): def save(self): """Save this command and result via the API.""" - exit_code = self.exit_code - # Force record this command as success to avoid Build reporting errors # on commands that are just for checking purposes and do not interferes # in the Build if self.record_as_success: log.warning('Recording command exit_code as success') - exit_code = 0 + self.exit_code = 0 data = { 'build': self.build_env.build.get('id'), 'command': self.get_command(), 'description': self.description, 'output': self.output, - 'exit_code': exit_code, + 'exit_code': self.exit_code, 'start_time': self.start_time, 'end_time': self.end_time, } @@ -345,7 +343,6 @@ def run_command_class( # ``build_env`` is passed as ``kwargs`` when it's called from a # ``*BuildEnvironment`` build_cmd = cls(cmd, **kwargs) - self.commands.append(build_cmd) build_cmd.run() if record: @@ -354,6 +351,12 @@ def run_command_class( # only ones that can be saved/recorded) self.record_command(build_cmd) + # We want append this command to the list of commands only if it has + # to be recorded in the database (to keep consistency) and also, it + # has to be added after ``self.record_command`` since its + # ``exit_code`` can be altered because of ``record_as_success`` + self.commands.append(build_cmd) + if build_cmd.failed: msg = u'Command {cmd} failed'.format(cmd=build_cmd.get_command())
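The reordering above matters because `save()` now mutates `self.exit_code` when `record_as_success` is set, so anything reading the command after it is recorded sees the forced value. A toy illustration of that mutation (not the real class, just the relevant behavior):

```python
class ToyBuildCommand(object):
    """Stand-in mimicking the ``record_as_success`` behavior above."""

    def __init__(self, exit_code, record_as_success=False):
        self.exit_code = exit_code
        self.record_as_success = record_as_success

    def save(self):
        # Persisting the command forces the exit code to success.
        if self.record_as_success:
            self.exit_code = 0


cmd = ToyBuildCommand(exit_code=1, record_as_success=True)
cmd.save()
assert cmd.exit_code == 0  # the original non-zero exit code is gone
```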
After build is triggered, state is shown as failed

To reproduce, fire off a new build and make sure to catch the build list page while VCS operations are happening. The build will be in a failure state.

This is a regression where we are setting the state of the build to failed without checking that the build has completed. This might be a byproduct of using multiple environments during the build process.
@davidfischer noted he also saw this in production, which means it might not even be related to the VCS changes.

Mmm... I'm not able to reproduce this in my local instance with the `master` branch. I see the states `Triggered`, `Cloning` and `Installing` without problems. Same behaviour in .org production.

@humitos I also experience this behavior on my local instance:

- Build any project
- Enter the build (a failure is shown)
- Wait ~5 seconds, reload the page
- The build has continued and is successful

![screenshot-2018-2-21 bookpy read the docs](https://user-images.githubusercontent.com/4975310/36497585-bcb8b084-1709-11e8-8b30-da6743bc8bf1.png)
![screenshot-2018-2-21 bookpy read the docs 1](https://user-images.githubusercontent.com/4975310/36497591-bff0aa2c-1709-11e8-9ffe-0896f0ec44a1.png)
![screenshot-2018-2-21 bookpy read the docs 2](https://user-images.githubusercontent.com/4975310/36497595-c3492faa-1709-11e8-87d0-f8310f40b488.png)

I haven't seen this on production yet.

@stsewd I get the same results as you once I click on the build, but the build list page is where I see a failure.

![image](https://user-images.githubusercontent.com/1140183/36554941-defc2234-17bd-11e8-81ac-65e2c4a99eba.png)

Another clue: I can reproduce on the first build of the project, not afterwards though.

@humitos I think the problem is with a project without tags. If I run this command on a project without tags, the exit code is `1` (here the build is marked as a failure).

![error](https://user-images.githubusercontent.com/4975310/36575270-1cf18bba-1818-11e8-9223-d0071a383827.png)

Then on another step the exit code is forced to be 0 (this is when the build goes back to normal).

Another clue: if I build the same project, the second build doesn't initially show the build as failing in the build list, but if I quickly select the build, it's in a failed state by the end of the VCS commands:

![image](https://user-images.githubusercontent.com/1140183/36608417-01c1d074-1887-11e8-965d-b5b2006aafd8.png)

This also causes the page to stop loading the build commands from the API, as we halt this process with JS when a build has failed. Reloading the page, I see it failed on the next command.

I checked this again and I found something new:

1. I triggered a `stable` build
1. I killed my celery instance because it got stuck
1. The build was in `Triggered`
1. I ran celery again
1. I re-triggered the `stable` build
1. I got this error

Also, I have this crazy log in my `debug.log`:

```
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING --- Logging error ---
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING Traceback (most recent call last):
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/doc_builder/backends/sphinx.py", line 148, in append_conf
    self.version.get_conf_py_path()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/builds/models.py", line 228, in get_conf_py_path
    conf_py_path = self.project.conf_dir(self.slug)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/models.py", line 557, in conf_dir
    conf_file = self.conf_file(version)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/models.py", line 553, in conf_file
    ProjectConfigurationError.NOT_FOUND
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING readthedocs.projects.exceptions.ProjectConfigurationError: A configuration file was not found. Make sure you have a conf.py file in your repository.
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING During handling of the above exception, another exception occurred:
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING Traceback (most recent call last):
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/tasks.py", line 399, in run_build
    outcomes = self.build_docs()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/tasks.py", line 583, in build_docs
    outcomes['html'] = self.build_docs_html()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/tasks.py", line 600, in build_docs_html
    html_builder.append_conf()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/doc_builder/backends/sphinx.py", line 151, in append_conf
    self._write_config(master_doc=master_doc)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/doc_builder/backends/sphinx.py", line 63, in _write_config
    safe_write(conf_file, conf_template)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/utils.py", line 125, in safe_write
    fh.write(contents.encode('utf-8', 'ignore'))
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING TypeError: write() argument must be str, not SafeBytes
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING During handling of the above exception, another exception occurred:
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING Traceback (most recent call last):
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/lib/python3.6/logging/__init__.py", line 995, in emit
    stream.write(self.terminator)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING OSError: [Errno 5] Input/output error
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING Call stack:
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/rtd3/bin/celery", line 11, in <module>
    sys.exit(main())
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/__main__.py", line 14, in main
    _main()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/bin/celery.py", line 326, in main
    cmd.execute_from_commandline(argv)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/bin/celery.py", line 488, in execute_from_commandline
    super(CeleryCommand, self).execute_from_commandline(argv)))
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/bin/base.py", line 281, in execute_from_commandline
    return self.handle_argv(self.prog_name, argv[1:])
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/bin/celery.py", line 480, in handle_argv
    return self.execute(command, argv)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/bin/celery.py", line 412, in execute
    ).run_from_argv(self.prog_name, argv[1:], command=argv[0])
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/bin/worker.py", line 221, in run_from_argv
    return self(*args, **options)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/bin/base.py", line 244, in __call__
    ret = self.run(*args, **kwargs)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/bin/worker.py", line 256, in run
    worker.start()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/worker/worker.py", line 203, in start
    self.blueprint.start(self)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
    step.start(parent)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/bootsteps.py", line 370, in start
    return self.obj.start()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/concurrency/base.py", line 131, in start
    self.on_start()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/concurrency/prefork.py", line 112, in on_start
    **self.options)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/concurrency/asynpool.py", line 422, in __init__
    super(AsynPool, self).__init__(processes, *args, **kwargs)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/billiard/pool.py", line 1007, in __init__
    self._create_worker_process(i)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/concurrency/asynpool.py", line 439, in _create_worker_process
    return super(AsynPool, self)._create_worker_process(i)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/billiard/pool.py", line 1116, in _create_worker_process
    w.start()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/billiard/process.py", line 124, in start
    self._popen = self._Popen(self)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/billiard/context.py", line 333, in _Popen
    return Popen(process_obj)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/billiard/popen_fork.py", line 24, in __init__
    self._launch(process_obj)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/billiard/popen_fork.py", line 79, in _launch
    code = process_obj._bootstrap()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/billiard/process.py", line 327, in _bootstrap
    self.run()
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/billiard/process.py", line 114, in run
    self._target(*self._args, **self._kwargs)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/billiard/pool.py", line 289, in __call__
    sys.exit(self.workloop(pid=pid))
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/billiard/pool.py", line 358, in workloop
    result = (True, prepare_result(fun(*args, **kwargs)))
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/app/trace.py", line 537, in _fast_trace_task
    uuid, args, kwargs, request,
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/app/trace.py", line 374, in trace_task
    R = retval = fun(*args, **kwargs)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/.pyenv/versions/3.6.4/envs/rtd3/lib/python3.6/site-packages/celery/app/trace.py", line 629, in __protected_call__
    return self.run(*args, **kwargs)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/tasks.py", line 297, in run
    self.run_build(record=record, docker=docker)
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/projects/tasks.py", line 414, in run_build
    log.warning('No build ID, not syncing files')
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING File "/home/humitos/rtfd/code/readthedocs.org/readthedocs/doc_builder/environments.py", line 720, in __exit__
    msg='Build finished'))
[06/Mar/2018 15:39:29] celery.redirected:235[20794]: WARNING Message: '(Build) [discoin:stable] Build finished'
Arguments: ()
```

I have more clues... First, the issue happens when RTD doesn't find a `conf.py` and raises `ProjectConfigurationError`. That exception is handled by our code by creating a `conf.py` based on a template, so we call `render_to_string`, which returns a `django.utils.safestring.SafeText` that we pass to `safe_write`, which encodes it to `utf8` and calls `fh.write` with a `django.utils.safestring.SafeBytes`, which fails. So, at that time, I think the Build is marked as FAILED, but since it's an intermediate step where "it's OK to fail" (`ProjectConfigurationError` is not an error to abort on), RTD continues building the docs. I'm not 100% sure yet, but I wanted to write this down before I forget :)

I just noticed that even after closing Celery, there were some celery processes around, and that probably made things go that crazy.

OK, I have two suppositions here:

1. things go crazy with multiple celery processes
1. this happens when running django and celery with Py3 and the project doesn't have a proper `index.rst` (or raises `ProjectConfigurationError` for some other reason that is not terrible enough to abort the build).

I created a PR at #3745 that should fix number 2). I've been trying to replicate the error on that branch and I wasn't able to. I'd like you to take a look and try to test it.

I'm not sure if 1) is worth testing, since it's an edge case and we shouldn't be running multiple celery processes. I will give it a try, though.
2018-03-22T16:11:18
readthedocs/readthedocs.org
3,843
readthedocs__readthedocs.org-3843
[ "3842" ]
f4e645dcd5186459b70403f4ead15673f104611c
diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -9,8 +9,9 @@ import os import re -from django.core.exceptions import ValidationError import git +from django.core.exceptions import ValidationError +from git.exc import BadName from six import PY2, StringIO from readthedocs.core.validators import validate_repository_url @@ -273,8 +274,13 @@ def find_ref(self, ref): return ref def ref_exists(self, ref): - code, _, _ = self.run('git', 'show-ref', ref, record_as_success=True) - return code == 0 + try: + r = git.Repo(self.working_dir) + if r.commit(ref): + return True + except (BadName, ValueError): + return False + return False @property def env(self):
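The rewritten `ref_exists` relies on GitPython resolving the ref to a commit object and raising when it can't, instead of shelling out to `git show-ref`. Standalone, that lookup behaves roughly like this (run inside any local checkout):

```python
import git
from git.exc import BadName

repo = git.Repo('.')  # any working directory with a .git


def ref_exists(ref):
    try:
        return bool(repo.commit(ref))  # tag, branch, or SHA
    except (BadName, ValueError):
        return False


print(ref_exists('master'))            # True in most repos
print(ref_exists('no-such-ref-here'))  # False, no subprocess involved
```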
Stable versions are broken

For instance: http://readthedocs.org/projects/docs/builds/6938938/
Related issues: https://github.com/rtfd/readthedocs.org/issues/3837

The previous passing `stable` build is this one: http://readthedocs.org/projects/docs/builds/6933018/

Note that the only difference is that now the `git checkout` command is adding the `origin/` prefix to the commit. Guessing, I would say that maybe some output has changed between Git 1.9 and Git 2.7.4, which was upgraded with the Ubuntu upgrade (a couple of days ago). Not sure, anyway.

@humitos I think it is due to the git upgrade; on my local instance I can't reproduce this. But the checkout commands are different:

![screenshot-2018-3-24 cice-consortium cice read the docs](https://user-images.githubusercontent.com/4975310/37859813-e53ba4e4-2ee7-11e8-9866-3f33f4e98da3.png)
![screenshot-2018-3-23 test-build read the docs](https://user-images.githubusercontent.com/4975310/37859814-e8799512-2ee7-11e8-9cbb-00733d1250d0.png)

Now I was able to reproduce this on my local instance with this project https://github.com/irmen/Pyro4

![screenshot-2018-3-23 pyoro read the docs](https://user-images.githubusercontent.com/4975310/37860022-8e0702be-2eeb-11e8-886a-408427782a20.png)

Found it! The error is here:

https://github.com/rtfd/readthedocs.org/blob/f3c0c0aa33c6017951b900e540d61ef36ea71d4e/readthedocs/vcs_support/backends/git.py#L250-L252
https://github.com/rtfd/readthedocs.org/blob/f3c0c0aa33c6017951b900e540d61ef36ea71d4e/readthedocs/vcs_support/backends/git.py#L256-L258

This is because before, `code` was the original `exit_code` of the command, but after #3831 (I think) the original `exit_code` is lost. This is something I was wondering about while writing tests on https://github.com/rtfd/readthedocs.org/pull/3764#issuecomment-375827453

So, probably other places that rely on the original `exit_code` are going to be in trouble...

@stsewd great find! I think you're right.
2018-03-24T06:10:20
readthedocs/readthedocs.org
3,860
readthedocs__readthedocs.org-3860
[ "3857" ]
5a3938580dd156138c9b8cace6148d3c4e970a93
diff --git a/readthedocs/core/validators.py b/readthedocs/core/validators.py --- a/readthedocs/core/validators.py +++ b/readthedocs/core/validators.py @@ -52,6 +52,11 @@ def __call__(self, value): @deconstructible class RepositoryURLValidator(object): + disallow_relative_url = True + + # Pattern for ``[email protected]:user/repo`` pattern + re_git_user = re.compile(r'^[\w]+@.+') + def __call__(self, value): allow_private_repos = getattr(settings, 'ALLOW_PRIVATE_REPOS', False) public_schemes = ['https', 'http', 'git', 'ftps', 'ftp'] @@ -60,28 +65,51 @@ def __call__(self, value): if allow_private_repos: valid_schemes += private_schemes url = urlparse(value) - if ( - ( # pylint: disable=too-many-boolean-expressions - url.scheme not in valid_schemes and - '@' not in value and - not value.startswith('lp:') - ) or - ( - value.startswith('/') or - value.startswith('file://') or - value.startswith('.') - ) - ): - # Avoid ``/path/to/local/file`` and ``file://`` scheme but allow - # ``[email protected]:user/project.git`` and ``lp:bazaar`` - raise ValidationError(_('Invalid scheme for URL')) - elif '&&' in value or '|' in value: + + # Malicious characters go first + if '&&' in value or '|' in value: raise ValidationError(_('Invalid character in the URL')) - elif ( - ('@' in value or url.scheme in private_schemes) and - not allow_private_repos - ): - raise ValidationError('Clonning via SSH is not supported') - return value + elif url.scheme in valid_schemes: + return value + + # Repo URL is not a supported scheme at this point, but there are + # several cases where we might support it + # Launchpad + elif value.startswith('lp:'): + return value + # Relative paths are conditionally supported + elif value.startswith('.') and not self.disallow_relative_url: + return value + # SSH cloning and ``[email protected]:user/project.git`` + elif self.re_git_user.search(value) or url.scheme in private_schemes: + if allow_private_repos: + return value + else: + # Throw a more helpful error message + raise ValidationError('Manual cloning via SSH is not supported') + + # No more valid URLs without supported URL schemes + raise ValidationError(_('Invalid scheme for URL')) + + +class SubmoduleURLValidator(RepositoryURLValidator): + + """ + A URL validator for repository submodules + + If a repository has a relative submodule, the URL path is effectively the + supermodule's remote ``origin`` URL with the relative path applied. + + From the git docs:: + + ``<repository>`` is the URL of the new submodule's origin repository. 
+ This may be either an absolute URL, or (if it begins with ``./`` or + ``../``), the location relative to the superproject's default remote + repository + """ + + disallow_relative_url = False + validate_repository_url = RepositoryURLValidator() +validate_submodule_url = SubmoduleURLValidator() diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -13,7 +13,7 @@ import git from six import PY2, StringIO -from readthedocs.core.validators import validate_repository_url +from readthedocs.core.validators import validate_submodule_url from readthedocs.projects.exceptions import RepositoryError from readthedocs.vcs_support.base import BaseVCS, VCSVersion @@ -79,7 +79,7 @@ def are_submodules_valid(self): repo = git.Repo(self.working_dir) for submodule in repo.submodules: try: - validate_repository_url(submodule.url) + validate_submodule_url(submodule.url) except ValidationError: return False return True
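Put side by side, the two validators above differ only in whether a relative path is acceptable. A usage sketch (this assumes a configured Django settings module, since the validators read `ALLOW_PRIVATE_REPOS` at call time):

```python
from django.core.exceptions import ValidationError

from readthedocs.core.validators import (
    validate_repository_url,
    validate_submodule_url,
)

validate_repository_url('https://github.com/rtfd/readthedocs.org')  # OK
validate_submodule_url('../other-repo')  # OK: relative is fine for submodules

try:
    validate_repository_url('../other-repo')  # relative is not a repo URL
except ValidationError as exc:
    print(exc)  # "Invalid scheme for URL"
```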
diff --git a/readthedocs/rtd_tests/tests/test_backend.py b/readthedocs/rtd_tests/tests/test_backend.py --- a/readthedocs/rtd_tests/tests/test_backend.py +++ b/readthedocs/rtd_tests/tests/test_backend.py @@ -1,6 +1,7 @@ from __future__ import absolute_import from os.path import exists +import pytest from django.contrib.auth.models import User import django_dynamic_fixture as fixture @@ -106,11 +107,16 @@ def test_check_submodule_urls(self): repo = self.project.vcs_repo() repo.checkout('submodule') self.assertTrue(repo.are_submodules_valid()) + repo.checkout('relativesubmodule') + self.assertTrue(repo.are_submodules_valid()) + @pytest.mark.xfail(strict=True, reason="Fixture is not working correctly") + def test_check_invalid_submodule_urls(self): with self.assertRaises(RepositoryError) as e: repo.checkout('invalidsubmodule') self.assertEqual(e.msg, RepositoryError.INVALID_SUBMODULES) + class TestHgBackend(RTDTestCase): def setUp(self): hg_repo = make_test_hg() diff --git a/readthedocs/rtd_tests/utils.py b/readthedocs/rtd_tests/utils.py --- a/readthedocs/rtd_tests/utils.py +++ b/readthedocs/rtd_tests/utils.py @@ -69,6 +69,14 @@ def make_test_git(): log.info(check_output(['git', 'add', '.'], env=env)) log.info(check_output(['git', 'commit', '-m"Add submodule"'], env=env)) + # Add a relative submodule URL in the relativesubmodule branch + log.info(check_output(['git', 'checkout', '-b', 'relativesubmodule', 'master'], env=env)) + log.info(check_output( + ['git', 'submodule', 'add', '-b', 'master', './', 'relativesubmodule'], + env=env + )) + log.info(check_output(['git', 'add', '.'], env=env)) + log.info(check_output(['git', 'commit', '-m"Add relative submodule"'], env=env)) # Add an invalid submodule URL in the invalidsubmodule branch log.info(check_output(['git', 'checkout', '-b', 'invalidsubmodule', 'master'], env=env)) log.info(check_output( @@ -77,6 +85,7 @@ def make_test_git(): )) log.info(check_output(['git', 'add', '.'], env=env)) log.info(check_output(['git', 'commit', '-m"Add invalid submodule"'], env=env)) + # Checkout to master branch again log.info(check_output(['git', 'checkout', 'master'], env=env)) chdir(path)
Docs build fails with "One or more submodule URLs are not valid."

## Details

* Project URL: http://opnfvdocsdemo.readthedocs.io/
* Build URL (if applicable): First failure: http://readthedocs.org/projects/opnfvdocsdemo/builds/6933536/, Error message seen: http://readthedocs.org/projects/opnfvdocsdemo/builds/6940325/
* Read the Docs username (if applicable): opnfv-rtd

## Expected Result

Given our [.gitmodules](https://git.opnfv.org/opnfvdocs/tree/.gitmodules) file, the build succeeds. The last change to our .gitmodules file was 8 days ago (March 19th).

## Actual Result

An error:

> One or more submodule URLs are not valid.

is displayed and the build fails.
Confirmed this is a bug. GitPython reports submodules like:

```
git.Submodule(name=docs/submodules/apex, path=docs/submodules/apex, url=../apex, branch_path=refs/heads/.)
```

In the case of relative URLs, git does not operate as we'd expect. From the git docs:

> `<repository>` is the URL of the new submodule's origin repository. This may be either an absolute URL, or (if it begins with ./ or ../), the location relative to the superproject's default remote repository

So for a parent project with a remote `origin` of `https://github.com/user/repo` and a submodule with a URL of `../other-repo`, the effective submodule URL is `https://github.com/user/other-repo`, not a local on-disk repo `/path/repo/../other-repo/`.
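That resolution rule behaves like a URL join against the superproject's remote, which is easy to check (using `six.moves` here to match the codebase's Python 2/3 support):

```python
from six.moves.urllib.parse import urljoin

origin = 'https://github.com/user/repo'

# git resolves a relative submodule URL against the default remote:
print(urljoin(origin + '/', '../other-repo'))
# -> https://github.com/user/other-repo
```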
2018-03-26T22:32:19
readthedocs/readthedocs.org
3,880
readthedocs__readthedocs.org-3880
[ "3861" ]
7aa6f4d41acbc8b35b7f2b891496ae0ded9f68f8
diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py --- a/readthedocs/doc_builder/environments.py +++ b/readthedocs/doc_builder/environments.py @@ -625,6 +625,8 @@ def __init__(self, *args, **kwargs): ) if self.config and self.config.build_image: self.container_image = self.config.build_image + if self.project.container_image: + self.container_image = self.project.container_image if self.project.container_mem_limit: self.container_mem_limit = self.project.container_mem_limit if self.project.container_time_limit: @@ -786,6 +788,13 @@ def get_container_host_config(self): mem_limit=self.container_mem_limit, ) + @property + def image_hash(self): + """Return the hash of the Docker image.""" + client = self.get_client() + image_metadata = client.inspect_image(self.container_image) + return image_metadata.get('Id') + @property def container_id(self): """Return id of container if it is valid.""" @@ -828,13 +837,13 @@ def update_build_from_container_state(self): def create_container(self): """Create docker container.""" client = self.get_client() - image = self.container_image - if self.project.container_image: - image = self.project.container_image try: - log.info('Creating Docker container: image=%s', image) + log.info( + 'Creating Docker container: image=%s', + self.container_image, + ) self.container = client.create_container( - image=image, + image=self.container_image, command=('/bin/sh -c "sleep {time}; exit {exit}"' .format(time=self.container_time_limit, exit=DOCKER_TIMEOUT_EXIT_CODE)), diff --git a/readthedocs/doc_builder/python_environments.py b/readthedocs/doc_builder/python_environments.py --- a/readthedocs/doc_builder/python_environments.py +++ b/readthedocs/doc_builder/python_environments.py @@ -109,12 +109,14 @@ def environment_json_path(self): @property def is_obsolete(self): """ - Determine if the Python version of the venv obsolete. + Determine if the environment is obsolete for different reasons. It checks the the data stored at ``readthedocs-environment.json`` and - compares it against the Python version in the project version to be - built and the Docker image used to create the venv against the one in - the project version config. + compares it with the one to be used. In particular: + + * the Python version (e.g. 2.7, 3, 3.6, etc) + * the Docker image name + * the Docker image hash :returns: ``True`` when it's obsolete and ``False`` otherwise @@ -129,15 +131,23 @@ def is_obsolete(self): try: with open(self.environment_json_path(), 'r') as fpath: environment_conf = json.load(fpath) - env_python_version = environment_conf['python']['version'] - env_build_image = environment_conf['build']['image'] except (IOError, TypeError, KeyError, ValueError): - log.error('Unable to read/parse readthedocs-environment.json file') - return False - - # TODO: remove getattr when https://github.com/rtfd/readthedocs.org/pull/3339 got merged - build_image = getattr(self.config, 'build_image', self.version.project.container_image) or DOCKER_IMAGE # noqa - + log.warning('Unable to read/parse readthedocs-environment.json file') + # We remove the JSON file here to avoid cycling over time with a + # corrupted file. 
+ os.remove(self.environment_json_path()) + return True + + env_python = environment_conf.get('python', {}) + env_build = environment_conf.get('build', {}) + + # By defaulting non-existent options to ``None`` we force a wipe since + # we don't know how the environment was created + env_python_version = env_python.get('version', None) + env_build_image = env_build.get('image', None) + env_build_hash = env_build.get('hash', None) + + build_image = self.config.build_image or DOCKER_IMAGE # If the user define the Python version just as a major version # (e.g. ``2`` or ``3``) we won't know exactly which exact version was # used to create the venv but we can still compare it against the new @@ -145,19 +155,19 @@ def is_obsolete(self): return any([ env_python_version != self.config.python_full_version, env_build_image != build_image, + env_build_hash != self.build_env.image_hash, ]) def save_environment_json(self): """Save on disk Python and build image versions used to create the venv.""" - # TODO: remove getattr when https://github.com/rtfd/readthedocs.org/pull/3339 got merged - build_image = getattr(self.config, 'build_image', self.version.project.container_image) or DOCKER_IMAGE # noqa - + build_image = self.config.build_image or DOCKER_IMAGE data = { 'python': { 'version': self.config.python_full_version, }, 'build': { 'image': build_image, + 'hash': self.build_env.image_hash, }, }
diff --git a/readthedocs/rtd_tests/tests/test_doc_building.py b/readthedocs/rtd_tests/tests/test_doc_building.py --- a/readthedocs/rtd_tests/tests/test_doc_building.py +++ b/readthedocs/rtd_tests/tests/test_doc_building.py @@ -9,7 +9,9 @@ absolute_import, division, print_function, unicode_literals) import os.path +import json import re +import tempfile import uuid from builtins import str @@ -837,14 +839,54 @@ def test_command_oom_kill(self): u'Command killed due to excessive memory consumption\n') - - -class TestAutoWipeEnvironment(TestCase): +class AutoWipeEnvironmentBase(object): fixtures = ['test_data'] + build_env_class = None def setUp(self): self.pip = Project.objects.get(slug='pip') self.version = self.pip.versions.get(slug='0.8') + self.build_env = self.build_env_class( + project=self.pip, + version=self.version, + build={'id': DUMMY_BUILD_ID}, + ) + + def test_save_environment_json(self): + config_data = { + 'build': { + 'image': '2.0', + }, + 'python': { + 'version': 2.7, + }, + } + yaml_config = create_load(config_data)()[0] + config = ConfigWrapper(version=self.version, yaml_config=yaml_config) + + python_env = Virtualenv( + version=self.version, + build_env=self.build_env, + config=config, + ) + + with patch( + 'readthedocs.doc_builder.python_environments.PythonEnvironment.environment_json_path', + return_value=tempfile.mktemp(suffix='envjson'), + ): + python_env.save_environment_json() + json_data = json.load(open(python_env.environment_json_path())) + + expected_data = { + 'build': { + 'image': 'readthedocs/build:2.0', + 'hash': 'a1b2c3', + }, + 'python': { + 'version': 2.7, + }, + } + self.assertDictEqual(json_data, expected_data) def test_is_obsolete_without_env_json_file(self): yaml_config = create_load()()[0] @@ -854,7 +896,7 @@ def test_is_obsolete_without_env_json_file(self): exists.return_value = False python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) @@ -868,7 +910,7 @@ def test_is_obsolete_with_invalid_env_json_file(self): exists.return_value = True python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) @@ -888,10 +930,10 @@ def test_is_obsolete_with_json_different_python_version(self): python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) - env_json_data = '{"build": {"image": "readthedocs/build:2.0"}, "python": {"version": 3.5}}' + env_json_data = '{"build": {"image": "readthedocs/build:2.0", "hash": "a1b2c3"}, "python": {"version": 3.5}}' # noqa with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa exists.return_value = True self.assertTrue(python_env.is_obsolete) @@ -910,10 +952,10 @@ def test_is_obsolete_with_json_different_build_image(self): python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) - env_json_data = '{"build": {"image": "readthedocs/build:2.0"}, "python": {"version": 2.7}}' + env_json_data = '{"build": {"image": "readthedocs/build:2.0", "hash": "a1b2c3"}, "python": {"version": 2.7}}' # noqa with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa exists.return_value = True self.assertTrue(python_env.is_obsolete) @@ -936,10 +978,10 @@ def test_is_obsolete_with_project_different_build_image(self): python_env = Virtualenv( 
version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) - env_json_data = '{"build": {"image": "readthedocs/build:2.0"}, "python": {"version": 2.7}}' + env_json_data = '{"build": {"image": "readthedocs/build:2.0", "hash": "a1b2c3"}, "python": {"version": 2.7}}' # noqa with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa exists.return_value = True self.assertTrue(python_env.is_obsolete) @@ -958,10 +1000,82 @@ def test_is_obsolete_with_json_same_data_as_version(self): python_env = Virtualenv( version=self.version, - build_env=None, + build_env=self.build_env, config=config, ) - env_json_data = '{"build": {"image": "readthedocs/build:2.0"}, "python": {"version": 3.5}}' + env_json_data = '{"build": {"image": "readthedocs/build:2.0", "hash": "a1b2c3"}, "python": {"version": 3.5}}' # noqa with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa exists.return_value = True self.assertFalse(python_env.is_obsolete) + + def test_is_obsolete_with_json_different_build_hash(self): + config_data = { + 'build': { + 'image': '2.0', + }, + 'python': { + 'version': 2.7, + }, + } + yaml_config = create_load(config_data)()[0] + config = ConfigWrapper(version=self.version, yaml_config=yaml_config) + + # Set container_image manually + self.pip.container_image = 'readthedocs/build:2.0' + self.pip.save() + + python_env = Virtualenv( + version=self.version, + build_env=self.build_env, + config=config, + ) + env_json_data = '{"build": {"image": "readthedocs/build:2.0", "hash": "foo"}, "python": {"version": 2.7}}' # noqa + with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa + exists.return_value = True + self.assertTrue(python_env.is_obsolete) + + def test_is_obsolete_with_json_missing_build_hash(self): + config_data = { + 'build': { + 'image': '2.0', + 'hash': 'a1b2c3', + }, + 'python': { + 'version': 2.7, + }, + } + yaml_config = create_load(config_data)()[0] + config = ConfigWrapper(version=self.version, yaml_config=yaml_config) + + # Set container_image manually + self.pip.container_image = 'readthedocs/build:2.0' + self.pip.save() + + python_env = Virtualenv( + version=self.version, + build_env=self.build_env, + config=config, + ) + env_json_data = '{"build": {"image": "readthedocs/build:2.0"}, "python": {"version": 2.7}}' # noqa + with patch('os.path.exists') as exists, patch('readthedocs.doc_builder.python_environments.open', mock_open(read_data=env_json_data)) as _open: # noqa + exists.return_value = True + self.assertTrue(python_env.is_obsolete) + + +@patch( + 'readthedocs.doc_builder.environments.DockerBuildEnvironment.image_hash', + PropertyMock(return_value='a1b2c3'), +) +class AutoWipeDockerBuildEnvironmentTest(AutoWipeEnvironmentBase, TestCase): + build_env_class = DockerBuildEnvironment + + [email protected]( + reason='PythonEnvironment needs to be refactored to do not rely on DockerBuildEnvironment', +) +@patch( + 'readthedocs.doc_builder.environments.DockerBuildEnvironment.image_hash', + PropertyMock(return_value='a1b2c3'), +) +class AutoWipeLocalBuildEnvironmentTest(AutoWipeEnvironmentBase, TestCase): + build_env_class = LocalBuildEnvironment
Default to `None` when the key doesn't exist in environment.json

On #3793 we should handle different cases when calling `is_obsolete`:

1. `environment.json` doesn't exist -> False
1. the JSON doesn't contain a new key (use `None`) -> True
1. fail at parsing the JSON for some reason -> True

Ref: https://github.com/rtfd/readthedocs.org/pull/3793#discussion_r176828412
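A trimmed sketch of how those three cases can map onto code; the key names follow the patch above, but this is illustrative rather than the shipped `is_obsolete`:

```python
import json
import os


def is_obsolete(json_path, current_image_hash):
    # 1. ``environment.json`` doesn't exist -> False
    if not os.path.exists(json_path):
        return False
    try:
        with open(json_path) as fh:
            saved = json.load(fh)
    except ValueError:
        # 3. the JSON fails to parse -> True (wipe to recover)
        return True
    # 2. a new key is missing -> ``get`` defaults to None -> True
    return saved.get('build', {}).get('hash') != current_image_hash
```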
2018-03-30T00:00:46
readthedocs/readthedocs.org
4,325
readthedocs__readthedocs.org-4325
[ "3570" ]
3458cde068c057882337112513cc594d51ec4adf
diff --git a/readthedocs/api/base.py b/readthedocs/api/base.py --- a/readthedocs/api/base.py +++ b/readthedocs/api/base.py @@ -12,7 +12,7 @@ from django.core.cache import cache from django.shortcuts import get_object_or_404 from tastypie import fields -from tastypie.authorization import DjangoAuthorization +from tastypie.authorization import DjangoAuthorization, ReadOnlyAuthorization from tastypie.constants import ALL, ALL_WITH_RELATIONS from tastypie.http import HttpCreated from tastypie.resources import ModelResource @@ -39,7 +39,7 @@ class Meta(object): allowed_methods = ['get', 'post', 'put'] queryset = Project.objects.api() authentication = PostAuthentication() - authorization = DjangoAuthorization() + authorization = ReadOnlyAuthorization() excludes = ['path', 'featured', 'programming_language'] filtering = { 'users': ALL_WITH_RELATIONS,
diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -418,8 +418,8 @@ def test_get_invalid_raw_log(self): class APITests(TestCase): fixtures = ['eric.json', 'test_data.json'] - def test_make_project(self): - """Test that a superuser can use the API.""" + def test_cant_make_project(self): + """Test that a user can't use the API to create projects.""" post_data = { 'name': 'awesome-project', 'repo': 'https://github.com/ericholscher/django-kong.git', @@ -430,16 +430,7 @@ def test_make_project(self): content_type='application/json', HTTP_AUTHORIZATION='Basic %s' % super_auth, ) - self.assertEqual(resp.status_code, 201) - self.assertEqual(resp['location'], '/api/v1/project/24/') - resp = self.client.get( - '/api/v1/project/24/', - data={'format': 'json'}, - HTTP_AUTHORIZATION='Basic %s' % eric_auth, - ) - self.assertEqual(resp.status_code, 200) - obj = json.loads(resp.content) - self.assertEqual(obj['slug'], 'awesome-project') + self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED) def test_user_doesnt_get_full_api_return(self): user_normal = get(User, is_staff=False)
Tastypie needs to upgrade our dependencies to support Python3

If you go to http://localhost:8000/api/v1/project/46162/sync_versions/ while running your instance with Python3 you will get this error:

> 'dict' object has no attribute 'has_key'

I found that we are using `mimetype`, https://github.com/rtfd/readthedocs.org/blob/df8079a18ed9c15e9c85f0646e3e9169e725b310/requirements/pip.txt#L49 and `python-mimeparse` should be used.

We should write a simple test for this, otherwise we will find this out in production :)

Ref: http://django-tastypie.readthedocs.io/en/latest/python3.html#changed-requirements
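The error itself is just Python 3 dropping the old dict API that the outdated dependency still calls:

```python
d = {'content-type': 'application/json'}

# Python 2 idiom, removed in Python 3 -- what the traceback complains about:
# d.has_key('content-type')  # AttributeError under Python 3

# Python 2/3 compatible replacement:
print('content-type' in d)  # True
```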
will api v1 still be supported in the long term?

I would like to work on this issue @humitos

@humitos @stsewd the webpage is not available on my local server at http://localhost:8000/api/v1/project/46162/sync_versions/ A screenshot of the error page:

![image](https://user-images.githubusercontent.com/23264605/36116277-a8a860c6-105b-11e8-94b4-9d65493ad828.png)

@142ayushkumar that's because that URL was just an example, you need to create a project on your local instance and take the id: `http://localhost:8000/api/v1/project/{id}/sync_versions/`. And I believe this endpoint only supports POST requests, but I'm not sure. Thanks for your interest in contributing :)

@stsewd, I am facing some problems with the webhook integration on my local instance. I followed the same steps as on my live account, where it worked. But on the local server:

![image](https://user-images.githubusercontent.com/23264605/36158003-39beba6a-1101-11e8-8b4e-76cbe71e02a5.png)

And one more thing: how do I run my local instance with python3? I am getting some errors from `python3 manage.py runserver`:

![image](https://user-images.githubusercontent.com/23264605/36158136-91d48b4e-1101-11e8-9b39-560613b06e1e.png)

@142ayushkumar did you have any problem when building your project? You can check the builds section `/projects/django-kon/builds`. And about your other problem, I think it's because you only set up your project for python2; please follow the installation steps with a python3 virtualenv.

![image](https://user-images.githubusercontent.com/23264605/36158961-a47ba500-1103-11e8-8364-5f3de99ac97b.png)

The build is successful.

@142ayushkumar That isn't a successful build, it's just triggered. I think you need to set up your build environment (see https://docs.readthedocs.io/en/latest/development/buildenvironments.html); by the way, that guide needs improvement, so you may want to read https://github.com/rtfd/readthedocs.org/pull/2692/files too.

@stsewd, can you help me with the [configuration part](https://docs.readthedocs.io/en/latest/development/buildenvironments.html#configuration)? How do I change these configurations?

You need to create a file `readthedocs/settings/local_settings.py` and put your settings there. These are my local settings: https://gist.github.com/stsewd/1fd3178435397c72c563dc83480dc663, you need to set `DOCKER_IMAGE` to yours.

@stsewd where do I save the Dockerfile (as mentioned [here](https://github.com/rtfd/readthedocs.org/pull/2692/files?diff=split#diff-bbaadadacee305c8e9d6825771319cfbR48)): in the readthedocs.org folder or in settings?

@stsewd @humitos Also, after saving the local settings as you suggested, I am getting this error:

![image](https://user-images.githubusercontent.com/23264605/36214867-88b3c852-11d0-11e8-9cfd-d1d901fa7e9e.png)

@142ayushkumar in any place you like; the important step here is building the image with `docker build -f Dockerfile.user -t readthedocs/build:2.0-modified .` in the same directory where the Dockerfile is. If you use my local_settings.py, you don't need these lines: https://gist.github.com/stsewd/1fd3178435397c72c563dc83480dc663#file-local_settings-py-L18-L26.

@stsewd I am still getting an error. Error log when I tried to build the image using the Dockerfile:

![image](https://user-images.githubusercontent.com/23264605/36256667-dd5cad8e-1279-11e8-9101-8775acfce710.png)

because I have saved my file named `Dockerfile`, not `Dockerfile.user`, but the latter also shows errors.

> will api v1 still be supported in the long term?

@stsewd the API v1 is _deprecated_ but people still use it. So, we don't want to add features or support it if this involves too much work, but making this small change to keep the compatibility shouldn't be so complicated and I think it's worth the effort. Anyway, the first step is to write a test that fails with Python3 so at least we are covered and this error doesn't pass silently.

@142ayushkumar you should use `docker build -t readthedocs/build:2.0-modified .` Also, I believe we are filling this issue with irrelevant content; it's probably better to ask these questions on https://gitter.im/rtfd/readthedocs.org or IRC (#readthedocs). I would be glad to help you there if you don't mind.

@stsewd @humitos I would like to work on this issue..

@sriks123 I think the steps given by @humitos are very clear, just follow them. If anything isn't clear, feel free to ask.

@humitos I was submitting a PR to fix this, but I realized that this endpoint doesn't work at all. The `_sync_versions` and `_delete_versions` were deleted in https://github.com/rtfd/readthedocs.org/commit/d11cc9f2a95054c5be3a53e9b47eb17d684f1d38#diff-aeef6dfbd37b07c9cc73482e8c2fb36eL95 So it always returns an exception: https://github.com/rtfd/readthedocs.org/blob/393e31ad3a9aafee297df64f1a654ffcda7ef04a/readthedocs/api/base.py#L81-L108 I think that was used internally only; now RTD uses the apiv2 endpoint to sync versions. Should this endpoint be deleted then?

This code was removed in https://github.com/rtfd/readthedocs.org/pull/4038. We only need to update the `mimeparse` dependency. BUT note that tastypie already includes this in their requirements https://github.com/django-tastypie/django-tastypie/blob/v0.13.0/requirements.txt (same version that we have, 0.13.0)

Also, I want to point out this issue that I found about tastypie that breaks our code when updating to `0.13.2`: https://github.com/django-tastypie/django-tastypie/issues/1407

Based on docs I wrote documenting our APIv2, I stated that we'd support APIv1 through at least Jan 2019. If that isn't possible, let me know. https://docs.readthedocs.io/en/latest/api/v1.html

I don't think this will be a problem when updating django or python, so we can hold till 2019 :grin:, but there are some solutions in the related issue (I'm not really familiar with the Authorization part, so I'm not sure how this will affect our code if we want to apply that fix).

@davidfischer I found that we need to update tastypie to support django 1.10 (#4319); if it isn't possible to adapt our code to https://github.com/django-tastypie/django-tastypie/issues/1407 we will need to remove tastypie. I'm not sure what decision will be taken here (keep tastypie to extend the api v1 life or upgrade django).
2018-07-03T19:52:35
readthedocs/readthedocs.org
4,431
readthedocs__readthedocs.org-4431
[ "4092" ]
c709316473a54d173f4df05dacc7bb47f839f534
diff --git a/readthedocs/projects/migrations/0028_remove_comments_and_update_old_migration.py b/readthedocs/projects/migrations/0028_remove_comments_and_update_old_migration.py new file mode 100644 --- /dev/null +++ b/readthedocs/projects/migrations/0028_remove_comments_and_update_old_migration.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.16 on 2018-10-31 10:08 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('projects', '0027_remove_json_with_html_feature'), + ] + + operations = [ + migrations.RemoveField( + model_name='project', + name='allow_comments', + ), + migrations.RemoveField( + model_name='project', + name='comment_moderation', + ), + migrations.AlterField( + model_name='domain', + name='https', + field=models.BooleanField(default=False, help_text='Always use HTTPS for this domain', verbose_name='Use HTTPS'), + ), + migrations.AlterField( + model_name='project', + name='documentation_type', + field=models.CharField(choices=[('auto', 'Automatically Choose'), ('sphinx', 'Sphinx Html'), ('mkdocs', 'Mkdocs (Markdown)'), ('sphinx_htmldir', 'Sphinx HtmlDir'), ('sphinx_singlehtml', 'Sphinx Single Page HTML')], default='sphinx', help_text='Type of documentation you are building. <a href="http://www.sphinx-doc.org/en/stable/builders.html#sphinx.builders.html.DirectoryHTMLBuilder">More info</a>.', max_length=20, verbose_name='Documentation type'), + ), + migrations.AlterField( + model_name='project', + name='language', + field=models.CharField(choices=[('aa', 'Afar'), ('ab', 'Abkhaz'), ('af', 'Afrikaans'), ('am', 'Amharic'), ('ar', 'Arabic'), ('as', 'Assamese'), ('ay', 'Aymara'), ('az', 'Azerbaijani'), ('ba', 'Bashkir'), ('be', 'Belarusian'), ('bg', 'Bulgarian'), ('bh', 'Bihari'), ('bi', 'Bislama'), ('bn', 'Bengali'), ('bo', 'Tibetan'), ('br', 'Breton'), ('ca', 'Catalan'), ('co', 'Corsican'), ('cs', 'Czech'), ('cy', 'Welsh'), ('da', 'Danish'), ('de', 'German'), ('dz', 'Dzongkha'), ('el', 'Greek'), ('en', 'English'), ('eo', 'Esperanto'), ('es', 'Spanish'), ('et', 'Estonian'), ('eu', 'Basque'), ('fa', 'Iranian'), ('fi', 'Finnish'), ('fj', 'Fijian'), ('fo', 'Faroese'), ('fr', 'French'), ('fy', 'Western Frisian'), ('ga', 'Irish'), ('gd', 'Scottish Gaelic'), ('gl', 'Galician'), ('gn', 'Guarani'), ('gu', 'Gujarati'), ('ha', 'Hausa'), ('hi', 'Hindi'), ('he', 'Hebrew'), ('hr', 'Croatian'), ('hu', 'Hungarian'), ('hy', 'Armenian'), ('ia', 'Interlingua'), ('id', 'Indonesian'), ('ie', 'Interlingue'), ('ik', 'Inupiaq'), ('is', 'Icelandic'), ('it', 'Italian'), ('iu', 'Inuktitut'), ('ja', 'Japanese'), ('jv', 'Javanese'), ('ka', 'Georgian'), ('kk', 'Kazakh'), ('kl', 'Kalaallisut'), ('km', 'Khmer'), ('kn', 'Kannada'), ('ko', 'Korean'), ('ks', 'Kashmiri'), ('ku', 'Kurdish'), ('ky', 'Kyrgyz'), ('la', 'Latin'), ('ln', 'Lingala'), ('lo', 'Lao'), ('lt', 'Lithuanian'), ('lv', 'Latvian'), ('mg', 'Malagasy'), ('mi', 'Maori'), ('mk', 'Macedonian'), ('ml', 'Malayalam'), ('mn', 'Mongolian'), ('mr', 'Marathi'), ('ms', 'Malay'), ('mt', 'Maltese'), ('my', 'Burmese'), ('na', 'Nauru'), ('ne', 'Nepali'), ('nl', 'Dutch'), ('no', 'Norwegian'), ('oc', 'Occitan'), ('om', 'Oromo'), ('or', 'Oriya'), ('pa', 'Panjabi'), ('pl', 'Polish'), ('ps', 'Pashto'), ('pt', 'Portuguese'), ('qu', 'Quechua'), ('rm', 'Romansh'), ('rn', 'Kirundi'), ('ro', 'Romanian'), ('ru', 'Russian'), ('rw', 'Kinyarwanda'), ('sa', 'Sanskrit'), ('sd', 'Sindhi'), ('sg', 'Sango'), ('si', 'Sinhala'), ('sk', 'Slovak'), ('sl', 
'Slovenian'), ('sm', 'Samoan'), ('sn', 'Shona'), ('so', 'Somali'), ('sq', 'Albanian'), ('sr', 'Serbian'), ('ss', 'Swati'), ('st', 'Southern Sotho'), ('su', 'Sudanese'), ('sv', 'Swedish'), ('sw', 'Swahili'), ('ta', 'Tamil'), ('te', 'Telugu'), ('tg', 'Tajik'), ('th', 'Thai'), ('ti', 'Tigrinya'), ('tk', 'Turkmen'), ('tl', 'Tagalog'), ('tn', 'Tswana'), ('to', 'Tonga'), ('tr', 'Turkish'), ('ts', 'Tsonga'), ('tt', 'Tatar'), ('tw', 'Twi'), ('ug', 'Uyghur'), ('uk', 'Ukrainian'), ('ur', 'Urdu'), ('uz', 'Uzbek'), ('vi', 'Vietnamese'), ('vo', 'Volapuk'), ('wo', 'Wolof'), ('xh', 'Xhosa'), ('yi', 'Yiddish'), ('yo', 'Yoruba'), ('za', 'Zhuang'), ('zh', 'Chinese'), ('zu', 'Zulu'), ('nb_NO', 'Norwegian Bokmal'), ('pt_BR', 'Brazilian Portuguese'), ('es_MX', 'Mexican Spanish'), ('uk_UA', 'Ukrainian'), ('zh_CN', 'Simplified Chinese'), ('zh_TW', 'Traditional Chinese')], default='en', help_text="The language the project documentation is rendered in. Note: this affects your project's URL.", max_length=20, verbose_name='Language'), + ), + migrations.AlterField( + model_name='project', + name='privacy_level', + field=models.CharField(choices=[('public', 'Public'), ('protected', 'Protected'), ('private', 'Private')], default='public', help_text='Level of privacy that you want on the repository. Protected means public but not in listings.', max_length=20, verbose_name='Privacy Level'), + ), + migrations.AlterField( + model_name='project', + name='python_interpreter', + field=models.CharField(choices=[('python', 'CPython 2.x'), ('python3', 'CPython 3.x')], default='python', help_text='The Python interpreter used to create the virtual environment.', max_length=20, verbose_name='Python Interpreter'), + ), + migrations.AlterField( + model_name='project', + name='version_privacy_level', + field=models.CharField(choices=[('public', 'Public'), ('protected', 'Protected'), ('private', 'Private')], default='public', help_text='Default level of privacy you want on built versions of documentation.', max_length=20, verbose_name='Version Privacy Level'), + ), + ] diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -137,10 +137,6 @@ class Project(models.Model): 'DirectoryHTMLBuilder">More info</a>.')) # Project features - # TODO: remove this? - allow_comments = models.BooleanField(_('Allow Comments'), default=False) - comment_moderation = models.BooleanField( - _('Comment Moderation'), default=False,) cdn_enabled = models.BooleanField(_('CDN Enabled'), default=False) analytics_code = models.CharField( _('Analytics code'), max_length=50, null=True, blank=True,
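For reference, a migration like the one above is the kind Django autogenerates once the model fields are deleted; a sketch of the usual workflow for this app (standard `manage.py` commands, app label `projects` as in this repo):

```console
$ python manage.py makemigrations projects
$ python manage.py migrate projects
```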
Remove unused field from Project model

When removing the comments app from the codebase in https://github.com/rtfd/readthedocs.org/pull/3802, a field was missed; we need to remove it and make a migration.

https://github.com/rtfd/readthedocs.org/blob/696a25f9b3350bb047caf1899a876aa1aa0fcbaa/readthedocs/projects/models.py#L140-L143
I would like to take up this issue if no one else is doing it.

@Alig1493 thanks for the interest, but there is a PR in progress already: https://github.com/rtfd/readthedocs.org/pull/4097

@stsewd the PR seems closed for the time being. Will it be alright for me to look into it at this moment?

@Alig1493 sure, feel free to send a PR.

@stsewd I have already made some progress on this issue. Just a couple of questions regarding the test cases pertaining to these two fields:

* should they be removed?
* should they be changed accordingly?

If it's the latter then I might end up needing some assistance in resolving them. Thank you in advance.

@stsewd I've made a PR here: https://github.com/rtfd/readthedocs.org/pull/4431 Any guidance regarding this would be much appreciated.

@Alig1493 reviewing!
2018-07-26T13:29:57
readthedocs/readthedocs.org
4,451
readthedocs__readthedocs.org-4451
[ "4279" ]
86043cc92cdbad9a492664e54e11664e101a8ff7
diff --git a/docs/doc_extensions.py b/docs/doc_extensions.py --- a/docs/doc_extensions.py +++ b/docs/doc_extensions.py @@ -6,11 +6,14 @@ djangosetting Output an inline literal of the corresponding setting value. Useful for keeping documentation up to date without editing on settings changes. -""" -from docutils import nodes, utils +buildpyversions + Output a comma separated list of the supported python versions for a + Read the Docs build image. +""" from django.conf import settings +from docutils import nodes, utils from readthedocs.projects.models import Feature @@ -23,8 +26,23 @@ def django_setting_role(typ, rawtext, text, lineno, inliner, options=None, return [node], [] +def python_supported_versions_role(typ, rawtext, text, lineno, inliner, + options=None, content=None): + """Up to date supported python versions for each build image.""" + image = '{}:{}'.format(settings.DOCKER_DEFAULT_IMAGE, text) + image_settings = settings.DOCKER_IMAGE_SETTINGS[image] + python_versions = image_settings['python']['supported_versions'] + node_list = [] + separator = ', ' + for i, version in enumerate(python_versions): + node_list.append(nodes.literal(version, version)) + if i < len(python_versions) - 1: + node_list.append(nodes.Text(separator)) + return (node_list, []) + + def feature_flags_role(typ, rawtext, text, lineno, inliner, options=None, - content=None): + content=None): """Up to date feature flags from the application.""" all_features = Feature.FEATURES requested_feature = utils.unescape(text) @@ -40,9 +58,13 @@ def setup(_): 'djangosetting', django_setting_role ) + roles.register_local_role( + 'buildpyversions', + python_supported_versions_role, + ) roles.register_local_role( 'featureflags', - feature_flags_role + feature_flags_role, ) return {
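If I read the registration above right, the docs can then call the new role inline; a hypothetical reST usage (the exact image tag must exist as a key in `DOCKER_IMAGE_SETTINGS`, so treat `2.0` here as an assumption):

```rst
The ``readthedocs/build:2.0`` image supports Python versions
:buildpyversions:`2.0`.
```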
Documentation for v2 of the configuration file
At first, I was thinking of automating this given the schema, but the spec isn't very large, so we can just hand-write it without too much effort.
2018-07-30T20:21:18
readthedocs/readthedocs.org
4,607
readthedocs__readthedocs.org-4607
[ "4455" ]
f0bdcbf5126c457e53b80f6d5e1c1d2d5cac8312
diff --git a/readthedocs/config/config.py b/readthedocs/config/config.py --- a/readthedocs/config/config.py +++ b/readthedocs/config/config.py @@ -17,6 +17,7 @@ from .models import Build, Conda, Mkdocs, Python, Sphinx, Submodules from .parser import ParseError, parse from .validation import ( + VALUE_NOT_FOUND, ValidationError, validate_bool, validate_choice, @@ -25,7 +26,6 @@ validate_file, validate_list, validate_string, - validate_value_exists, ) __all__ = ( @@ -54,6 +54,7 @@ PYTHON_INVALID = 'python-invalid' SUBMODULES_INVALID = 'submodules-invalid' INVALID_KEYS_COMBINATION = 'invalid-keys-combination' +INVALID_KEY = 'invalid-key' DOCKER_DEFAULT_IMAGE = 'readthedocs/build' DOCKER_DEFAULT_VERSION = '2.0' @@ -176,6 +177,41 @@ def catch_validation_error(self, key): source_position=self.source_position, ) + def pop(self, name, container, default, raise_ex): + """ + Search and pop a key inside a dict. + + This will pop the keys recursively if the container is empty. + + :param name: the key name in a list form (``['key', 'inner']``) + :param container: a dictionary that contains the key + :param default: default value to return if the key doesn't exists + :param raise_ex: if True, raises an exception when a key is not found + """ + key = name[0] + validate_dict(container) + if key in container: + if len(name) > 1: + value = self.pop(name[1:], container[key], default, raise_ex) + if not container[key]: + container.pop(key) + else: + value = container.pop(key) + return value + if raise_ex: + raise ValidationError(key, VALUE_NOT_FOUND) + return default + + def pop_config(self, key, default=None, raise_ex=False): + """ + Search and pop a key (recursively) from `self.raw_config`. + + :param key: the key name in a dotted form (``key.innerkey``) + :param default: Optionally, it can receive a default value + :param raise_ex: If True, raises an exception when the key is not found + """ + return self.pop(key.split('.'), self.raw_config, default, raise_ex) + def validate(self): raise NotImplementedError() @@ -594,6 +630,7 @@ def validate(self): # TODO: remove later self.validate_final_doc_type() self._config['submodules'] = self.validate_submodules() + self.validate_keys() def validate_formats(self): """ @@ -602,7 +639,7 @@ def validate_formats(self): The ``ALL`` keyword can be used to indicate that all formats are used. We ignore the default values here. 
""" - formats = self.raw_config.get('formats', []) + formats = self.pop_config('formats', []) if formats == ALL: return self.valid_formats with self.catch_validation_error('formats'): @@ -622,7 +659,7 @@ def validate_conda(self): conda = {} with self.catch_validation_error('conda.environment'): - environment = validate_value_exists('environment', raw_conda) + environment = self.pop_config('conda.environment', raise_ex=True) conda['environment'] = validate_file(environment, self.base_path) return conda @@ -637,7 +674,7 @@ def validate_build(self): validate_dict(raw_build) build = {} with self.catch_validation_error('build.image'): - image = raw_build.get('image', self.default_build_image) + image = self.pop_config('build.image', self.default_build_image) build['image'] = '{}:{}'.format( DOCKER_DEFAULT_IMAGE, validate_choice( @@ -674,7 +711,7 @@ def validate_python(self): python = {} with self.catch_validation_error('python.version'): - version = raw_python.get('version', 3) + version = self.pop_config('python.version', 3) if isinstance(version, six.string_types): try: version = int(version) @@ -690,7 +727,7 @@ def validate_python(self): with self.catch_validation_error('python.requirements'): requirements = self.defaults.get('requirements_file') - requirements = raw_python.get('requirements', requirements) + requirements = self.pop_config('python.requirements', requirements) if requirements != '' and requirements is not None: requirements = validate_file(requirements, self.base_path) python['requirements'] = requirements @@ -699,14 +736,16 @@ def validate_python(self): install = ( 'setup.py' if self.defaults.get('install_project') else None ) - install = raw_python.get('install', install) + install = self.pop_config('python.install', install) if install is not None: validate_choice(install, self.valid_install_options) python['install_with_setup'] = install == 'setup.py' python['install_with_pip'] = install == 'pip' with self.catch_validation_error('python.extra_requirements'): - extra_requirements = raw_python.get('extra_requirements', []) + extra_requirements = self.pop_config( + 'python.extra_requirements', [] + ) extra_requirements = validate_list(extra_requirements) if extra_requirements and not python['install_with_pip']: self.error( @@ -724,8 +763,8 @@ def validate_python(self): 'use_system_packages', False, ) - system_packages = raw_python.get( - 'system_packages', + system_packages = self.pop_config( + 'python.system_packages', system_packages, ) python['use_system_site_packages'] = validate_bool(system_packages) @@ -778,13 +817,13 @@ def validate_mkdocs(self): mkdocs = {} with self.catch_validation_error('mkdocs.configuration'): - configuration = raw_mkdocs.get('configuration') + configuration = self.pop_config('mkdocs.configuration', None) if configuration is not None: configuration = validate_file(configuration, self.base_path) mkdocs['configuration'] = configuration with self.catch_validation_error('mkdocs.fail_on_warning'): - fail_on_warning = raw_mkdocs.get('fail_on_warning', False) + fail_on_warning = self.pop_config('mkdocs.fail_on_warning', False) mkdocs['fail_on_warning'] = validate_bool(fail_on_warning) return mkdocs @@ -812,7 +851,7 @@ def validate_sphinx(self): sphinx = {} with self.catch_validation_error('sphinx.builder'): builder = validate_choice( - raw_sphinx.get('builder', 'html'), + self.pop_config('sphinx.builder', 'html'), self.valid_sphinx_builders.keys(), ) sphinx['builder'] = self.valid_sphinx_builders[builder] @@ -822,13 +861,15 @@ def validate_sphinx(self): 
# The default value can be empty if not configuration: configuration = None - configuration = raw_sphinx.get('configuration', configuration) + configuration = self.pop_config( + 'sphinx.configuration', configuration + ) if configuration is not None: configuration = validate_file(configuration, self.base_path) sphinx['configuration'] = configuration with self.catch_validation_error('sphinx.fail_on_warning'): - fail_on_warning = raw_sphinx.get('fail_on_warning', False) + fail_on_warning = self.pop_config('sphinx.fail_on_warning', False) sphinx['fail_on_warning'] = validate_bool(fail_on_warning) return sphinx @@ -870,7 +911,7 @@ def validate_submodules(self): submodules = {} with self.catch_validation_error('submodules.include'): - include = raw_submodules.get('include', []) + include = self.pop_config('submodules.include', []) if include != ALL: include = [ validate_string(submodule) @@ -880,7 +921,7 @@ def validate_submodules(self): with self.catch_validation_error('submodules.exclude'): default = [] if submodules['include'] else ALL - exclude = raw_submodules.get('exclude', default) + exclude = self.pop_config('submodules.exclude', default) if exclude != ALL: exclude = [ validate_string(submodule) @@ -902,11 +943,54 @@ def validate_submodules(self): ) with self.catch_validation_error('submodules.recursive'): - recursive = raw_submodules.get('recursive', False) + recursive = self.pop_config('submodules.recursive', False) submodules['recursive'] = validate_bool(recursive) return submodules + def validate_keys(self): + """ + Checks that we don't have extra keys (invalid ones). + + This should be called after all the validations are done + and all keys are popped from `self.raw_config`. + """ + msg = ( + 'Invalid configuration option: {}. ' + 'Make sure the key name is correct.' + ) + # The version key isn't popped, but it's + # validated in `load`. + self.pop_config('version', None) + wrong_key = '.'.join(self._get_extra_key(self.raw_config)) + if wrong_key: + self.error( + wrong_key, + msg.format(wrong_key), + code=INVALID_KEY, + ) + + def _get_extra_key(self, value): + """ + Get the extra keyname (list form) of a dict object. + + If there is more than one extra key, the first one is returned. + + Example:: + + { + 'key': { + 'name': 'inner', + } + } + + Will return `['key', 'name']`. + """ + if isinstance(value, dict) and value: + key_name = next(iter(value)) + return [key_name] + self._get_extra_key(value[key_name]) + return [] + @property def formats(self): return self._config['formats']
diff --git a/readthedocs/config/tests/test_config.py b/readthedocs/config/tests/test_config.py --- a/readthedocs/config/tests/test_config.py +++ b/readthedocs/config/tests/test_config.py @@ -4,20 +4,42 @@ import os import re import textwrap +from collections import OrderedDict import pytest from mock import DEFAULT, patch from pytest import raises from readthedocs.config import ( - ALL, BuildConfigV1, BuildConfigV2, ConfigError, - ConfigOptionNotSupportedError, InvalidConfig, ProjectConfig, load) + ALL, + BuildConfigV1, + BuildConfigV2, + ConfigError, + ConfigOptionNotSupportedError, + InvalidConfig, + ProjectConfig, + load, +) from readthedocs.config.config import ( - CONFIG_FILENAME_REGEX, CONFIG_NOT_SUPPORTED, CONFIG_REQUIRED, NAME_INVALID, - NAME_REQUIRED, PYTHON_INVALID, VERSION_INVALID) + CONFIG_FILENAME_REGEX, + CONFIG_NOT_SUPPORTED, + CONFIG_REQUIRED, + INVALID_KEY, + NAME_INVALID, + NAME_REQUIRED, + PYTHON_INVALID, + VERSION_INVALID, +) from readthedocs.config.models import Conda from readthedocs.config.validation import ( - INVALID_BOOL, INVALID_CHOICE, INVALID_LIST, INVALID_PATH, INVALID_STRING) + INVALID_BOOL, + INVALID_CHOICE, + INVALID_LIST, + INVALID_PATH, + INVALID_STRING, + VALUE_NOT_FOUND, + ValidationError, +) from .utils import apply_fs @@ -1702,3 +1724,83 @@ def test_submodules_recursive_explict_default(self): assert build.submodules.include == [] assert build.submodules.exclude == [] assert build.submodules.recursive is False + + @pytest.mark.parametrize('value,key', [ + ({'typo': 'something'}, 'typo'), + ( + { + 'pyton': { + 'version': 'another typo', + } + }, + 'pyton.version' + ), + ( + { + 'build': { + 'image': 'latest', + 'extra': 'key', + } + }, + 'build.extra' + ) + ]) + def test_strict_validation(self, value, key): + build = self.get_build_config(value) + with raises(InvalidConfig) as excinfo: + build.validate() + assert excinfo.value.key == key + assert excinfo.value.code == INVALID_KEY + + def test_strict_validation_pops_all_keys(self): + build = self.get_build_config({ + 'version': 2, + 'python': { + 'version': 3, + }, + }) + build.validate() + assert build.raw_config == {} + + @pytest.mark.parametrize('value,expected', [ + ({}, []), + ({'one': 1}, ['one']), + ({'one': {'two': 3}}, ['one', 'two']), + (OrderedDict([('one', 1), ('two', 2)]), ['one']), + (OrderedDict([('one', {'two': 2}), ('three', 3)]), ['one', 'two']), + ]) + def test_get_extra_key(self, value, expected): + build = self.get_build_config({}) + assert build._get_extra_key(value) == expected + + def test_pop_config_single(self): + build = self.get_build_config({'one': 1}) + build.pop_config('one') + assert build.raw_config == {} + + def test_pop_config_nested(self): + build = self.get_build_config({'one': {'two': 2}}) + build.pop_config('one.two') + assert build.raw_config == {} + + def test_pop_config_nested_with_residue(self): + build = self.get_build_config({'one': {'two': 2, 'three': 3}}) + build.pop_config('one.two') + assert build.raw_config == {'one': {'three': 3}} + + def test_pop_config_default_none(self): + build = self.get_build_config({'one': {'two': 2, 'three': 3}}) + assert build.pop_config('one.four') is None + assert build.raw_config == {'one': {'two': 2, 'three': 3}} + + def test_pop_config_default(self): + build = self.get_build_config({'one': {'two': 2, 'three': 3}}) + assert build.pop_config('one.four', 4) == 4 + assert build.raw_config == {'one': {'two': 2, 'three': 3}} + + def test_pop_config_raise_exception(self): + build = self.get_build_config({'one': {'two': 2, 
'three': 3}}) + with raises(ValidationError) as excinfo: + build.pop_config('one.four', raise_ex=True) + assert excinfo.value.value == 'four' + assert excinfo.value.code == VALUE_NOT_FOUND
Support strict validation for the configuration file
We can support strict validation in the configuration file (v2 only at this point).

## What do I mean by _strict validation_?

Validating that there are no keys in the config file other than the supported ones.

## Why do we want this?

It can prevent typos :). It should be straightforward to implement this in the current validation steps: just pop each value whenever it is used.
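A minimal sketch of the pop-based idea, assuming a plain dict of raw options; the names here are illustrative, not the final API (the actual implementation lives in the patch above):

```python
def validate_strict(raw_config):
    """Pop every supported key while validating; leftovers are typos."""
    config = {}
    config['formats'] = raw_config.pop('formats', [])
    config['python'] = raw_config.pop('python', {})
    # ... validate and pop the rest of the supported keys here ...
    if raw_config:
        # Anything still left in the dict is an unsupported key.
        unknown = next(iter(raw_config))
        raise ValueError(
            'Invalid configuration option: {}. '
            'Make sure the key name is correct.'.format(unknown)
        )
    return config
```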
I'm +1 for this as it will prevent future bugs. I think this is something good to have. It will avoid confusion for users, and by giving them a good validation error message, it's something that they can fix by themselves.
2018-09-05T22:38:35
readthedocs/readthedocs.org
4,608
readthedocs__readthedocs.org-4608
[ "2692" ]
edf2e60f66750b747f74f66a26bc72c2bc6ff74f
diff --git a/readthedocs/settings/dev.py b/readthedocs/settings/dev.py --- a/readthedocs/settings/dev.py +++ b/readthedocs/settings/dev.py @@ -83,3 +83,14 @@ def MIDDLEWARE(self): from .local_settings import * # noqa except ImportError: pass + +# Allow for local settings override to trigger images name change +try: + if DOCKER_USE_DEV_IMAGES: + DOCKER_IMAGE_SETTINGS = { + key.replace('readthedocs/build:', 'readthedocs/build-dev:'): settings + for (key, settings) + in DOCKER_IMAGE_SETTINGS.items() + } +except NameError: + pass
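With the patch above, a hedged example of how a developer might opt in from the `local_settings.py` override that `dev.py` already imports:

```python
# local_settings.py -- opt in to locally built dev images.
# With this flag set, an image key such as readthedocs/build:latest
# is looked up as readthedocs/build-dev:latest instead.
DOCKER_USE_DEV_IMAGES = True
```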
Docker build tip
I found a solution for my issue regarding local permissions between my host machine and the Docker container, and I added a _Tip_ inside the Docker section. Fixes one of the issues reported at #2658
> I think there is a proper fix to this that doesn't involve building a new image. We should find out what that is and make sure that is documented instead.

I agree with that, but I haven't found another (better) way yet. On the other hand, the RTD image could have more pre-defined users with a couple of common gids and uids (1000-1005, for example, since they are really common). I don't know.

Two different ideas to fix the docker/developer workflow:

1. use a `LOCAL_USER_ID = "os.system('id -u')"` setting with the current user as default (probably with a better way of getting it). For this, we will need to install `conda` for _all the users instead of only `docs`_. So, probably for this we could mount _envs directories_ like how we mount the `user_builds`
1. another idea, which could help first-time (or one-time) contributors, is to have a docker image for the whole RTD Django Project and follow these steps:
   1. clone the repo
   1. run `docker run <image>`
   1. modify the code as you wish
   1. run `docker run <image>`
   1. make the PR

cc @agjohnson

I'm just adding this here to keep it together in the same place, since I had to modify my old hack to allow conda.

So, this is my new hacky `Dockerfile` image:

```dockerfile
# Read the Docs - Environment base
FROM readthedocs/build:2.0
MAINTAINER Manuel Kaufmann <[email protected]>
LABEL version="humitos"

USER root

# UID and GID from readthedocs/user
RUN groupadd --gid 1000 humitos
RUN useradd -m --uid 1000 --gid 1000 humitos

USER humitos

# Install miniconda as humitos user
WORKDIR /home/humitos
RUN curl -O https://repo.continuum.io/miniconda/Miniconda2-4.3.11-Linux-x86_64.sh
RUN bash Miniconda2-4.3.11-Linux-x86_64.sh -b -p /home/humitos/miniconda2/

# we can't just use ENV and prepend our own conda since 'python2.7'
# will be taken from there and `virtualenv` is not installed. So, it fails.
# http://stackoverflow.com/questions/11650840/linux-remove-path-from-path-variable
RUN export PATH=`echo $PATH | sed -e 's/:\/home\/docs\/miniconda2\/bin\/$//'`
ENV PATH=$PATH:/home/humitos/miniconda2/bin

# Add conda-forge channel with the highest channel priority
RUN conda config --add channels conda-forge

CMD ["/bin/bash"]
```

We still don't have a better solution for this, and it's the setup that I'm using to develop. As Anthony said, though, this shouldn't be the default option to develop; on the other hand, we don't have anything else.

Should I close this PR? @agjohnson is your setup a better choice to suggest to newcomers? @ericholscher what's your setup? Is it _shareable_, or is it more complex?

I just... run docker? I don't have anything special; I don't think about the users in my setup.

Mmm... This is weird then. Why does it just work for you but not for me or other people? I'd like to have more information about your setup, because "just run docker" doesn't work on my Ubuntu or my Arch Linux, and I don't think it worked for Anthony running OSX either; I think that's why he uses Vagrant. 😕

@humitos, in my case, it works well on Windows 10 using MinGW64 (MSYS2). See [#48](https://github.com/rtfd/readthedocs-docker-images/issues/48#issuecomment-353768951). Indeed, it is a VM, so I suppose it's the same case as Anthony's Vagrant. However, it does not work in a Travis job (which is Ubuntu Trusty): https://travis-ci.org/1138-4EB/ghdl/jobs/320951163

Just out of curiosity, why are you adding a `docs` user?
---

BTW, I fixed it by ensuring that each user writes only to directories it already owns, and accesses any other directory with read-only permissions. I.e., I used `docker cp`:

```
docker run --name sphinx -tv /$(pwd):/src readthedocs/build:latest bash -c "\
pip3 install sphinx recommonmark sphinx_rtd_theme && \
cp -r /src/doc ~/ && cd ~/doc && \
make html"
docker cp sphinx:/home/docs/doc/build/html doc/
docker rm -f sphinx
```

hi @1138-4EB

> Just out of curiosity, why are you adding a docs user?

I didn't create the docs user; it comes with the original RTD docker image. I created my own user inside the docker image (`humitos`), and I need to do this because of permission issues (explained in the issue linked in the description).

This docker image is used by the `readthedocs.org` code itself; it's not used in the way you mentioned in the linked issue. I can run the original image as you did without any problem, but when it's run by the RTD code itself there are a couple of problems that this Docker Build Tip helps to solve.

@humitos After having some issues with virtualbox, I decided to give Docker for MacOS a try again. It seems to work pretty well now, so I've been using that. Like @ericholscher said, this solution just works; I don't worry about permissions or anything, it just runs.

@1138-4EB we need the `docs` user because we execute commands as a non-privileged user on both the container host and guest

I think, as we discussed in another PR, a `Dockerfile` in `contrib/` that has a configurable UID through a docker build arg is the closest we'll get.

> I think, as we discussed in another PR, a Dockerfile in contrib/ that has a configurable UID through a docker build arg is the closest we'll get.

If we all agree on this, I can create a PR with this and update the docs. Besides, it seems that this will only be necessary on Linux, so I'll add a note about that.

> This docker image is used by the readthedocs.org code itself; it's not used in the way you mentioned in the linked issue.

Where can I find the readthedocs code that uses the image with a given project? I.e., clone a repo with a `.readthedocs.yml` and run `rtd build all`. I saw how to spin up RTD as a service and access it through the browser, but I'd like to reproduce a build without all that overhead.

> I can run the original image as you did without any problem, but when it's run by the RTD code itself there are a couple of problems that this Docker Build Tip helps to solve.

Where can I find info about those problems? Can `docker cp` be used in the scripts so that using the same user is not a requirement? I mean, even though the solutions you are proposing are technically correct, it seems awkward. It makes sense "only" if readthedocs needs to replace a single native command with a single docker run.

> @1138-4EB we need the docs user because we execute commands as a non-privileged user on both the container host and guest

As explained above, you should be able to execute commands as two non-privileged users, one inside the container and a different one outside. It is 'cleaner' if each of them can only modify its own sources, because it avoids the container modifying external resources which should not be touched, or generating auxiliary files that should not be left in the base repo.

@humitos thoughts on this? I'm considering closing it, since it's quite old, but if it should be merged, we should do something with it :)
2018-09-05T22:55:31
readthedocs/readthedocs.org
4,622
readthedocs__readthedocs.org-4622
[ "4359" ]
937e8f8f92c89dee39ad227b6bf23a146ed43081
diff --git a/readthedocs/builds/views.py b/readthedocs/builds/views.py --- a/readthedocs/builds/views.py +++ b/readthedocs/builds/views.py @@ -1,26 +1,33 @@ +# -*- coding: utf-8 -*- + """Views for builds app.""" -from __future__ import absolute_import -from builtins import object +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + import logging -from django.shortcuts import get_object_or_404 -from django.views.generic import ListView, DetailView +from builtins import object +from django.contrib.auth.decorators import login_required +from django.core.urlresolvers import reverse from django.http import ( HttpResponseForbidden, HttpResponsePermanentRedirect, HttpResponseRedirect, ) -from django.contrib.auth.decorators import login_required -from readthedocs.core.permissions import AdminPermission -from django.core.urlresolvers import reverse +from django.shortcuts import get_object_or_404 from django.utils.decorators import method_decorator +from django.views.generic import DetailView, ListView from readthedocs.builds.models import Build, Version +from readthedocs.core.permissions import AdminPermission from readthedocs.core.utils import trigger_build from readthedocs.projects.models import Project - log = logging.getLogger(__name__) @@ -31,9 +38,11 @@ def get_queryset(self): self.project_slug = self.kwargs.get('project_slug', None) self.project = get_object_or_404( Project.objects.protected(self.request.user), - slug=self.project_slug + slug=self.project_slug, + ) + queryset = Build.objects.public( + user=self.request.user, project=self.project ) - queryset = Build.objects.public(user=self.request.user, project=self.project) return queryset @@ -54,8 +63,10 @@ def post(self, request, project_slug): slug=version_slug, ) - trigger_build(project=project, version=version) - return HttpResponseRedirect(reverse('builds_project_list', args=[project.slug])) + _, build = trigger_build(project=project, version=version) + return HttpResponseRedirect( + reverse('builds_detail', args=[project.slug, build.pk]), + ) class BuildList(BuildBase, BuildTriggerMixin, ListView): @@ -63,11 +74,14 @@ class BuildList(BuildBase, BuildTriggerMixin, ListView): def get_context_data(self, **kwargs): context = super(BuildList, self).get_context_data(**kwargs) - active_builds = self.get_queryset().exclude(state="finished").values('id') + active_builds = self.get_queryset().exclude(state='finished' + ).values('id') context['project'] = self.project context['active_builds'] = active_builds - context['versions'] = Version.objects.public(user=self.request.user, project=self.project) + context['versions'] = Version.objects.public( + user=self.request.user, project=self.project + ) context['build_qs'] = self.get_queryset() return context @@ -84,9 +98,14 @@ def get_context_data(self, **kwargs): # Old build view redirects + def builds_redirect_list(request, project_slug): # pylint: disable=unused-argument - return HttpResponsePermanentRedirect(reverse('builds_project_list', args=[project_slug])) + return HttpResponsePermanentRedirect( + reverse('builds_project_list', args=[project_slug]) + ) def builds_redirect_detail(request, project_slug, pk): # pylint: disable=unused-argument - return HttpResponsePermanentRedirect(reverse('builds_detail', args=[project_slug, pk])) + return HttpResponsePermanentRedirect( + reverse('builds_detail', args=[project_slug, pk]) + ) diff --git a/readthedocs/core/utils/__init__.py b/readthedocs/core/utils/__init__.py --- 
a/readthedocs/core/utils/__init__.py +++ b/readthedocs/core/utils/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + """Common utilty functions.""" from __future__ import absolute_import @@ -14,13 +15,11 @@ from django.utils.functional import allow_lazy from django.utils.safestring import SafeText, mark_safe from django.utils.text import slugify as slugify_base -from future.backports.urllib.parse import urlparse from celery import group, chord from readthedocs.builds.constants import LATEST, BUILD_STATE_TRIGGERED from readthedocs.doc_builder.constants import DOCKER_LIMITS - log = logging.getLogger(__name__) SYNC_USER = getattr(settings, 'SYNC_USER', getpass.getuser()) @@ -40,9 +39,9 @@ def broadcast(type, task, args, kwargs=None, callback=None): # pylint: disable= kwargs = {} default_queue = getattr(settings, 'CELERY_DEFAULT_QUEUE', 'celery') if type in ['web', 'app']: - servers = getattr(settings, "MULTIPLE_APP_SERVERS", [default_queue]) + servers = getattr(settings, 'MULTIPLE_APP_SERVERS', [default_queue]) elif type in ['build']: - servers = getattr(settings, "MULTIPLE_BUILD_SERVERS", [default_queue]) + servers = getattr(settings, 'MULTIPLE_BUILD_SERVERS', [default_queue]) tasks = [] for server in servers: @@ -71,7 +70,12 @@ def cname_to_slug(host): def prepare_build( - project, version=None, record=True, force=False, immutable=True): + project, + version=None, + record=True, + force=False, + immutable=True, +): """ Prepare a build in a Celery task for project and version. @@ -132,11 +136,14 @@ def prepare_build( options['soft_time_limit'] = time_limit options['time_limit'] = int(time_limit * 1.2) - return update_docs_task.signature( - args=(project.pk,), - kwargs=kwargs, - options=options, - immutable=True, + return ( + update_docs_task.signature( + args=(project.pk,), + kwargs=kwargs, + options=options, + immutable=True, + ), + build, ) @@ -151,9 +158,9 @@ def trigger_build(project, version=None, record=True, force=False): :param version: version of the project to be built. Default: ``latest`` :param record: whether or not record the build in a new Build object :param force: build the HTML documentation even if the files haven't changed - :returns: Celery AsyncResult promise + :returns: A tuple (Celery AsyncResult promise, Task Signature from ``prepare_build``) """ - update_docs_task = prepare_build( + update_docs_task, build = prepare_build( project, version, record, @@ -165,11 +172,13 @@ def trigger_build(project, version=None, record=True, force=False): # Current project is skipped return None - return update_docs_task.apply_async() + return (update_docs_task.apply_async(), build) -def send_email(recipient, subject, template, template_html, context=None, - request=None, from_email=None, **kwargs): # pylint: disable=unused-argument +def send_email( + recipient, subject, template, template_html, context=None, request=None, + from_email=None, **kwargs +): # pylint: disable=unused-argument """ Alter context passed in and call email send task. 
@@ -183,10 +192,14 @@ def send_email(recipient, subject, template, template_html, context=None, if context is None: context = {} context['uri'] = '{scheme}://{host}'.format( - scheme='https', host=settings.PRODUCTION_DOMAIN) - send_email_task.delay(recipient=recipient, subject=subject, template=template, - template_html=template_html, context=context, from_email=from_email, - **kwargs) + scheme='https', + host=settings.PRODUCTION_DOMAIN, + ) + send_email_task.delay( + recipient=recipient, subject=subject, template=template, + template_html=template_html, context=context, from_email=from_email, + **kwargs + ) def slugify(value, *args, **kwargs): diff --git a/readthedocs/projects/views/private.py b/readthedocs/projects/views/private.py --- a/readthedocs/projects/views/private.py +++ b/readthedocs/projects/views/private.py @@ -1,8 +1,13 @@ # -*- coding: utf-8 -*- + """Project views for authenticated users.""" from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) import logging @@ -13,8 +18,11 @@ from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.http import ( - Http404, HttpResponseBadRequest, HttpResponseNotAllowed, - HttpResponseRedirect) + Http404, + HttpResponseBadRequest, + HttpResponseNotAllowed, + HttpResponseRedirect, +) from django.middleware.csrf import get_token from django.shortcuts import get_object_or_404, render from django.utils.safestring import mark_safe @@ -22,22 +30,39 @@ from django.views.generic import ListView, TemplateView, View from formtools.wizard.views import SessionWizardView from vanilla import CreateView, DeleteView, DetailView, GenericView, UpdateView + from readthedocs.builds.forms import VersionForm from readthedocs.builds.models import Version from readthedocs.core.mixins import ListViewWithForm, LoginRequiredMixin -from readthedocs.core.utils import broadcast, trigger_build, prepare_build +from readthedocs.core.utils import broadcast, prepare_build, trigger_build from readthedocs.integrations.models import HttpExchange, Integration from readthedocs.oauth.services import registry -from readthedocs.oauth.utils import update_webhook from readthedocs.oauth.tasks import attach_webhook +from readthedocs.oauth.utils import update_webhook from readthedocs.projects import tasks from readthedocs.projects.forms import ( - DomainForm, EmailHookForm, IntegrationForm, ProjectAdvancedForm, - ProjectAdvertisingForm, ProjectBasicsForm, ProjectExtraForm, - ProjectRelationshipForm, RedirectForm, TranslationForm, UpdateProjectForm, - UserForm, WebHookForm, build_versions_form) + DomainForm, + EmailHookForm, + IntegrationForm, + ProjectAdvancedForm, + ProjectAdvertisingForm, + ProjectBasicsForm, + ProjectExtraForm, + ProjectRelationshipForm, + RedirectForm, + TranslationForm, + UpdateProjectForm, + UserForm, + WebHookForm, + build_versions_form, +) from readthedocs.projects.models import ( - Domain, EmailHook, Project, ProjectRelationship, WebHook) + Domain, + EmailHook, + Project, + ProjectRelationship, + WebHook, +) from readthedocs.projects.signals import project_import from readthedocs.projects.views.base import ProjectAdminMixin, ProjectSpamMixin @@ -119,7 +144,9 @@ def project_versions(request, project_slug): like to have built. 
""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) if not project.is_imported: raise Http404 @@ -135,19 +162,27 @@ def project_versions(request, project_slug): return HttpResponseRedirect(project_dashboard) return render( - request, 'projects/project_versions.html', - {'form': form, 'project': project}) + request, + 'projects/project_versions.html', + {'form': form, 'project': project}, + ) @login_required def project_version_detail(request, project_slug, version_slug): """Project version detail page.""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) version = get_object_or_404( Version.objects.public( - user=request.user, project=project, only_active=False), - slug=version_slug) + user=request.user, + project=project, + only_active=False, + ), + slug=version_slug, + ) form = VersionForm(request.POST or None, instance=version) @@ -157,15 +192,20 @@ def project_version_detail(request, project_slug, version_slug): if 'active' in form.changed_data and version.active is False: log.info('Removing files for version %s', version.slug) broadcast( - type='app', task=tasks.clear_artifacts, args=[version.get_artifact_paths()]) + type='app', + task=tasks.clear_artifacts, + args=[version.get_artifact_paths()], + ) version.built = False version.save() url = reverse('project_version_list', args=[project.slug]) return HttpResponseRedirect(url) return render( - request, 'projects/project_version_detail.html', - {'form': form, 'project': project, 'version': version}) + request, + 'projects/project_version_detail.html', + {'form': form, 'project': project, 'version': version}, + ) @login_required @@ -177,7 +217,9 @@ def project_delete(request, project_slug): confirmation of delete. 
""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) if request.method == 'POST': broadcast(type='app', task=tasks.remove_dir, args=[project.doc_path]) @@ -240,11 +282,12 @@ def done(self, form_list, **kwargs): self.trigger_initial_build(project) return HttpResponseRedirect( - reverse('projects_detail', args=[project.slug])) + reverse('projects_detail', args=[project.slug]), + ) def trigger_initial_build(self, project): """Trigger initial build.""" - update_docs = prepare_build(project) + update_docs, build = prepare_build(project) task_promise = chain( attach_webhook.si(project.pk, self.request.user.pk), update_docs, @@ -275,10 +318,13 @@ def get(self, request, *args, **kwargs): data = self.get_form_data() project = Project.objects.for_admin_user( - request.user).filter(repo=data['repo']).first() + request.user, + ).filter(repo=data['repo']).first() if project is not None: messages.success( - request, _('The demo project is already imported!')) + request, + _('The demo project is already imported!'), + ) else: kwargs = self.get_form_kwargs() form = self.form_class(data=data, **kwargs) @@ -287,7 +333,9 @@ def get(self, request, *args, **kwargs): project.save() trigger_build(project) messages.success( - request, _('Your demo project is currently being imported')) + request, + _('Your demo project is currently being imported'), + ) else: messages.error( request, @@ -295,14 +343,15 @@ def get(self, request, *args, **kwargs): ) return HttpResponseRedirect(reverse('projects_dashboard')) return HttpResponseRedirect( - reverse('projects_detail', args=[project.slug])) + reverse('projects_detail', args=[project.slug]), + ) def get_form_data(self): """Get form data to post to import form.""" return { 'name': '{0}-demo'.format(self.request.user.username), 'repo_type': 'git', - 'repo': 'https://github.com/readthedocs/template.git' + 'repo': 'https://github.com/readthedocs/template.git', } def get_form_kwargs(self): @@ -336,7 +385,8 @@ def get(self, request, *args, **kwargs): .exclude( provider__in=[ service.adapter.provider_id for service in registry - ]) + ], + ) ) # yapf: disable for account in deprecated_accounts: provider_account = account.get_provider_account() @@ -346,10 +396,12 @@ def get(self, request, *args, **kwargs): _( 'There is a problem with your {service} account, ' 'try reconnecting your account on your ' - '<a href="{url}">connected services page</a>.').format( - service=provider_account.get_brand()['name'], - url=reverse('socialaccount_connections')) - )) # yapf: disable + '<a href="{url}">connected services page</a>.', + ).format( + service=provider_account.get_brand()['name'], + url=reverse('socialaccount_connections'), + ) + )), # yapf: disable ) return super(ImportView, self).get(request, *args, **kwargs) @@ -368,7 +420,8 @@ def get_context_data(self, **kwargs): context = super(ImportView, self).get_context_data(**kwargs) context['view_csrf_token'] = get_token(self.request) context['has_connected_accounts'] = SocialAccount.objects.filter( - user=self.request.user).exists() + user=self.request.user, + ).exists() return context @@ -385,8 +438,10 @@ def get_queryset(self): def get_form(self, data=None, files=None, **kwargs): kwargs['user'] = self.request.user - return super(ProjectRelationshipMixin, - self).get_form(data, files, **kwargs) + return super( + ProjectRelationshipMixin, + self, + ).get_form(data, files, **kwargs) def form_valid(self, form): broadcast( 
@@ -426,7 +481,9 @@ def get(self, request, *args, **kwargs): def project_users(request, project_slug): """Project users view and form view.""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) form = UserForm(data=request.POST or None, project=project) @@ -449,9 +506,13 @@ def project_users_delete(request, project_slug): if request.method != 'POST': return HttpResponseNotAllowed('Only POST is allowed') project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) user = get_object_or_404( - User.objects.all(), username=request.POST.get('username')) + User.objects.all(), + username=request.POST.get('username'), + ) if user == request.user: raise Http404 project.users.remove(user) @@ -463,7 +524,9 @@ def project_users_delete(request, project_slug): def project_notifications(request, project_slug): """Project notification view and form view.""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) email_form = EmailHookForm(data=request.POST or None, project=project) webhook_form = WebHookForm(data=request.POST or None, project=project) @@ -501,14 +564,18 @@ def project_notifications_delete(request, project_slug): if request.method != 'POST': return HttpResponseNotAllowed('Only POST is allowed') project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) try: project.emailhook_notifications.get( - email=request.POST.get('email')).delete() + email=request.POST.get('email'), + ).delete() except EmailHook.DoesNotExist: try: project.webhook_notifications.get( - url=request.POST.get('email')).delete() + url=request.POST.get('email'), + ).delete() except WebHook.DoesNotExist: raise Http404 project_dashboard = reverse('projects_notifications', args=[project.slug]) @@ -519,7 +586,9 @@ def project_notifications_delete(request, project_slug): def project_translations(request, project_slug): """Project translations view and form view.""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) form = TranslationForm( data=request.POST or None, parent=project, @@ -566,7 +635,9 @@ def project_translations_delete(request, project_slug, child_slug): def project_redirects(request, project_slug): """Project redirects view and form view.""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) form = RedirectForm(data=request.POST or None, project=project) @@ -578,8 +649,10 @@ def project_redirects(request, project_slug): redirects = project.redirects.all() return render( - request, 'projects/project_redirects.html', - {'form': form, 'project': project, 'redirects': redirects}) + request, + 'projects/project_redirects.html', + {'form': form, 'project': project, 'redirects': redirects}, + ) @login_required @@ -588,15 +661,20 @@ def project_redirects_delete(request, project_slug): if request.method != 'POST': return HttpResponseNotAllowed('Only POST is allowed') project = get_object_or_404( - Project.objects.for_admin_user(request.user), 
slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) redirect = get_object_or_404( - project.redirects, pk=request.POST.get('id_pk')) + project.redirects, + pk=request.POST.get('id_pk'), + ) if redirect.project == project: redirect.delete() else: raise Http404 return HttpResponseRedirect( - reverse('projects_redirects', args=[project.slug])) + reverse('projects_redirects', args=[project.slug]), + ) @login_required @@ -607,21 +685,33 @@ def project_version_delete_html(request, project_slug, version_slug): This marks a version as not built """ project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) version = get_object_or_404( Version.objects.public( - user=request.user, project=project, only_active=False), - slug=version_slug) + user=request.user, + project=project, + only_active=False, + ), + slug=version_slug, + ) if not version.active: version.built = False version.save() - broadcast(type='app', task=tasks.clear_artifacts, args=[version.get_artifact_paths()]) + broadcast( + type='app', + task=tasks.clear_artifacts, + args=[version.get_artifact_paths()], + ) else: return HttpResponseBadRequest( - "Can't delete HTML for an active version.") + "Can't delete HTML for an active version.", + ) return HttpResponseRedirect( - reverse('project_version_list', kwargs={'project_slug': project_slug})) + reverse('project_version_list', kwargs={'project_slug': project_slug}), + ) class DomainMixin(ProjectAdminMixin, PrivateViewMixin): @@ -719,7 +809,8 @@ def get_template_names(self): suffix = self.SUFFIX_MAP.get(integration_type, integration_type) return ( 'projects/integration_{0}{1}.html' - .format(suffix, self.template_name_suffix)) + .format(suffix, self.template_name_suffix) + ) class IntegrationDelete(IntegrationMixin, DeleteView):
diff --git a/readthedocs/rtd_tests/tests/test_views.py b/readthedocs/rtd_tests/tests/test_views.py --- a/readthedocs/rtd_tests/tests/test_views.py +++ b/readthedocs/rtd_tests/tests/test_views.py @@ -1,19 +1,26 @@ -from __future__ import absolute_import +# -*- coding: utf-8 -*- +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +import mock from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.test import TestCase from django.utils.six.moves.urllib.parse import urlsplit -from django_dynamic_fixture import get -from django_dynamic_fixture import new +from django_dynamic_fixture import get, new from readthedocs.builds.constants import LATEST +from readthedocs.builds.models import Build from readthedocs.core.permissions import AdminPermission -from readthedocs.projects.models import ImportedFile -from readthedocs.projects.models import Project from readthedocs.projects.forms import UpdateProjectForm - +from readthedocs.projects.models import ImportedFile, Project class Testmaker(TestCase): + def setUp(self): self.eric = User(username='eric') self.eric.set_password('test') @@ -27,21 +34,24 @@ def test_imported_docs(self): self.assertEqual(r.status_code, 200) r = self.client.get('/dashboard/import/manual/', {}) self.assertEqual(r.status_code, 200) - form = UpdateProjectForm(data={ - 'name': 'Django Kong', - 'repo': 'https://github.com/ericholscher/django-kong', - 'repo_type': 'git', - 'description': 'OOHHH AH AH AH KONG SMASH', - 'language': 'en', - 'default_branch': '', - 'project_url': 'http://django-kong.rtfd.org', - 'default_version': LATEST, - 'privacy_level': 'public', - 'version_privacy_level': 'public', - 'python_interpreter': 'python', - 'documentation_type': 'sphinx', - 'csrfmiddlewaretoken': '34af7c8a5ba84b84564403a280d9a9be', - }, user=user) + form = UpdateProjectForm( + data={ + 'name': 'Django Kong', + 'repo': 'https://github.com/ericholscher/django-kong', + 'repo_type': 'git', + 'description': 'OOHHH AH AH AH KONG SMASH', + 'language': 'en', + 'default_branch': '', + 'project_url': 'http://django-kong.rtfd.org', + 'default_version': LATEST, + 'privacy_level': 'public', + 'version_privacy_level': 'public', + 'python_interpreter': 'python', + 'documentation_type': 'sphinx', + 'csrfmiddlewaretoken': '34af7c8a5ba84b84564403a280d9a9be', + }, + user=user, + ) _ = form.save() _ = Project.objects.get(slug='django-kong') @@ -111,11 +121,13 @@ def test_project_delete(self): def test_subprojects_delete(self): # This URL doesn't exist anymore, 404 response = self.client.get( - '/dashboard/pip/subprojects/delete/a-subproject/') + '/dashboard/pip/subprojects/delete/a-subproject/', + ) self.assertEqual(response.status_code, 404) # New URL response = self.client.get( - '/dashboard/pip/subprojects/a-subproject/delete/') + '/dashboard/pip/subprojects/a-subproject/delete/', + ) self.assertRedirectToLogin(response) def test_subprojects(self): @@ -143,7 +155,9 @@ def test_project_translations(self): self.assertRedirectToLogin(response) def test_project_translations_delete(self): - response = self.client.get('/dashboard/pip/translations/delete/a-translation/') + response = self.client.get( + '/dashboard/pip/translations/delete/a-translation/' + ) self.assertRedirectToLogin(response) def test_project_redirects(self): @@ -168,7 +182,8 @@ def setUp(self): slug='file', path='file.html', md5='abcdef', - commit='1234567890abcdef') + commit='1234567890abcdef', + ) def test_random_page_view_redirects(self): response 
= self.client.get('/random/') @@ -188,7 +203,9 @@ def test_404_for_with_no_imported_files(self): response = self.client.get('/random/pip/') self.assertEqual(response.status_code, 404) + class SubprojectViewTests(TestCase): + def setUp(self): self.user = new(User, username='test') self.user.set_password('test') @@ -201,10 +218,15 @@ def setUp(self): self.client.login(username='test', password='test') def test_deny_delete_for_non_project_admins(self): - response = self.client.get('/dashboard/my-mainproject/subprojects/delete/my-subproject/') + response = self.client.get( + '/dashboard/my-mainproject/subprojects/delete/my-subproject/' + ) self.assertEqual(response.status_code, 404) - self.assertTrue(self.subproject in [r.child for r in self.project.subprojects.all()]) + self.assertTrue( + self.subproject in + [r.child for r in self.project.subprojects.all()] + ) def test_admins_can_delete_subprojects(self): self.project.users.add(self.user) @@ -212,24 +234,56 @@ def test_admins_can_delete_subprojects(self): # URL doesn't exist anymore, 404 response = self.client.get( - '/dashboard/my-mainproject/subprojects/delete/my-subproject/') + '/dashboard/my-mainproject/subprojects/delete/my-subproject/', + ) self.assertEqual(response.status_code, 404) # This URL still doesn't accept GET, 405 response = self.client.get( - '/dashboard/my-mainproject/subprojects/my-subproject/delete/') + '/dashboard/my-mainproject/subprojects/my-subproject/delete/', + ) self.assertEqual(response.status_code, 405) - self.assertTrue(self.subproject in [r.child for r in self.project.subprojects.all()]) + self.assertTrue( + self.subproject in + [r.child for r in self.project.subprojects.all()] + ) # Test POST response = self.client.post( - '/dashboard/my-mainproject/subprojects/my-subproject/delete/') + '/dashboard/my-mainproject/subprojects/my-subproject/delete/', + ) self.assertEqual(response.status_code, 302) - self.assertTrue(self.subproject not in [r.child for r in self.project.subprojects.all()]) - - def test_project_admins_can_delete_subprojects_that_they_are_not_admin_of(self): + self.assertTrue( + self.subproject not in + [r.child for r in self.project.subprojects.all()] + ) + + def test_project_admins_can_delete_subprojects_that_they_are_not_admin_of( + self + ): self.project.users.add(self.user) self.assertFalse(AdminPermission.is_admin(self.user, self.subproject)) response = self.client.post( - '/dashboard/my-mainproject/subprojects/my-subproject/delete/') + '/dashboard/my-mainproject/subprojects/my-subproject/delete/', + ) self.assertEqual(response.status_code, 302) - self.assertTrue(self.subproject not in [r.child for r in self.project.subprojects.all()]) + self.assertTrue( + self.subproject not in + [r.child for r in self.project.subprojects.all()] + ) + + +class BuildViewTests(TestCase): + fixtures = ['eric', 'test_data'] + + def setUp(self): + self.client.login(username='eric', password='test') + + @mock.patch('readthedocs.projects.tasks.update_docs_task') + def test_build_redirect(self, mock): + r = self.client.post('/projects/pip/builds/', {'version_slug': '0.8.1'}) + build = Build.objects.filter(project__slug='pip').latest() + self.assertEqual(r.status_code, 302) + self.assertEqual( + r._headers['location'][1], + '/projects/pip/builds/%s/' % build.pk, + )
Triggering a build manually redirects to build listing
Currently, if you trigger a build manually from the project details page or from the build listing page, the user is redirected to the build listing page. I think it would be better if the user were redirected to the build details page, since the build object is already created. On the details page the user can easily see that the build was triggered, and can wait for completion while checking the commands.

> If we agree on making this change, we should mark this issue as `Good First Issue`
Yeah, this bugs me a lot. It should be a small change. I'd like to take a stab at this issue, if no one would mind... Any pointers as to which part of the codebase needs the relevant changes here? :) @pradyunsg here is the current redirect: https://github.com/rtfd/readthedocs.org/blob/dde104daf23f580887e79d4fae91cf9577fbbb76/readthedocs/builds/views.py#L58-L58; we need to change that.
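Condensing the patch above into the essential change; the lookup helpers below are illustrative stand-ins for the `get_object_or_404` calls the real view makes:

```python
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect

from readthedocs.core.utils import trigger_build


class BuildTriggerMixin(object):

    def post(self, request, project_slug):
        # Illustrative helpers; the real view resolves these via
        # get_object_or_404 against the user's visible querysets.
        project = self._get_project(project_slug)
        version = self._get_version(request.POST['version_slug'])
        # trigger_build now returns the Build record too, so the view
        # can redirect straight to the build detail page.
        _, build = trigger_build(project=project, version=version)
        return HttpResponseRedirect(
            reverse('builds_detail', args=[project.slug, build.pk]),
        )
```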
2018-09-08T07:48:02
readthedocs/readthedocs.org
4,676
readthedocs__readthedocs.org-4676
[ "4648" ]
273a636ece8ae18b798d74590ede42396f8a4216
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -71,7 +71,7 @@ gettext_compact = False html_theme = 'sphinx_rtd_theme' -# html_static_path = ['_static'] +html_static_path = ['_static'] html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] html_logo = 'img/logo.svg' html_theme_options = {
Improve Intro and Getting Started documentation
I think the [introduction](https://docs.readthedocs.io/en/latest/) and [getting started guide](https://docs.readthedocs.io/en/latest/getting_started.html) could use a few improvements to make them easier for brand new users who may not already know about Sphinx/MkDocs/Markdown/reStructuredText and are just looking for a guide on how to write some docs. I also think our introduction could stand some improvements to point users in the right direction. We have a lot of docs, but a few layout and explanation improvements will help users find the right section for them. Here are some specific goals and improvements:

- Make it easier to start a brand new docs project
  * Have a getting started guide for Sphinx
  * Have a getting started guide for Sphinx with CommonMark
  * Have a getting started guide for MkDocs
  * Explain the "why" behind the choice between the above technologies
- Improve the intro paragraphs ("Read the Docs hosts documentation for...") on the index page to explain RTD's value proposition and why somebody should choose Read the Docs.
- Full sentence/paragraph descriptions of the different sections (e.g. User documentation) rather than just a big toctree.
I think this is a great idea, and needed. I still don't fully grok how to do this, either. Aside: Is there a reason there are two milestones, `Documentation` and `Better User Documentation`? Could we join them? Some existing content which might be useful to steal: * http://ericholscher.com/blog/2016/jul/1/sphinx-and-rtd-for-writers/ * https://sphinx-tutorial.readthedocs.io/ > Aside: Is there a reason there are two milestones, Documentation and Better User Documentation? Could we join them? I don't know the answer to this. Perhaps @agjohnson does?
2018-09-28T22:55:34
readthedocs/readthedocs.org
4,696
readthedocs__readthedocs.org-4696
[ "4671" ]
2d9ca7df405a15c3e7e278c68c3fb9f98fe13d36
diff --git a/readthedocs/doc_builder/backends/sphinx.py b/readthedocs/doc_builder/backends/sphinx.py --- a/readthedocs/doc_builder/backends/sphinx.py +++ b/readthedocs/doc_builder/backends/sphinx.py @@ -74,7 +74,12 @@ def get_config_params(self): # TODO this should be handled better in the theme conf_py_path = os.path.join( os.path.sep, - self.config_file, + os.path.dirname( + os.path.relpath( + self.config_file, + self.project.checkout_path(self.version.slug) + ) + ), '', ) remote_version = self.version.commit_name
diff --git a/readthedocs/rtd_tests/tests/test_doc_builder.py b/readthedocs/rtd_tests/tests/test_doc_builder.py --- a/readthedocs/rtd_tests/tests/test_doc_builder.py +++ b/readthedocs/rtd_tests/tests/test_doc_builder.py @@ -38,6 +38,39 @@ def setUp(self): BaseSphinx.type = 'base' BaseSphinx.sphinx_build_dir = tempfile.mkdtemp() + @patch('readthedocs.doc_builder.backends.sphinx.BaseSphinx.docs_dir') + @patch('readthedocs.projects.models.Project.checkout_path') + @override_settings(DONT_HIT_API=True) + def test_conf_py_path(self, checkout_path, docs_dir): + """ + Test the conf_py_path that is added to the conf.py file. + + This value is used from the theme and footer + to build the ``View`` and ``Edit`` on link. + """ + tmp_dir = tempfile.mkdtemp() + checkout_path.return_value = tmp_dir + docs_dir.return_value = tmp_dir + python_env = Virtualenv( + version=self.version, + build_env=self.build_env, + config=None, + ) + base_sphinx = BaseSphinx( + build_env=self.build_env, + python_env=python_env, + ) + + for value, expected in (('conf.py', '/'), ('docs/conf.py', '/docs/')): + base_sphinx.config_file = os.path.join( + tmp_dir, value + ) + params = base_sphinx.get_config_params() + self.assertEqual( + params['conf_py_path'], + expected + ) + @patch( 'readthedocs.doc_builder.backends.sphinx.SPHINX_TEMPLATE_DIR', '/tmp/sphinx-template-dir',
"Edit on GitHub" url broken for sphinx projects ## Details * Read the Docs project URL: https://jsxc.readthedocs.io * Build URL (if applicable): https://github.com/jsxc/documentation ## Expected Result "Edit on GitHub", "View on GitHub" and so on should link to https://github.com/jsxc/documentation. ## Actual Result Those links points to https://github.com/jsxc/documentation/blob/master/home/docs/checkouts/readthedocs.org/user_builds/jsxc/checkouts/latest/conf.py/index.rst As you can see the string `/home/docs/checkouts/readthedocs.org/user_builds/jsxc/checkouts/latest/conf.py` is added to the url. The only thing that I can think of, is that I have currently no tags in my repo. Maybe this is the reason, I don't know.
Thanks for the report, this is a duplicate of #1917. I have a PR that fixes this in #3525. @stsewd I don't think this is a duplicate issue, as this bug is appearing in Sphinx-based projects too. In #4482 the value for `conf_py_path` was changed from a relative to an absolute path. This is passed to the Sphinx build context and then used in the readthedocs default theme to [generate the "Edit on [VCS]" link](https://github.com/rtfd/sphinx_rtd_theme/blob/310043695f75cac495f8bde089a301e53f46d6db/sphinx_rtd_theme/breadcrumbs.html#L45). This is also documented in: https://docs.readthedocs.io/en/latest/vcs.html?highlight=conf_py_path Maybe you need an absolute path here, but we also need to provide a path relative to the checkout's docs root in order to generate a correct "Edit on [VCS]" link. oops, didn't see that your project is sphinx type. Yeah, that's a bug @stsewd that's not my project, I just noticed the same bug in a Sphinx-based project and commented on this same issue instead of opening a new one. Thanks for your commitment.
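To see why the relative-path computation in the patch fixes the link, here is a hedged illustration; the concrete paths are hypothetical, modeled on the URL from the report:

```python
import os

# Hypothetical build paths, shaped like the ones in the broken URL above.
checkout = '/home/docs/checkouts/readthedocs.org/user_builds/jsxc/checkouts/latest'
config_file = os.path.join(checkout, 'conf.py')

# Before the fix: the absolute conf.py path leaked into conf_py_path.
broken = os.path.join(os.path.sep, config_file, '')
# -> '/home/docs/checkouts/readthedocs.org/user_builds/jsxc/checkouts/latest/conf.py/'

# After the fix: only the docs dir relative to the checkout is kept.
fixed = os.path.join(
    os.path.sep,
    os.path.dirname(os.path.relpath(config_file, checkout)),
    '',
)
# -> '/' (or '/docs/' when conf.py lives in a docs/ subdirectory)
```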
2018-10-01T19:14:53
readthedocs/readthedocs.org
4,704
readthedocs__readthedocs.org-4704
[ "4388" ]
a8bd00a517960ea9062033d2b75a7f3462ada3dc
diff --git a/readthedocs/config/config.py b/readthedocs/config/config.py
--- a/readthedocs/config/config.py
+++ b/readthedocs/config/config.py
@@ -128,8 +128,17 @@ class BuildConfigBase(object):
     """
     Config that handles the build of one particular documentation.
 
-    You need to call ``validate`` before the config is ready to use. Also
-    setting the ``output_base`` is required before using it for a build.
+    .. note::
+
+       You need to call ``validate`` before the config is ready to use.
+
+    :param env_config: A dict that contains additional information
+                       about the environment.
+    :param raw_config: A dict with all configuration without validation.
+    :param source_file: The file that contains the configuration.
+                        All paths are relative to this file.
+                        If a dir is given, the configuration was loaded
+                        from another source (like the web admin).
     """
 
     version = None
@@ -139,21 +148,27 @@ def __init__(self, env_config, raw_config, source_file, source_position):
         self.raw_config = raw_config
         self.source_file = source_file
         self.source_position = source_position
-        self.base_path = os.path.dirname(self.source_file)
+        if os.path.isdir(self.source_file):
+            self.base_path = self.source_file
+        else:
+            self.base_path = os.path.dirname(self.source_file)
         self.defaults = self.env_config.get('defaults', {})
 
         self._config = {}
 
     def error(self, key, message, code):
         """Raise an error related to ``key``."""
-        source = '{file} [{pos}]'.format(
-            file=os.path.relpath(self.source_file, self.base_path),
-            pos=self.source_position,
-        )
-        error_message = '{source}: {message}'.format(
-            source=source,
-            message=message,
-        )
+        if not os.path.isdir(self.source_file):
+            source = '{file} [{pos}]'.format(
+                file=os.path.relpath(self.source_file, self.base_path),
+                pos=self.source_position,
+            )
+            error_message = '{source}: {message}'.format(
+                source=source,
+                message=message,
+            )
+        else:
+            error_message = message
         raise InvalidConfig(
             key=key,
             code=code,
@@ -271,10 +286,9 @@ def validate_output_base(self):
         """Validates that ``output_base`` exists and set its absolute path."""
         assert 'output_base' in self.env_config, (
             '"output_base" required in "env_config"')
-        base_path = os.path.dirname(self.source_file)
         output_base = os.path.abspath(
             os.path.join(
-                self.env_config.get('output_base', base_path),
+                self.env_config.get('output_base', self.base_path),
             )
         )
         return output_base
@@ -302,10 +316,9 @@ def validate_base(self):
         if 'base' in self.raw_config:
             base = self.raw_config['base']
         else:
-            base = os.path.dirname(self.source_file)
+            base = self.base_path
         with self.catch_validation_error('base'):
-            base_path = os.path.dirname(self.source_file)
-            base = validate_directory(base, base_path)
+            base = validate_directory(base, self.base_path)
         return base
 
     def validate_build(self):
@@ -452,9 +465,8 @@ def validate_conda(self):
         conda_environment = None
         if 'file' in raw_conda:
             with self.catch_validation_error('conda.file'):
-                base_path = os.path.dirname(self.source_file)
                 conda_environment = validate_file(
-                    raw_conda['file'], base_path
+                    raw_conda['file'], self.base_path
                 )
         conda['environment'] = conda_environment
 
@@ -469,9 +481,8 @@ def validate_requirements_file(self):
         requirements_file = self.raw_config['requirements_file']
         if not requirements_file:
             return None
-        base_path = os.path.dirname(self.source_file)
         with self.catch_validation_error('requirements_file'):
-            validate_file(requirements_file, base_path)
+            validate_file(requirements_file, self.base_path)
         return requirements_file
 
     def validate_formats(self):
diff --git
a/readthedocs/doc_builder/config.py b/readthedocs/doc_builder/config.py --- a/readthedocs/doc_builder/config.py +++ b/readthedocs/doc_builder/config.py @@ -73,7 +73,7 @@ def load_yaml_config(version): config = BuildConfigV1( env_config=env_config, raw_config={}, - source_file=path.join(checkout_path, 'empty'), + source_file=checkout_path, source_position=0, ) config.validate()
diff --git a/readthedocs/config/tests/test_config.py b/readthedocs/config/tests/test_config.py --- a/readthedocs/config/tests/test_config.py +++ b/readthedocs/config/tests/test_config.py @@ -834,6 +834,14 @@ def test_version(self): build = self.get_build_config({}) assert build.version == '2' + def test_correct_error_when_source_is_dir(self, tmpdir): + build = self.get_build_config({}, source_file=str(tmpdir)) + with raises(InvalidConfig) as excinfo: + build.error(key='key', message='Message', code='code') + # We don't have any extra information about + # the source_file. + assert str(excinfo.value) == 'Invalid "key": Message' + def test_formats_check_valid(self): build = self.get_build_config({'formats': ['htmlzip', 'pdf', 'epub']}) build.validate()
Refactor BuildConfig to not use hardcoded constants While reviewing https://github.com/rtfd/readthedocs.org/pull/4379 it came to my attention that we are using some hardcoded constants for logic, like the string `empty` for a path to a nonexistent file. This causes a lot of weird behavior in tests. Instead, we should simply check whether these variables exist. They should use `None` instead of a nonexistent file.
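A minimal sketch of the direction described above, with `get_base_path` as a hypothetical helper name (the actual patch inlines this check in `BuildConfigBase.__init__`):

```python
import os


def get_base_path(source_file):
    # When the configuration comes from the web admin there is no real
    # file on disk, so source_file is the checkout directory itself.
    if os.path.isdir(source_file):
        return source_file
    # Otherwise the config was read from a file, and relative paths are
    # resolved against its parent directory.
    return os.path.dirname(source_file)
```

With this, callers can pass a directory instead of inventing a fake path like `path.join(checkout_path, 'empty')`.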
2018-10-02T17:49:50
readthedocs/readthedocs.org
4710
readthedocs__readthedocs.org-4710
[ "3610" ]
d06d47b33436a7da9f782596476761a162e19dc7
diff --git a/readthedocs/builds/forms.py b/readthedocs/builds/forms.py --- a/readthedocs/builds/forms.py +++ b/readthedocs/builds/forms.py @@ -1,12 +1,19 @@ """Django forms for the builds app.""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + from builtins import object from django import forms +from django.utils.translation import ugettext_lazy as _ -from readthedocs.builds.models import VersionAlias, Version -from readthedocs.projects.models import Project +from readthedocs.builds.models import Version, VersionAlias from readthedocs.core.utils import trigger_build +from readthedocs.projects.models import Project class AliasForm(forms.ModelForm): @@ -33,6 +40,22 @@ class Meta(object): model = Version fields = ['active', 'privacy_level', 'tags'] + def clean_active(self): + active = self.cleaned_data['active'] + if self._is_default_version() and not active: + msg = _( + '{version} is the default version of the project, ' + 'it should be active.' + ) + raise forms.ValidationError( + msg.format(version=self.instance.verbose_name) + ) + return active + + def _is_default_version(self): + project = self.instance.project + return project.default_version == self.instance.slug + def save(self, commit=True): obj = super(VersionForm, self).save(commit=commit) if obj.active and not obj.built and not obj.uploaded: diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py --- a/readthedocs/projects/forms.py +++ b/readthedocs/projects/forms.py @@ -2,7 +2,11 @@ """Project forms.""" from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) from random import choice @@ -23,9 +27,16 @@ from readthedocs.integrations.models import Integration from readthedocs.oauth.models import RemoteRepository from readthedocs.projects import constants +from readthedocs.projects.constants import PUBLIC from readthedocs.projects.exceptions import ProjectSpamError from readthedocs.projects.models import ( - Domain, EmailHook, Feature, Project, ProjectRelationship, WebHook) + Domain, + EmailHook, + Feature, + Project, + ProjectRelationship, + WebHook, +) from readthedocs.redirects.models import Redirect @@ -202,6 +213,24 @@ class Meta(object): # 'num_major', 'num_minor', 'num_point', ) + def __init__(self, *args, **kwargs): + super(ProjectAdvancedForm, self).__init__(*args, **kwargs) + + default_choice = (None, '-' * 9) + all_versions = self.instance.versions.values_list( + 'slug', 'verbose_name' + ) + self.fields['default_branch'].widget = forms.Select( + choices=[default_choice] + list(all_versions) + ) + + active_versions = self.instance.all_active_versions().values_list( + 'slug', 'verbose_name' + ) + self.fields['default_version'].widget = forms.Select( + choices=active_versions + ) + def clean_conf_py_file(self): filename = self.cleaned_data.get('conf_py_file', '').strip() if filename and 'conf.py' not in filename:
diff --git a/readthedocs/rtd_tests/tests/test_build_forms.py b/readthedocs/rtd_tests/tests/test_build_forms.py new file mode 100644 --- /dev/null +++ b/readthedocs/rtd_tests/tests/test_build_forms.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- + +from __future__ import division, print_function, unicode_literals + +from django.test import TestCase +from django_dynamic_fixture import get + +from readthedocs.builds.forms import VersionForm +from readthedocs.builds.models import Version +from readthedocs.projects.constants import PRIVATE +from readthedocs.projects.models import Project + + +class TestVersionForm(TestCase): + + def setUp(self): + self.project = get(Project) + + def test_default_version_is_active(self): + version = get( + Version, + project=self.project, + active=False, + ) + self.project.default_version = version.slug + self.project.save() + + form = VersionForm( + { + 'active': True, + 'privacy_level': PRIVATE, + }, + instance=version + ) + self.assertTrue(form.is_valid()) + + def test_default_version_is_inactive(self): + version = get( + Version, + project=self.project, + active=True, + ) + self.project.default_version = version.slug + self.project.save() + + form = VersionForm( + { + 'active': False, + 'privacy_level': PRIVATE, + }, + instance=version + ) + self.assertFalse(form.is_valid()) + self.assertIn('active', form.errors) diff --git a/readthedocs/rtd_tests/tests/test_project_forms.py b/readthedocs/rtd_tests/tests/test_project_forms.py --- a/readthedocs/rtd_tests/tests/test_project_forms.py +++ b/readthedocs/rtd_tests/tests/test_project_forms.py @@ -1,7 +1,11 @@ # -*- coding: utf-8 -*- from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) import mock from django.contrib.auth.models import User @@ -10,9 +14,17 @@ from django_dynamic_fixture import get from textclassifier.validators import ClassifierValidator +from readthedocs.builds.constants import LATEST +from readthedocs.builds.models import Version +from readthedocs.projects.constants import PRIVATE, PROTECTED, PUBLIC from readthedocs.projects.exceptions import ProjectSpamError from readthedocs.projects.forms import ( - ProjectBasicsForm, ProjectExtraForm, TranslationForm, UpdateProjectForm) + ProjectAdvancedForm, + ProjectBasicsForm, + ProjectExtraForm, + TranslationForm, + UpdateProjectForm, +) from readthedocs.projects.models import Project @@ -94,6 +106,74 @@ def test_import_repo_url(self): self.assertEqual(form.is_valid(), valid, msg=url) +class TestProjectAdvancedForm(TestCase): + + def setUp(self): + self.project = get(Project) + get( + Version, + project=self.project, + slug='public-1', + active=True, + privacy_level=PUBLIC, + ) + get( + Version, + project=self.project, + slug='public-2', + active=True, + privacy_level=PUBLIC, + ) + get( + Version, + project=self.project, + slug='public-3', + active=False, + privacy_level=PROTECTED, + ) + get( + Version, + project=self.project, + slug='private', + active=True, + privacy_level=PRIVATE, + ) + get( + Version, + project=self.project, + slug='protected', + active=True, + privacy_level=PROTECTED, + ) + + def test_list_only_active_versions_on_default_version(self): + form = ProjectAdvancedForm(instance=self.project) + # This version is created automatically by the project on save + self.assertTrue(self.project.versions.filter(slug=LATEST).exists()) + self.assertEqual( + set( + slug + for slug, _ in form.fields['default_version'].widget.choices + ), + {'latest', 
'public-1', 'public-2', 'private', 'protected'}, + ) + + def test_list_all_versions_on_default_branch(self): + form = ProjectAdvancedForm(instance=self.project) + # This version is created automatically by the project on save + self.assertTrue(self.project.versions.filter(slug=LATEST).exists()) + self.assertEqual( + set( + slug + for slug, _ in form.fields['default_branch'].widget.choices + ), + { + None, 'latest', 'public-1', 'public-2', + 'public-3', 'protected', 'private' + }, + ) + + class TestTranslationForms(TestCase): def setUp(self):
Dropdown to select Advanced Settings There are a bunch of input fields under `Admin -> Advanced Settings` that could be converted into a dropdown: * Default branch * Default version (this will limit the user to just the _active_/possible versions instead of writing things that are invalid or do not exist) Also, this would involve more coding (since we need to populate the dropdown with data that we don't have in the database at the moment), but these would be a good addition too: * Requirements file * Python configuration file > This will probably need more discussion regarding the UX design before starting work on the implementation Ref: https://github.com/readthedocs/readthedocs-corporate/issues/137
Referring to my comment on the other issue https://github.com/rtfd/readthedocs.org/issues/3611#issuecomment-365809242: what about listing the valid options (as recommendations) while still letting the user enter a _custom option_? With this, the validation isn't necessary, and it is probably a simpler solution.
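Condensed from the patch above, the core of the change is swapping the free-text widgets for `Select` widgets populated in the form's `__init__`; this sketch keeps only the two relevant fields:

```python
from django import forms

from readthedocs.projects.models import Project


class ProjectAdvancedForm(forms.ModelForm):

    class Meta(object):
        model = Project
        fields = ('default_branch', 'default_version')

    def __init__(self, *args, **kwargs):
        super(ProjectAdvancedForm, self).__init__(*args, **kwargs)
        # Any version (or none at all) can be the default branch...
        default_choice = (None, '-' * 9)
        all_versions = self.instance.versions.values_list(
            'slug', 'verbose_name'
        )
        self.fields['default_branch'].widget = forms.Select(
            choices=[default_choice] + list(all_versions)
        )
        # ...but only *active* versions are valid default versions.
        active_versions = self.instance.all_active_versions().values_list(
            'slug', 'verbose_name'
        )
        self.fields['default_version'].widget = forms.Select(
            choices=active_versions
        )
```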
2018-10-03T02:40:13
readthedocs/readthedocs.org
4721
readthedocs__readthedocs.org-4721
[ "4711" ]
183b1769201635769cf7b81b65081e4ca6be8897
diff --git a/readthedocs/gold/forms.py b/readthedocs/gold/forms.py --- a/readthedocs/gold/forms.py +++ b/readthedocs/gold/forms.py @@ -5,7 +5,10 @@ from builtins import object from django import forms +from django.utils.translation import ugettext_lazy as _ + from readthedocs.payments.forms import StripeModelForm, StripeResourceMixin +from readthedocs.projects.models import Project from .models import LEVEL_CHOICES, GoldUser @@ -88,6 +91,14 @@ def __init__(self, *args, **kwargs): self.projects = kwargs.pop('projects', None) super(GoldProjectForm, self).__init__(*args, **kwargs) + def clean_project(self): + project_slug = self.cleaned_data.get('project', '') + project_instance = Project.objects.filter(slug=project_slug) + if not project_instance.exists(): + raise forms.ValidationError(_('No project found.')) + else: + return project_slug + def clean(self): cleaned_data = super(GoldProjectForm, self).clean() if self.projects.count() < self.user.num_supported_projects:
diff --git a/readthedocs/rtd_tests/tests/test_gold.py b/readthedocs/rtd_tests/tests/test_gold.py --- a/readthedocs/rtd_tests/tests/test_gold.py +++ b/readthedocs/rtd_tests/tests/test_gold.py @@ -26,6 +26,13 @@ def test_adding_projects(self): self.assertEqual(self.golduser.projects.count(), 1) self.assertEqual(resp.status_code, 302) + def test_incorrect_input_when_adding_projects(self): + self.assertEqual(self.golduser.projects.count(), 0) + incorrect_slug = 'xyz-random-incorrect-slug-xyz' + self.assertEqual(Project.objects.filter(slug=incorrect_slug).count(), 0) + resp = self.client.post(reverse('gold_projects'), data={'project': incorrect_slug}) + self.assertFormError(resp, form='form', field='project', errors='No project found.') + def test_too_many_projects(self): self.project2 = get(Project, slug='test2')
Adopting a project as gold user fails ### How to reproduce it 1. sign up as a Gold member 1. go to https://readthedocs.org/accounts/gold/subscription/ 1. select the project that you want to adopt ### Expected Result Adopts the project. ### Actual Result Fails with a 500. https://sentry.io/read-the-docs/readthedocs-org/issues/587668658/ ### The problem This line https://github.com/rtfd/readthedocs.org/blob/44e02def230b937e4eca396864de9fc81f4ef33f/readthedocs/gold/views.py#L109 causes the problem, since we are receiving a "project name" and using it as a "project slug".
@humitos I would like to work on this issue. @dojutsu-user go ahead!
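The fix in the patch above adds a `clean_project` hook so the form validates the slug before the view looks it up; roughly:

```python
from django import forms
from django.utils.translation import ugettext_lazy as _

from readthedocs.projects.models import Project


class GoldProjectForm(forms.Form):
    project = forms.CharField(required=True)

    def clean_project(self):
        # The view later runs Project.objects.get(slug=...), so reject
        # anything that is not an existing slug instead of 500ing there.
        project_slug = self.cleaned_data.get('project', '')
        if not Project.objects.filter(slug=project_slug).exists():
            raise forms.ValidationError(_('No project found.'))
        return project_slug
```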
2018-10-04T07:45:33
readthedocs/readthedocs.org
4723
readthedocs__readthedocs.org-4723
[ "4692" ]
dfc8fc9eba8dc9caae171ca0b3e8f6a71594e088
diff --git a/readthedocs/profiles/views.py b/readthedocs/profiles/views.py --- a/readthedocs/profiles/views.py +++ b/readthedocs/profiles/views.py @@ -20,6 +20,7 @@ from readthedocs.core.forms import UserAdvertisingForm, UserDeleteForm +@login_required def edit_profile( request, form_class, success_url=None, template_name='profiles/private/edit_profile.html', extra_context=None): @@ -93,9 +94,6 @@ def edit_profile( return render(request, template_name, context=context) -edit_profile = login_required(edit_profile) - - @login_required() def delete_account(request): form = UserDeleteForm()
login_required decorator is used at the end of the functions. In the file readthedocs.org/readthedocs/profiles/views.py, the `login_required` decorator is applied at the end of some functions, while for other functions it is used in the form of `@login_required`, which reduces the readability of the code.
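For illustration, the two spellings the issue contrasts are behaviorally equivalent (the view bodies here are placeholders, not the real ones):

```python
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse


def edit_profile(request):
    return HttpResponse('profile form')
edit_profile = login_required(edit_profile)  # easy to miss down here


@login_required  # the requirement is visible up front
def delete_account(request):
    return HttpResponse('delete form')
```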
2018-10-04T08:06:02
readthedocs/readthedocs.org
4733
readthedocs__readthedocs.org-4733
[ "4672" ]
35695d170ec7e1dfa2553bf70bc406e7c960409a
diff --git a/readthedocs/core/views/hooks.py b/readthedocs/core/views/hooks.py --- a/readthedocs/core/views/hooks.py +++ b/readthedocs/core/views/hooks.py @@ -33,35 +33,19 @@ def _build_version(project, slug, already_built=()): All webhook logic should route here to call ``trigger_build``. """ - default = project.default_branch or (project.vcs_repo().fallback_branch) if not project.has_valid_webhook: project.has_valid_webhook = True project.save() - if slug == default and slug not in already_built: - # short circuit versions that are default - # these will build at "latest", and thus won't be - # active - latest_version = project.versions.get(slug=LATEST) - trigger_build(project=project, version=latest_version, force=True) - log.info("(Version build) Building %s:%s", - project.slug, latest_version.slug) - if project.versions.exclude(active=False).filter(slug=slug).exists(): - # Handle the case where we want to build the custom branch too - slug_version = project.versions.get(slug=slug) - trigger_build(project=project, version=slug_version, force=True) - log.info("(Version build) Building %s:%s", - project.slug, slug_version.slug) - return LATEST - - if project.versions.exclude(active=True).filter(slug=slug).exists(): - log.info("(Version build) Not Building %s", slug) - return None - - if slug not in already_built: - version = project.versions.get(slug=slug) + # Previously we were building the latest version (inactive or active) + # when building the default version, + # some users may have relied on this to update the version list #4450 + version = project.versions.filter(active=True, slug=slug).first() + if version and slug not in already_built: + log.info( + "(Version build) Building %s:%s", + project.slug, version.slug, + ) trigger_build(project=project, version=version, force=True) - log.info("(Version build) Building %s:%s", - project.slug, version.slug) return slug log.info("(Version build) Not Building %s", slug)
diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -1,14 +1,18 @@ # -*- coding: utf-8 -*- from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) import base64 import datetime import json -from builtins import str import mock from allauth.socialaccount.models import SocialAccount +from builtins import str from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.http import QueryDict @@ -18,10 +22,11 @@ from rest_framework import status from rest_framework.test import APIClient +from readthedocs.builds.constants import LATEST from readthedocs.builds.models import Build, BuildCommandResult, Version from readthedocs.integrations.models import Integration from readthedocs.oauth.models import RemoteOrganization, RemoteRepository -from readthedocs.projects.models import Feature, Project, APIProject +from readthedocs.projects.models import APIProject, Feature, Project from readthedocs.restapi.views.integrations import GitHubWebhookView from readthedocs.restapi.views.task_views import get_status_data @@ -654,8 +659,14 @@ class IntegrationsTests(TestCase): def setUp(self): self.project = get(Project) - self.version = get(Version, verbose_name='master', active=True, project=self.project) - self.version_tag = get(Version, verbose_name='v1.0', active=True, project=self.project) + self.version = get( + Version, slug='master', verbose_name='master', + active=True, project=self.project + ) + self.version_tag = get( + Version, slug='v1.0', verbose_name='v1.0', + active=True, project=self.project + ) def test_github_webhook_for_branches(self, trigger_build): """GitHub webhook API.""" @@ -896,6 +907,109 @@ def test_generic_api_falls_back_to_token_auth(self, trigger_build): self.assertEqual(resp.status_code, 200) self.assertTrue(resp.data['build_triggered']) + def test_webhook_doesnt_build_latest_if_is_deactivated(self, trigger_build): + client = APIClient() + integration = Integration.objects.create( + project=self.project, + integration_type=Integration.API_WEBHOOK, + ) + + latest_version = self.project.versions.get(slug=LATEST) + latest_version.active = False + latest_version.save() + + default_branch = self.project.versions.get(slug='master') + default_branch.active = False + default_branch.save() + + resp = client.post( + '/api/v2/webhook/{}/{}/'.format( + self.project.slug, + integration.pk, + ), + {'token': integration.token, 'branches': default_branch.slug}, + format='json', + ) + self.assertEqual(resp.status_code, 200) + self.assertFalse(resp.data['build_triggered']) + trigger_build.assert_not_called() + + def test_webhook_builds_only_master(self, trigger_build): + client = APIClient() + integration = Integration.objects.create( + project=self.project, + integration_type=Integration.API_WEBHOOK, + ) + + latest_version = self.project.versions.get(slug=LATEST) + latest_version.active = False + latest_version.save() + + default_branch = self.project.versions.get(slug='master') + + self.assertFalse(latest_version.active) + self.assertTrue(default_branch.active) + + resp = client.post( + '/api/v2/webhook/{}/{}/'.format( + self.project.slug, + integration.pk, + ), + {'token': integration.token, 'branches': default_branch.slug}, + format='json', + ) + self.assertEqual(resp.status_code, 200) + 
self.assertTrue(resp.data['build_triggered']) + self.assertEqual(resp.data['versions'], ['master']) + + def test_webhook_build_latest_and_master(self, trigger_build): + client = APIClient() + integration = Integration.objects.create( + project=self.project, + integration_type=Integration.API_WEBHOOK, + ) + + latest_version = self.project.versions.get(slug=LATEST) + default_branch = self.project.versions.get(slug='master') + + self.assertTrue(latest_version.active) + self.assertTrue(default_branch.active) + + resp = client.post( + '/api/v2/webhook/{}/{}/'.format( + self.project.slug, + integration.pk, + ), + {'token': integration.token, 'branches': default_branch.slug}, + format='json', + ) + self.assertEqual(resp.status_code, 200) + self.assertTrue(resp.data['build_triggered']) + self.assertEqual(set(resp.data['versions']), {'latest', 'master'}) + + def test_webhook_build_another_branch(self, trigger_build): + client = APIClient() + integration = Integration.objects.create( + project=self.project, + integration_type=Integration.API_WEBHOOK, + ) + + version_v1 = self.project.versions.get(slug='v1.0') + + self.assertTrue(version_v1.active) + + resp = client.post( + '/api/v2/webhook/{}/{}/'.format( + self.project.slug, + integration.pk, + ), + {'token': integration.token, 'branches': version_v1.slug}, + format='json', + ) + self.assertEqual(resp.status_code, 200) + self.assertTrue(resp.data['build_triggered']) + self.assertEqual(resp.data['versions'], ['v1.0']) + class APIVersionTests(TestCase): fixtures = ['eric', 'test_data']
latest is reactivated on commit I've disabled the "latest" version. However, every time I push a commit to master, the version is reactivated on RTD. I'd like for the version to stay disabled even on new commits.
I was able to replicate this on the .org site. I guess this only happens via webhooks: as master is latest, rtd builds that branch and activates latest. The code that is changing the `active` attr is https://github.com/rtfd/readthedocs.org/blob/35695d170ec7e1dfa2553bf70bc406e7c960409a/readthedocs/core/views/hooks.py#L40-L45 (here slug is `master`, but it triggers a build of `latest`). I'll continue investigating why we are triggering a build of latest there; it may be a bug or some weird design. I think I understand why we are doing that: we should validate that the version is active first.
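The eventual fix collapses that branch of `_build_version` to a single active-version lookup; condensed from the patch above (`trigger_build` comes from the surrounding module):

```python
def _build_version(project, slug, already_built=()):
    # Look the slug up among *active* versions only; a version the user
    # deactivated (like a disabled "latest") is never rebuilt, and so
    # never re-activated.
    version = project.versions.filter(active=True, slug=slug).first()
    if version and slug not in already_built:
        trigger_build(project=project, version=version, force=True)
        return slug
    return None
```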
2018-10-04T23:20:15
readthedocs/readthedocs.org
4754
readthedocs__readthedocs.org-4754
[ "4751" ]
8c7e18d4c52525519ebf66df9a0fcd898a22156e
diff --git a/readthedocs/doc_builder/exceptions.py b/readthedocs/doc_builder/exceptions.py --- a/readthedocs/doc_builder/exceptions.py +++ b/readthedocs/doc_builder/exceptions.py @@ -24,7 +24,9 @@ class BuildEnvironmentError(BuildEnvironmentException): GENERIC_WITH_BUILD_ID = ugettext_noop( 'There was a problem with Read the Docs while building your documentation. ' - 'Please report this to us with your build id ({build_id}).', + 'Please try again later. ' + 'However, if this problem persists, ' + 'please report this to us with your build id ({build_id}).', )
diff --git a/readthedocs/rtd_tests/tests/test_doc_building.py b/readthedocs/rtd_tests/tests/test_doc_building.py --- a/readthedocs/rtd_tests/tests/test_doc_building.py +++ b/readthedocs/rtd_tests/tests/test_doc_building.py @@ -339,7 +339,9 @@ def test_failing_execution_with_unexpected_exception(self): 'length': mock.ANY, 'error': ( 'There was a problem with Read the Docs while building your ' - 'documentation. Please report this to us with your build id (123).' + 'documentation. Please try again later. However, if this ' + 'problem persists, please report this to us with your ' + 'build id (123).' ), 'setup': '', 'output': '', @@ -485,9 +487,10 @@ def _inner(): 'exit_code': 1, 'length': 0, 'error': ( - "There was a problem with Read the Docs while building your " - "documentation. Please report this to us with your build id " - "(123)." + 'There was a problem with Read the Docs while building your ' + 'documentation. Please try again later. However, if this ' + 'problem persists, please report this to us with your ' + 'build id (123).' ), 'setup': '', 'output': '',
Improve unexpected error message Many users are reporting / filing an issue in our issue tracker when this message is shown to them, which is logical because it's what the message says. > There was a problem with Read the Docs while building your documentation. Please report this to us with your build id (1234) I think we should improve this message by saying something like "if this problem persists, please report..." or something similar to that. Otherwise, since it's sometimes a temporary failure, we get tons of reports.
I would like to take this issue. My suggestion for the error message is this: "There was a problem with Read The Docs while building your documentation. This might be a temporary error. We suggest you to try again. However, if this problem persists, please report to us with your build id (1234)." > This might be a temporary error. We suggest you to try again "try again _later_." is maybe better. The rest is perfect to me.
2018-10-13T05:17:22
readthedocs/readthedocs.org
4771
readthedocs__readthedocs.org-4771
[ "4768" ]
d11402d538abf109a0de2b8a995caac784b9d3cc
diff --git a/readthedocs/restapi/urls.py b/readthedocs/restapi/urls.py --- a/readthedocs/restapi/urls.py +++ b/readthedocs/restapi/urls.py @@ -63,7 +63,6 @@ ] function_urls = [ - url(r'embed/', core_views.embed, name='embed'), url(r'docurl/', core_views.docurl, name='docurl'), url(r'footer_html/', footer_views.footer_html, name='footer_html'), ] diff --git a/readthedocs/restapi/views/core_views.py b/readthedocs/restapi/views/core_views.py --- a/readthedocs/restapi/views/core_views.py +++ b/readthedocs/restapi/views/core_views.py @@ -6,11 +6,6 @@ from rest_framework.renderers import JSONRenderer from rest_framework.response import Response -import json -import requests - -from django.conf import settings -from django.core.cache import cache from django.shortcuts import get_object_or_404 from readthedocs.builds.constants import LATEST @@ -44,50 +39,3 @@ def docurl(request): return Response({ 'url': make_document_url(project=project, version=version.slug, page=doc) }) - - [email protected]_view(['GET']) [email protected]_classes((permissions.AllowAny,)) [email protected]_classes((JSONRenderer,)) -def embed(request): - """ - Embed a section of content from any Read the Docs page. - - Returns headers and content that matches the queried section. - - ### Arguments - - * project (required) - * doc (required) - * version (default latest) - * section - - ### Example - - GET https://readthedocs.org/api/v2/embed/?project=requests&doc=index&section=User%20Guide - - # Current Request - """ - project = request.GET.get('project') - version = request.GET.get('version', LATEST) - doc = request.GET.get('doc') - section = request.GET.get('section') - - if project is None or doc is None: - return Response({'error': 'Need project and doc'}, status=status.HTTP_400_BAD_REQUEST) - - embed_cache = cache.get('embed:%s' % project) - if embed_cache: - embed = json.loads(embed_cache) - else: - try: - resp = requests.get( - '{host}/api/v1/embed/'.format(host=settings.GROK_API_HOST), - params={'project': project, 'version': version, 'doc': doc, 'section': section} - ) - embed = resp.json() - cache.set('embed:%s' % project, resp.content, 1800) - except Exception as e: - return Response({'error': '%s' % e.msg}, status=status.HTTP_400_BAD_REQUEST) - - return Response(embed)
Remove /embed endpoint Similar to #4731 https://github.com/rtfd/readthedocs.org/blob/604e1fa0374a0ba1b242b88e6f242eff77b7b6c3/readthedocs/restapi/urls.py#L66-L66 Raised in https://github.com/rtfd/readthedocs.org/pull/4731/files#r222783446
@stsewd Will it be okay if I take this issue? Sure
2018-10-16T17:24:18
readthedocs/readthedocs.org
4781
readthedocs__readthedocs.org-4781
[ "4777" ]
df85fefc5f59c7a00bcc1ffe23965efe75e8c895
diff --git a/readthedocs/gold/forms.py b/readthedocs/gold/forms.py --- a/readthedocs/gold/forms.py +++ b/readthedocs/gold/forms.py @@ -81,6 +81,7 @@ def get_subscription(self): class GoldProjectForm(forms.Form): project = forms.CharField( required=True, + help_text='Enter the project\'s slug' ) def __init__(self, *args, **kwargs):
Make form for adopting a project a ChoiceField This is the form for adopting projects for Gold Members. ![screenshot from 2018-10-18 11-29-40](https://user-images.githubusercontent.com/29149191/47134225-9a130f80-d2c9-11e8-8c40-22a3e22dc002.png) Currently the input field does not give any idea of what to enter. The project's slug needs to be entered, but this is written nowhere, which creates confusion, and the user ends up entering the project's name, which gives an error. This issue was discussed in the comments [#427135794](https://github.com/rtfd/readthedocs.org/pull/4721#issuecomment-427135794), [#427137725](https://github.com/rtfd/readthedocs.org/pull/4721#issuecomment-427137725) and [#427139109](https://github.com/rtfd/readthedocs.org/pull/4721#issuecomment-427139109)
@stsewd Will adding the `help_text` be enough? ![screenshot from 2018-10-18 12-09-43](https://user-images.githubusercontent.com/29149191/47135702-f7f62600-d2ce-11e8-80cf-156558c4dcd6.png) That field should probably be a `ChoiceField` where the user can only select the available options (I'm assuming that the User can only adopt projects where he is a maintainer --but I'm not 100% sure, you will need to check the code to know that) https://github.com/rtfd/readthedocs.org/blob/df85fefc5f59c7a00bcc1ffe23965efe75e8c895/readthedocs/gold/views.py#L108-L110 @humitos from the code, it seems that the user can adopt **any** project. Mmm... OK. It doesn't make too much sense to me, since it's a weird case to want to support a project that you are not involved with. So, if we want to keep this behavior, my `ChoiceField` solution doesn't fit here. A different approach, which I think we don't have anywhere else, would be to use a field with autocomplete --not sure if we want that, though. Probably not. I'd say that for now, the `help_text` is the first step, but it doesn't solve the whole problem anyway. @humitos yeah, `help_text` won't solve the whole problem, but it at least gives the idea/hint that the project's name is not to be entered here.
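The first-step fix that landed is just the `help_text`; a sketch of the field as patched:

```python
from django import forms


class GoldProjectForm(forms.Form):
    # The help text renders under the input, hinting that the form
    # expects the project's slug rather than its display name.
    project = forms.CharField(
        required=True,
        help_text='Enter the project\'s slug',
    )
```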
2018-10-18T15:16:38
readthedocs/readthedocs.org
4795
readthedocs__readthedocs.org-4795
[ "3801" ]
cef5bc44e27de0729ddb7514009bdf98b90e10d8
diff --git a/readthedocs/core/middleware.py b/readthedocs/core/middleware.py --- a/readthedocs/core/middleware.py +++ b/readthedocs/core/middleware.py @@ -69,6 +69,8 @@ def process_request(self, request): # Support ports during local dev public_domain in host or public_domain in full_host ): + if not Project.objects.filter(slug=subdomain).exists(): + raise Http404(_('Project not found')) request.subdomain = True request.slug = subdomain request.urlconf = SUBDOMAIN_URLCONF
diff --git a/readthedocs/rtd_tests/tests/test_middleware.py b/readthedocs/rtd_tests/tests/test_middleware.py --- a/readthedocs/rtd_tests/tests/test_middleware.py +++ b/readthedocs/rtd_tests/tests/test_middleware.py @@ -47,6 +47,13 @@ def test_proper_subdomain(self): self.assertEqual(request.urlconf, self.urlconf_subdomain) self.assertEqual(request.slug, 'pip') + @override_settings(PRODUCTION_DOMAIN='readthedocs.org') + def test_wrong_subdomain(self): + http_host = 'xyz-wrong-sub-domain-xyz.readthedocs.org' + request = self.factory.get(self.url, HTTP_HOST=http_host) + with self.assertRaises(Http404): + self.middleware.process_request(request) + @override_settings(PRODUCTION_DOMAIN='readthedocs.org') def test_restore_urlconf_after_request(self): """
Return 404 at SubdomainMiddleware level if project doesn't exist While deploying .com we found that `SubdomainMiddleware` doesn't check the existence of the project that it's injecting at `request.slug`. So, any middleware that relies on this to do some extra work will probably do something like `Project.objects.get(slug=request.slug)` and will fail. We worked around it by adding a `try/except` block in the middleware that uses this, but we should probably just raise a 404 inside `SubdomainMiddleware` itself instead of continuing with the flow. Code: https://github.com/rtfd/readthedocs.org/blob/74bbb3ac23b6b06a70517bc1b301961db6821e32/readthedocs/core/middleware.py#L72
@humitos I would like to take this issue up. If I'm not wrong, you probably want something like this

```python
# Checking if the project exists or not
from django.shortcuts import get_object_or_404

get_object_or_404(Project, slug=subdomain)
request.subdomain = True
request.slug = subdomain
request.urlconf = SUBDOMAIN_URLCONF
return None
```

With `get_object_or_404`, it will show an error and not go further. @dojutsu-user since this is code that is executed a lot, retrieving the whole Project object may be overkill, since we are not going to use that data anyway. You may just need to check whether the project exists with `.exists()` and use an if in this case.
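A sketch of the cheaper variant suggested above, with `validate_subdomain` as a hypothetical extraction of the middleware logic:

```python
from django.http import Http404
from django.utils.translation import ugettext_lazy as _

from readthedocs.projects.models import Project


def validate_subdomain(request, subdomain):
    # .exists() issues a minimal EXISTS query instead of hydrating a
    # whole Project instance on every subdomain request.
    if not Project.objects.filter(slug=subdomain).exists():
        raise Http404(_('Project not found'))
    request.subdomain = True
    request.slug = subdomain
```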
2018-10-23T09:54:50
readthedocs/readthedocs.org
4800
readthedocs__readthedocs.org-4800
[ "4705" ]
7fb4b18bc9cf792207a3960df0158cfbc649be56
diff --git a/readthedocs/config/config.py b/readthedocs/config/config.py --- a/readthedocs/config/config.py +++ b/readthedocs/config/config.py @@ -36,7 +36,6 @@ 'ConfigError', 'ConfigOptionNotSupportedError', 'InvalidConfig', - 'ProjectConfig', ) ALL = 'all' @@ -110,12 +109,10 @@ class InvalidConfig(ConfigError): message_template = 'Invalid "{key}": {error}' - def __init__(self, key, code, error_message, source_file=None, - source_position=None): + def __init__(self, key, code, error_message, source_file=None): self.key = key self.code = code self.source_file = source_file - self.source_position = source_position message = self.message_template.format( key=key, code=code, @@ -144,11 +141,10 @@ class BuildConfigBase(object): version = None - def __init__(self, env_config, raw_config, source_file, source_position): + def __init__(self, env_config, raw_config, source_file): self.env_config = env_config self.raw_config = raw_config self.source_file = source_file - self.source_position = source_position if os.path.isdir(self.source_file): self.base_path = self.source_file else: @@ -160,10 +156,7 @@ def __init__(self, env_config, raw_config, source_file, source_position): def error(self, key, message, code): """Raise an error related to ``key``.""" if not os.path.isdir(self.source_file): - source = '{file} [{pos}]'.format( - file=os.path.relpath(self.source_file, self.base_path), - pos=self.source_position, - ) + source = os.path.relpath(self.source_file, self.base_path) error_message = '{source}: {message}'.format( source=source, message=message, @@ -175,7 +168,6 @@ def error(self, key, message, code): code=code, error_message=error_message, source_file=self.source_file, - source_position=self.source_position, ) @contextmanager @@ -189,7 +181,6 @@ def catch_validation_error(self, key): code=error.code, error_message=str(error), source_file=self.source_file, - source_position=self.source_position, ) def pop(self, name, container, default, raise_ex): @@ -1043,16 +1034,6 @@ def submodules(self): return Submodules(**self._config['submodules']) -class ProjectConfig(list): - - """Wrapper for multiple build configs.""" - - def validate(self): - """Validates each configuration build.""" - for build in self: - build.validate() - - def load(path, env_config): """ Load a project configuration and the top-most build config for a given path. 
@@ -1068,10 +1049,9 @@ def load(path, env_config): 'No configuration file found', code=CONFIG_REQUIRED ) - build_configs = [] with open(filename, 'r') as configuration_file: try: - configs = parse(configuration_file.read()) + config = parse(configuration_file.read()) except ParseError as error: raise ConfigError( 'Parse error in {filename}: {message}'.format( @@ -1080,23 +1060,19 @@ def load(path, env_config): ), code=CONFIG_SYNTAX_INVALID, ) - for i, config in enumerate(configs): - allow_v2 = env_config.get('allow_v2') - if allow_v2: - version = config.get('version', 1) - else: - version = 1 - build_config = get_configuration_class(version)( - env_config, - config, - source_file=filename, - source_position=i, - ) - build_configs.append(build_config) + allow_v2 = env_config.get('allow_v2') + if allow_v2: + version = config.get('version', 1) + else: + version = 1 + build_config = get_configuration_class(version)( + env_config, + config, + source_file=filename, + ) - project_config = ProjectConfig(build_configs) - project_config.validate() - return project_config + build_config.validate() + return build_config def get_configuration_class(version): diff --git a/readthedocs/config/parser.py b/readthedocs/config/parser.py --- a/readthedocs/config/parser.py +++ b/readthedocs/config/parser.py @@ -17,18 +17,17 @@ class ParseError(Exception): def parse(stream): """ - Take file-like object and return a list of project configurations. + Take file-like object and return a project configuration. - The files need be valid YAML and only contain mappings as documents. + The file need be valid YAML and only contain mappings as document. Everything else raises a ``ParseError``. """ try: - configs = list(yaml.safe_load_all(stream)) + config = yaml.safe_load(stream) except yaml.YAMLError as error: raise ParseError('YAML: {message}'.format(message=error)) - if not configs: + if not isinstance(config, dict): + raise ParseError('Expected mapping') + if not config: raise ParseError('Empty config') - for config in configs: - if not isinstance(config, dict): - raise ParseError('Expected mapping') - return configs + return config diff --git a/readthedocs/doc_builder/config.py b/readthedocs/doc_builder/config.py --- a/readthedocs/doc_builder/config.py +++ b/readthedocs/doc_builder/config.py @@ -65,7 +65,7 @@ def load_yaml_config(version): config = load_config( path=checkout_path, env_config=env_config, - )[0] + ) except InvalidConfig: # This is a subclass of ConfigError, so has to come first raise @@ -74,7 +74,6 @@ def load_yaml_config(version): env_config=env_config, raw_config={}, source_file=checkout_path, - source_position=0, ) config.validate() return config
diff --git a/readthedocs/config/tests/test_config.py b/readthedocs/config/tests/test_config.py --- a/readthedocs/config/tests/test_config.py +++ b/readthedocs/config/tests/test_config.py @@ -17,7 +17,6 @@ ConfigError, ConfigOptionNotSupportedError, InvalidConfig, - ProjectConfig, load, ) from readthedocs.config.config import ( @@ -81,13 +80,11 @@ } -def get_build_config(config, env_config=None, source_file='readthedocs.yml', - source_position=0): +def get_build_config(config, env_config=None, source_file='readthedocs.yml'): return BuildConfigV1( env_config or {}, config, source_file=source_file, - source_position=source_position, ) @@ -131,10 +128,7 @@ def test_load_empty_config_file(tmpdir): def test_minimal_config(tmpdir): apply_fs(tmpdir, minimal_config_dir) base = str(tmpdir) - config = load(base, env_config) - assert isinstance(config, ProjectConfig) - assert len(config) == 1 - build = config[0] + build = load(base, env_config) assert isinstance(build, BuildConfigV1) @@ -145,10 +139,7 @@ def test_load_version1(tmpdir): ''') }) base = str(tmpdir) - config = load(base, get_env_config({'allow_v2': True})) - assert isinstance(config, ProjectConfig) - assert len(config) == 1 - build = config[0] + build = load(base, get_env_config({'allow_v2': True})) assert isinstance(build, BuildConfigV1) @@ -159,10 +150,7 @@ def test_load_version2(tmpdir): ''') }) base = str(tmpdir) - config = load(base, get_env_config({'allow_v2': True})) - assert isinstance(config, ProjectConfig) - assert len(config) == 1 - build = config[0] + build = load(base, get_env_config({'allow_v2': True})) assert isinstance(build, BuildConfigV2) @@ -183,31 +171,18 @@ def test_yaml_extension(tmpdir): apply_fs(tmpdir, yaml_extension_config_dir) base = str(tmpdir) config = load(base, env_config) - assert len(config) == 1 + assert isinstance(config, BuildConfigV1) def test_build_config_has_source_file(tmpdir): base = str(apply_fs(tmpdir, minimal_config_dir)) - build = load(base, env_config)[0] + build = load(base, env_config) assert build.source_file == os.path.join(base, 'readthedocs.yml') - assert build.source_position == 0 - - -def test_build_config_has_source_position(tmpdir): - base = str(apply_fs(tmpdir, multiple_config_dir)) - builds = load(base, env_config) - assert len(builds) == 2 - first, second = filter( - lambda b: not b.source_file.endswith('nested/readthedocs.yml'), - builds, - ) - assert first.source_position == 0 - assert second.source_position == 1 def test_build_config_has_list_with_single_empty_value(tmpdir): base = str(apply_fs(tmpdir, config_with_explicit_empty_list)) - build = load(base, env_config)[0] + build = load(base, env_config) assert isinstance(build, BuildConfigV1) assert build.formats == [] @@ -217,7 +192,6 @@ def test_config_requires_name(): {'output_base': ''}, {}, source_file='readthedocs.yml', - source_position=0, ) with raises(InvalidConfig) as excinfo: build.validate() @@ -230,7 +204,6 @@ def test_build_requires_valid_name(): {'output_base': ''}, {'name': 'with/slashes'}, source_file='readthedocs.yml', - source_position=0, ) with raises(InvalidConfig) as excinfo: build.validate() @@ -554,7 +527,6 @@ def test_valid_build_config(): env_config, minimal_config, source_file='readthedocs.yml', - source_position=0, ) build.validate() assert build.name == 'docs' @@ -576,7 +548,6 @@ def it_validates_to_abspath(tmpdir): get_env_config(), {'base': '../docs'}, source_file=source_file, - source_position=0, ) build.validate() assert build.base == str(tmpdir.join('docs')) @@ -597,7 +568,6 @@ def 
it_fails_if_base_is_not_a_string(tmpdir): get_env_config(), {'base': 1}, source_file=str(tmpdir.join('readthedocs.yml')), - source_position=0, ) with raises(InvalidConfig) as excinfo: build.validate() @@ -610,7 +580,6 @@ def it_fails_if_base_does_not_exist(tmpdir): get_env_config(), {'base': 'docs'}, source_file=str(tmpdir.join('readthedocs.yml')), - source_position=0, ) with raises(InvalidConfig) as excinfo: build.validate() @@ -626,7 +595,6 @@ def it_fails_if_build_is_invalid_option(tmpdir): get_env_config(), {'build': {'image': 3.0}}, source_file=str(tmpdir.join('readthedocs.yml')), - source_position=0, ) with raises(InvalidConfig) as excinfo: build.validate() @@ -642,7 +610,6 @@ def it_fails_on_python_validation(tmpdir): 'python': {'version': '3.3'}, }, source_file=str(tmpdir.join('readthedocs.yml')), - source_position=0, ) build.validate_build() with raises(InvalidConfig) as excinfo: @@ -659,7 +626,6 @@ def it_works_on_python_validation(tmpdir): 'python': {'version': '3.3'}, }, source_file=str(tmpdir.join('readthedocs.yml')), - source_position=0, ) build.validate_build() build.validate_python() @@ -670,7 +636,6 @@ def it_works(tmpdir): get_env_config(), {'build': {'image': 'latest'}}, source_file=str(tmpdir.join('readthedocs.yml')), - source_position=0, ) build.validate() assert build.build.image == 'readthedocs/build:latest' @@ -681,7 +646,6 @@ def default(tmpdir): get_env_config(), {}, source_file=str(tmpdir.join('readthedocs.yml')), - source_position=0, ) build.validate() assert build.build.image == 'readthedocs/build:2.0' @@ -697,7 +661,6 @@ def it_priorities_image_from_env_config(tmpdir, image): get_env_config({'defaults': defaults}), {'build': {'image': 'latest'}}, source_file=str(tmpdir.join('readthedocs.yml')), - source_position=0, ) build.validate() assert build.build.image == image @@ -787,7 +750,6 @@ def test_build_validate_calls_all_subvalidators(tmpdir): {}, {}, source_file=str(tmpdir.join('readthedocs.yml')), - source_position=0, ) with patch.multiple( BuildConfigV1, @@ -803,20 +765,6 @@ def test_build_validate_calls_all_subvalidators(tmpdir): BuildConfigV1.validate_output_base.assert_called_with() -def test_validate_project_config(): - with patch.object(BuildConfigV1, 'validate') as build_validate: - project = ProjectConfig([ - BuildConfigV1( - env_config, - minimal_config, - source_file='readthedocs.yml', - source_position=0, - ), - ]) - project.validate() - assert build_validate.call_count == 1 - - def test_load_calls_validate(tmpdir): apply_fs(tmpdir, minimal_config_dir) base = str(tmpdir) @@ -843,13 +791,12 @@ def test_config_filenames_regex(correct_config_filename): class TestBuildConfigV2(object): - def get_build_config(self, config, env_config=None, - source_file='readthedocs.yml', source_position=0): + def get_build_config( + self, config, env_config=None, source_file='readthedocs.yml'): return BuildConfigV2( env_config or {}, config, source_file=source_file, - source_position=source_position, ) def test_version(self): diff --git a/readthedocs/config/tests/test_parser.py b/readthedocs/config/tests/test_parser.py --- a/readthedocs/config/tests/test_parser.py +++ b/readthedocs/config/tests/test_parser.py @@ -28,36 +28,35 @@ def test_parse_bad_type(): def test_parse_single_config(): buf = StringIO(u'base: path') config = parse(buf) - assert isinstance(config, list) - assert len(config) == 1 - assert config[0]['base'] == 'path' + assert isinstance(config, dict) + assert config['base'] == 'path' def test_parse_null_value(): buf = StringIO(u'base: null') config = 
parse(buf) - assert config[0]['base'] is None + assert config['base'] is None def test_parse_empty_value(): buf = StringIO(u'base:') config = parse(buf) - assert config[0]['base'] is None + assert config['base'] is None def test_parse_empty_string_value(): buf = StringIO(u'base: ""') config = parse(buf) - assert config[0]['base'] == '' + assert config['base'] == '' def test_parse_empty_list(): buf = StringIO(u'base: []') config = parse(buf) - assert config[0]['base'] == [] + assert config['base'] == [] -def test_parse_multiple_configs_in_one_file(): +def test_do_not_parse_multiple_configs_in_one_file(): buf = StringIO( u''' base: path @@ -67,8 +66,5 @@ def test_parse_multiple_configs_in_one_file(): nested: works: true ''') - configs = parse(buf) - assert isinstance(configs, list) - assert len(configs) == 2 - assert configs[0]['base'] == 'path' - assert configs[1]['nested'] == {'works': True} + with raises(ParseError): + parse(buf) diff --git a/readthedocs/rtd_tests/tests/test_config_integration.py b/readthedocs/rtd_tests/tests/test_config_integration.py --- a/readthedocs/rtd_tests/tests/test_config_integration.py +++ b/readthedocs/rtd_tests/tests/test_config_integration.py @@ -13,7 +13,7 @@ from mock import MagicMock, PropertyMock, patch from readthedocs.builds.models import Version -from readthedocs.config import ALL, BuildConfigV1, InvalidConfig, ProjectConfig +from readthedocs.config import ALL, BuildConfigV1, InvalidConfig from readthedocs.config.tests.utils import apply_fs from readthedocs.doc_builder.config import load_yaml_config from readthedocs.doc_builder.environments import LocalBuildEnvironment @@ -27,9 +27,7 @@ def create_load(config=None): """ Mock out the function of the build load function. - This will create a ProjectConfig list of BuildConfigV1 objects and validate - them. The default load function iterates over files and builds up a list of - objects. Instead of mocking all of this, just mock the end result. + This will create a BuildConfigV1 object and validate it. """ if config is None: config = {} @@ -41,14 +39,11 @@ def inner(path=None, env_config=None): } if env_config is not None: env_config_defaults.update(env_config) - yaml_config = ProjectConfig([ - BuildConfigV1( - env_config_defaults, - config, - source_file='readthedocs.yml', - source_position=0, - ), - ]) + yaml_config = BuildConfigV1( + env_config_defaults, + config, + source_file='readthedocs.yml', + ) yaml_config.validate() return yaml_config
Design around multiple configurations in one file Related code https://github.com/rtfd/readthedocs.org/blob/a98441717ea50b19fe6bb0f1ea52df092dbc8190/readthedocs/config/config.py#L988-L988 I'm not sure what use case this has. Maybe a monorepo? But still, rtd doesn't support that workflow (more than one doc per project).
I'm +1 on getting rid of the logic that searches for a readthedocs.yml file. I think we should only support one top-level config file; all of the other UX patterns are weird. * Config in some other directory: why not just put it top level? * Multiple configs: which one are we supposed to use? * Config in a submodule or something: why would you want to use it? Oops, I think I put the wrong title here; for multiple configuration files the issue is #4669. This one is about a configuration file with multiple configurations (YAML supports having more than one top-level dict). @stsewd I found this issue a bit confusing. Can you expand on the purpose of it and what needs to be done/decided here? Our code is designed to support several configurations in one yaml file (YAML allows having more than one top-level dict), but in reality we only use the _first_ one, so we have some dead code here, and maybe we aren't going to use it (v2 uses only one too). I see... So, > But still, rtd doesn't support that workflow (more than one doc per project). RTD does not support more than one doc per project, but if the repository has multiple translations the user will import the same repo many times as different (sub)projects. In that case, we may want to allow: 1. multiple YAML files 2. multiple configs in the same YAML file 3. none of the above, and force building all the projects with the same YAML, since the only thing that should change on each different translation are the `.po` files. I'm mentioning 3) even though it's not a problem in that particular case, but as an example that it _could_ be useful in some weird case, maybe. At first glance, it seems we could kill that code. I think our translations workflow needs to be ripped apart eventually. Adding a translation should not require a separate file in the repo. However, currently, you shouldn't need a separate config file for this; the language selector is in the admin UI. We've discussed this in other tickets, but translations will eventually just be another project setting (ie, just enable the "Español" translation for your project like you do with versions, not add a separate project). I'm still -1 on multiple config files I think; this is in line with other services like Travis and CircleCI. Just to be clear, this is about having multiple configs in **one file** p: anyway, it looks like we don't have a use case for this; it is kind of the same as having multiple files.
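The parser change in the patch above is what enforces this: `yaml.safe_load` reads exactly one document, so a file with multiple `---`-separated configurations now fails loudly instead of silently using the first one. Roughly:

```python
import yaml


class ParseError(Exception):
    pass


def parse(stream):
    # safe_load accepts a single YAML document; a multi-document stream
    # raises a YAMLError, which we surface as a ParseError.
    try:
        config = yaml.safe_load(stream)
    except yaml.YAMLError as error:
        raise ParseError('YAML: {message}'.format(message=error))
    if not isinstance(config, dict):
        raise ParseError('Expected mapping')
    if not config:
        raise ParseError('Empty config')
    return config
```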
2018-10-24T01:54:38
readthedocs/readthedocs.org
4801
readthedocs__readthedocs.org-4801
[ "4061" ]
9917933cf84ce1d5db46f461bdc717a9e66d808e
diff --git a/readthedocs/projects/validators.py b/readthedocs/projects/validators.py --- a/readthedocs/projects/validators.py +++ b/readthedocs/projects/validators.py @@ -62,9 +62,12 @@ def __call__(self, value): allow_private_repos = getattr(settings, 'ALLOW_PRIVATE_REPOS', False) public_schemes = ['https', 'http', 'git', 'ftps', 'ftp'] private_schemes = ['ssh', 'ssh+git'] + local_schemes = ['file'] valid_schemes = public_schemes if allow_private_repos: valid_schemes += private_schemes + if getattr(settings, 'DEBUG'): # allow `file://` urls in dev + valid_schemes += local_schemes url = urlparse(value) # Malicious characters go first
Locally hosted RTD instance doesn't allow git file:/// URLs ## Details I installed a local RTD instance according to the Installation guide and imported a test project. Now, I want to import my git project manually. ## Expected Result I expected that the instance should accept all valid Git URLs. ## Actual Result When I enter a file:///.../../x.git URL, the manual import page shows an "Invalid scheme for URL" error. I checked that I can clone this URL from a terminal.
Makes sense to me. It should be easy to grep for the git logic we use for Git urls, and then to extend that to match this protocol. Would you be interested in helping with a PR? Yes, I can help with a PR. Let me look at the code... These are disallowed on readthedocs.org for security reasons. If the work here gets completed it should probably be an option that is disabled by default. `git file:///` URLs only make sense on local rtd instances, when the instance is on the same machine as a Git repo. They certainly are a security risk on public-facing servers. However, many people have their local repos on a development machine and may want to install a local instance and generate a set of docs. Currently, after installation of a new instance, it's impossible to manually import a project without exposing the repo on the net, which is a hassle and also a security risk. I think this use case may be common enough to justify an install option which unblocks `git file:///` URLs during the installation. As @davidfischer points out, this option should be disabled by default. I'm re-labelling this since it's not a bug in RTD but a feature that is useful for local RTD installation. Marking as Community Effort.
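A sketch of the scheme check, with `is_valid_scheme` and `allow_local` as hypothetical names (the actual patch appends `file` to the valid schemes only when `settings.DEBUG` is on), shown here with Python 3's urllib:

```python
from urllib.parse import urlparse


def is_valid_scheme(repo_url, allow_local=False):
    # file:// only makes sense when the repo lives on the same machine
    # as the RTD instance, so it stays behind an opt-in flag.
    valid_schemes = ['https', 'http', 'git', 'ftps', 'ftp']
    if allow_local:
        valid_schemes.append('file')
    return urlparse(repo_url).scheme in valid_schemes


print(is_valid_scheme('file:///home/user/project/.git'))        # False
print(is_valid_scheme('file:///home/user/project/.git', True))  # True
```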
2018-10-24T11:41:29
readthedocs/readthedocs.org
4810
readthedocs__readthedocs.org-4810
[ "4793" ]
dfc8fc9eba8dc9caae171ca0b3e8f6a71594e088
diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py --- a/readthedocs/projects/tasks.py +++ b/readthedocs/projects/tasks.py @@ -605,7 +605,6 @@ def update_app_instances(self, html=False, localmedia=False, search=False, if html: version = api_v2.version(self.version.pk) version.patch({ - 'active': True, 'built': True, }) except HttpClientError:
Find out where/why rtd activates a version after building it Not sure how to classify this issue, but we kind of need to figure out why we are activating a version after building it; it may be a bug or it may be by design. Raised in https://github.com/rtfd/readthedocs.org/pull/4733#issuecomment-430012283
I think I found it https://github.com/rtfd/readthedocs.org/blob/a4291d17728b8df6b7a9ce57537c5d620416e485/readthedocs/projects/tasks.py#L605-L610 /cc @humitos Still not sure why we do that :man_shrugging: @stsewd good work! First of all, I think we don't need to send the `active=True` in that PATCH. On the other hand, I came up with some questions: * why would we build a version that is not active? * in case we want to build a version that is not active, why would we want to mark it as active after building it? @agjohnson @ericholscher you may remember some use case for this. Otherwise, I think we can remove the `active` argument from the PATCH. Also, we should confirm that after removing this we can still work around #4001. I'm lost on the two points above as well. I'd say we don't want to build it at all if it's inactive, and we certainly shouldn't be changing active=True. Perhaps this was a case to catch the initial import of a project? When the `latest` version is maybe not active yet? Just guessing at this...
2018-10-25T17:46:01
readthedocs/readthedocs.org
4,811
readthedocs__readthedocs.org-4811
[ "4258" ]
a2eee0265b7b85356ac7bb495b34eedbb948857c
diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -122,7 +122,9 @@ def validate_submodules(self, config): return True, submodules.keys() def fetch(self): - code, _, _ = self.run('git', 'fetch', '--tags', '--prune') + code, _, _ = self.run( + 'git', 'fetch', '--tags', '--prune', '--prune-tags', + ) if code != 0: raise RepositoryError
diff --git a/readthedocs/rtd_tests/tests/test_backend.py b/readthedocs/rtd_tests/tests/test_backend.py --- a/readthedocs/rtd_tests/tests/test_backend.py +++ b/readthedocs/rtd_tests/tests/test_backend.py @@ -1,21 +1,33 @@ # -*- coding: utf-8 -*- from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) +import os from os.path import exists +from tempfile import mkdtemp import django_dynamic_fixture as fixture import pytest from django.contrib.auth.models import User -from mock import Mock +from mock import Mock, patch from readthedocs.config import ALL from readthedocs.projects.exceptions import RepositoryError from readthedocs.projects.models import Feature, Project from readthedocs.rtd_tests.base import RTDTestCase from readthedocs.rtd_tests.utils import ( - create_git_tag, make_test_git, make_test_hg) + create_git_branch, + create_git_tag, + delete_git_branch, + delete_git_tag, + make_test_git, + make_test_hg, +) class TestGitBackend(RTDTestCase): @@ -118,6 +130,51 @@ def test_check_invalid_submodule_urls(self): repo.checkout('invalidsubmodule') self.assertEqual(e.msg, RepositoryError.INVALID_SUBMODULES) + @patch('readthedocs.projects.models.Project.checkout_path') + def test_fetch_clean_tags_and_branches(self, checkout_path): + upstream_repo = self.project.repo + create_git_tag(upstream_repo, 'v01') + create_git_tag(upstream_repo, 'v02') + create_git_branch(upstream_repo, 'newbranch') + + local_repo = os.path.join(mkdtemp(), 'local') + os.mkdir(local_repo) + checkout_path.return_value = local_repo + + repo = self.project.vcs_repo() + repo.clone() + + delete_git_tag(upstream_repo, 'v02') + delete_git_branch(upstream_repo, 'newbranch') + + # We still have all branches and tags in the local repo + self.assertEqual( + set(['v01', 'v02']), + set(vcs.verbose_name for vcs in repo.tags) + ) + self.assertEqual( + set([ + 'relativesubmodule', 'invalidsubmodule', + 'master', 'submodule', 'newbranch', + ]), + set(vcs.verbose_name for vcs in repo.branches) + ) + + repo.checkout() + + # We don't have the eliminated branches and tags in the local repo + self.assertEqual( + set(['v01']), + set(vcs.verbose_name for vcs in repo.tags) + ) + self.assertEqual( + set([ + 'relativesubmodule', 'invalidsubmodule', + 'master', 'submodule' + ]), + set(vcs.verbose_name for vcs in repo.branches) + ) + class TestHgBackend(RTDTestCase): def setUp(self):
Delete untracked tags on fetch step Currently, if the user deletes a tag, they need to wipe the environment for this change to be reflected in their version list. There are some workarounds to delete untracked tags (they require more than two commands). But I found that newer versions of git have the `--prune-tags` option, used as `git fetch --prune --prune-tags` (`git >= 2.17`). We need to update git on the servers (we use 2.7.4) and change the fetch command. Or we can find a way to wipe the environment when we detect a case like this. Raised in https://github.com/rtfd/readthedocs.org/pull/3913#issuecomment-396673349
I think we can implement the command you mentioned here very soon, since we are migrating our servers to Ubuntu 18.04, which includes git 2.17.1 by default: https://packages.ubuntu.com/bionic/git The PR needed for this should be very simple and shouldn't have any side effects :) I'm :+1: on adding `--prune-tags` to our current command. Blocking until .com updates to 18.04
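For projects stuck on an older git, the multi-command workaround mentioned above amounts to deleting every local tag and re-fetching the ones that still exist upstream. A hypothetical helper, not RTD code, just to illustrate:

```python
import subprocess


def prune_local_tags(repo_path):
    """Drop local tags that no longer exist upstream (git < 2.17 workaround)."""
    tags = subprocess.check_output(
        ['git', 'tag', '-l'], cwd=repo_path, universal_newlines=True,
    ).split()
    if tags:
        # Delete all local tags, then re-fetch only the surviving ones.
        subprocess.check_call(['git', 'tag', '-d'] + tags, cwd=repo_path)
    subprocess.check_call(['git', 'fetch', '--tags', '--prune'], cwd=repo_path)
```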
2018-10-25T20:56:48
readthedocs/readthedocs.org
4,814
readthedocs__readthedocs.org-4814
[ "4790" ]
dfc8fc9eba8dc9caae171ca0b3e8f6a71594e088
diff --git a/readthedocs/doc_builder/backends/mkdocs.py b/readthedocs/doc_builder/backends/mkdocs.py --- a/readthedocs/doc_builder/backends/mkdocs.py +++ b/readthedocs/doc_builder/backends/mkdocs.py @@ -15,7 +15,7 @@ from django.template import loader as template_loader from readthedocs.doc_builder.base import BaseBuilder -from readthedocs.doc_builder.exceptions import BuildEnvironmentError +from readthedocs.doc_builder.exceptions import MkDocsYAMLParseError from readthedocs.projects.models import Feature log = logging.getLogger(__name__) @@ -99,7 +99,7 @@ def load_yaml_config(self): if hasattr(exc, 'problem_mark'): mark = exc.problem_mark note = ' (line %d, column %d)' % (mark.line + 1, mark.column + 1) - raise BuildEnvironmentError( + raise MkDocsYAMLParseError( 'Your mkdocs.yml could not be loaded, ' 'possibly due to a syntax error{note}'.format(note=note) ) diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py --- a/readthedocs/doc_builder/environments.py +++ b/readthedocs/doc_builder/environments.py @@ -37,7 +37,7 @@ from .exceptions import ( BuildEnvironmentCreationFailed, BuildEnvironmentError, BuildEnvironmentException, BuildEnvironmentWarning, BuildTimeoutError, - ProjectBuildsSkippedError, VersionLockedError, YAMLParseError) + ProjectBuildsSkippedError, VersionLockedError, YAMLParseError, MkDocsYAMLParseError) log = logging.getLogger(__name__) @@ -438,6 +438,7 @@ class BuildEnvironment(BaseEnvironment): ProjectBuildsSkippedError, YAMLParseError, BuildTimeoutError, + MkDocsYAMLParseError ) def __init__(self, project=None, version=None, build=None, config=None, diff --git a/readthedocs/doc_builder/exceptions.py b/readthedocs/doc_builder/exceptions.py --- a/readthedocs/doc_builder/exceptions.py +++ b/readthedocs/doc_builder/exceptions.py @@ -7,7 +7,6 @@ class BuildEnvironmentException(Exception): - message = None status_code = None @@ -21,7 +20,6 @@ def get_default_message(self): class BuildEnvironmentError(BuildEnvironmentException): - GENERIC_WITH_BUILD_ID = ugettext_noop( 'There was a problem with Read the Docs while building your documentation. ' 'Please report this to us with your build id ({build_id}).', @@ -29,32 +27,33 @@ class BuildEnvironmentError(BuildEnvironmentException): class BuildEnvironmentCreationFailed(BuildEnvironmentError): - message = ugettext_noop('Build environment creation failed') class VersionLockedError(BuildEnvironmentError): - message = ugettext_noop('Version locked, retrying in 5 minutes.') status_code = 423 class ProjectBuildsSkippedError(BuildEnvironmentError): - message = ugettext_noop('Builds for this project are temporarily disabled') class YAMLParseError(BuildEnvironmentError): - GENERIC_WITH_PARSE_EXCEPTION = ugettext_noop( 'Problem parsing YAML configuration. {exception}', ) class BuildTimeoutError(BuildEnvironmentError): - message = ugettext_noop('Build exited due to time out') class BuildEnvironmentWarning(BuildEnvironmentException): pass + + +class MkDocsYAMLParseError(BuildEnvironmentError): + GENERIC_WITH_PARSE_EXCEPTION = ugettext_noop( + 'Problem parsing MkDocs YAML configuration. {exception}', + )
If mkdocs.yml is broken raise a specific exception and catch it properly Currently, we are raising `BuildEnvironmentError`, which is too generic and causes the error to be logged to Sentry even though it's not an app bug but a user configuration problem. https://github.com/rtfd/readthedocs.org/blob/df85fefc5f59c7a00bcc1ffe23965efe75e8c895/readthedocs/doc_builder/backends/mkdocs.py#L84-L87 We should raise something like `MkDocsYAMLError` (or a better name) and add it as a warning exception at: https://github.com/rtfd/readthedocs.org/blob/df85fefc5f59c7a00bcc1ffe23965efe75e8c895/readthedocs/doc_builder/environments.py#L433-L441
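For reference, the line/column note in that message comes straight from PyYAML's `problem_mark`; a small standalone sketch of how it surfaces (the broken YAML string is made up):

```python
import yaml

broken = 'site_name: Demo\ntheme: [readthedocs'  # unterminated flow sequence

try:
    yaml.safe_load(broken)
except yaml.YAMLError as exc:
    if hasattr(exc, 'problem_mark'):
        mark = exc.problem_mark  # 0-based line/column of the syntax error
        print('line %d, column %d' % (mark.line + 1, mark.column + 1))
```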
2018-10-26T21:53:51
readthedocs/readthedocs.org
4,831
readthedocs__readthedocs.org-4831
[ "872" ]
70bf58b2e59d2fe88f31ca8f7c6200bee2232858
diff --git a/readthedocs/restapi/views/model_views.py b/readthedocs/restapi/views/model_views.py --- a/readthedocs/restapi/views/model_views.py +++ b/readthedocs/restapi/views/model_views.py @@ -243,7 +243,7 @@ class BuildViewSetBase(UserSelectViewSet): serializer_class = BuildSerializer admin_serializer_class = BuildAdminSerializer model = Build - filter_fields = ('project__slug',) + filter_fields = ('project__slug', 'commit') class BuildViewSet(SettingsOverrideObject):
diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -414,6 +414,22 @@ def test_get_invalid_raw_log(self): resp = client.get('/api/v2/build/{0}.txt'.format(404)) self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND) + def test_build_filter_by_commit(self): + """ + Create a build with commit + Should return the list of builds according to the + commit query params + """ + get(Build, project_id=1, version_id=1, builder='foo', commit='test') + get(Build, project_id=2, version_id=1, builder='foo', commit='other') + client = APIClient() + api_user = get(User, staff=False, password='test') + client.force_authenticate(user=api_user) + resp = client.get('/api/v2/build/', {'commit': 'test'}, format='json') + self.assertEqual(resp.status_code, 200) + build = resp.data + self.assertEqual(len(build['results']), 1) + class APITests(TestCase): fixtures = ['eric.json', 'test_data.json']
Provide an API to query the build status by commit In order to do a check before release that everything is ok, I would like to have a way to obtain the current build status for a given commit. So, in addition to: ``` GET /api/v1/build/{id}/ ``` also have this: ``` GET /api/v1/commit/{sha1}/ ``` or ``` GET /api/v1/{user}/{project}/commit/{sha1}/ ``` Is this possible right now?
Neat. We currently don't record the commit hash for builds. This is a good idea, and would allow that kind of API. Any news on this? Any way I can help? New (simpler?) idea: Is it possible to insert the commit hash in the sphinx html? If it can be done already, then I just need to download the page and check that it matches my latest hash. This should be part of newly built documentation with our theme already; we pass the commit hash in via the API. We are storing the commit hash on the build object now, but we don't expose the commit as a first-class endpoint in the api. You can find similar API endpoints here: https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/restapi/views/core_views.py Adding a `build_status` endpoint to that file with a test would be a good place to start. This looks like it might have some overlap with #2251 which also adds commit ids to builds. @agjohnson This "good first issue" is quite old. Can we improve the proposed solution, to encourage contribution? Same with https://github.com/rtfd/readthedocs.org/issues/1045. Would it be possible to get more up-to-date information on this issue? I'm interested to have a look. Thanks. > Would it be possible to get more up-to-date information on this issue? I'm interested to have a look. Thanks for your interest. I think the original aspect of the issue is still valid: "build an API endpoint that receives a project and a commit hash and returns the status (failed/success) of that build" The `Build` object should have the commit hash: https://github.com/rtfd/readthedocs.org/blob/1af444173481df9a7b16e015fc6b12abe8155e7e/readthedocs/builds/models.py#L430-L431 So, I think we have all the information to create that endpoint. This could live next to the others in this file: https://github.com/rtfd/readthedocs.org/blob/1af444173481df9a7b16e015fc6b12abe8155e7e/readthedocs/restapi/views/model_views.py @Alig1493 let me know if you are able to start with that info or not. Please, keep us updated on this :) Thanks! Will the api endpoint be under v1 or v2? v2 is the right place. v1 is deprecated and shouldn't be used. @humitos There is already `api/v2/build/<id>`, so what should the endpoint for this be? Yes. What I understood from the original report is to have the ability, given the sha and the project, to know the status of that build. Right now a build id is needed, and you can't get one from a sha. Makes sense? @humitos I understand. I wanted to explain that there is already an endpoint at `api/v2/build/<id>`, so adding another endpoint like `api/v2/build/<commit hash>` would confuse users. So I was wondering what the API endpoint will look like. Can you please clarify?
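Once the `commit` filter from the patch above is in place, the lookup works as a query parameter on the existing list endpoint; a hedged usage sketch (the project slug and sha are placeholders, and the serialized fields may differ):

```python
import requests

resp = requests.get(
    'https://readthedocs.org/api/v2/build/',
    params={'project__slug': 'pip', 'commit': 'abc1234'},
)
resp.raise_for_status()
for build in resp.json()['results']:  # DRF paginated list response
    print(build['id'], build.get('success'))
```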
2018-10-31T12:17:52
readthedocs/readthedocs.org
4,833
readthedocs__readthedocs.org-4833
[ "4673" ]
70bf58b2e59d2fe88f31ca8f7c6200bee2232858
diff --git a/readthedocs/core/views/__init__.py b/readthedocs/core/views/__init__.py --- a/readthedocs/core/views/__init__.py +++ b/readthedocs/core/views/__init__.py @@ -116,8 +116,16 @@ def server_error_404(request, exception=None, template_name='404.html'): # pyli Marking exception as optional to make /404/ testing page to work. """ response = get_redirect_response(request, path=request.get_full_path()) + if response: - return response + if response.url == request.build_absolute_uri(): + # check that we do have a response and avoid infinite redirect + log.warning( + 'Infinite Redirect: FROM URL is the same than TO URL. url=%s', + response.url, + ) + else: + return response r = render(request, template_name) r.status_code = 404 return r
diff --git a/readthedocs/rtd_tests/tests/test_redirects.py b/readthedocs/rtd_tests/tests/test_redirects.py --- a/readthedocs/rtd_tests/tests/test_redirects.py +++ b/readthedocs/rtd_tests/tests/test_redirects.py @@ -127,6 +127,34 @@ def setUp(self): self.pip = Project.objects.get(slug='pip') self.pip.versions.create_latest() + @override_settings(USE_SUBDOMAIN=True) + def test_redirect_prefix_infinite(self): + """ + Avoid infinite redirects. + + If the URL hit is the same that the URL returned for redirection, we + return a 404. + + These examples comes from this issue: + * https://github.com/rtfd/readthedocs.org/issues/4673 + """ + Redirect.objects.create( + project=self.pip, redirect_type='prefix', + from_url='/', + ) + r = self.client.get('/redirect', HTTP_HOST='pip.readthedocs.org') + self.assertEqual(r.status_code, 302) + self.assertEqual( + r['Location'], 'http://pip.readthedocs.org/en/latest/redirect.html') + + r = self.client.get('/redirect/', HTTP_HOST='pip.readthedocs.org') + self.assertEqual(r.status_code, 302) + self.assertEqual( + r['Location'], 'http://pip.readthedocs.org/en/latest/redirect/') + + r = self.client.get('/en/latest/redirect/', HTTP_HOST='pip.readthedocs.org') + self.assertEqual(r.status_code, 404) + @override_settings(USE_SUBDOMAIN=True) def test_redirect_root(self): Redirect.objects.create(
Redirect full path to default version I'd like to be able to create versionless links to the documentation, for use in error messages, code comments, etc. For example, a message like `see https://click.palletsprojects.com/windows for more information`. I don't want to use URLs with versions because I would have to remember to modify all instances of it before releasing a new version. Currently, only the root path redirects to the default version, other paths raise a 404. Instead, the path should be preserved and appended to the default version path on redirect. ``` Works: https://click.palletsprojects.com/ -> https://click.palletsprojects.com/en/7.x/ Doesn't work, 404: https://click.palletsprojects.com/windows -> https://click.palletsprojects.com/en/7.x/windows ``` I do not want to use the "latest" or "stable" versions because I would like the URLs that people land on and share to contain the actual version. I already do this with the transitional redirects I set up from `click.pocoo.org` to `click.palletsprojects.com`. A similar approach could probably be used to extend RTD's default redirect. ```nginx location ~ ^/dev(.*)$ { return 301 https://click.palletsprojects.com/en/master$1; } location ~ ^/(\d)(.*)$ { return 301 https://click.palletsprojects.com/en/$1.x$2; } location ~ ^/latest(.*)$ { return 301 https://click.palletsprojects.com/en/7.x$1; } location / { return 301 https://click.palletsprojects.com/en/7.x$request_uri; } ```
I think this is what you need https://github.com/rtfd/readthedocs.org/issues/2422#issuecomment-417764518 Although, you'll need to change the redirect on each release An exact redirect from `/$rest`, or a prefix redirect from `/`, to `/en/1.0.x` works, but as a side effect it causes infinite redirects for 404s. If the page is https://itsdangerous.palletsprojects.com/en/1.0.x/signer, `/signer` redirects correctly, but `/en/`, `/en/bad-version`, and `/en/1.0.x/bad-page` all just keep getting appended and redirected. The first two shouldn't really come up, but the last one is an issue because the user sees a strange error instead of a 404. The redirects catch too much; they should only trigger for 404s that don't already start with the `/$lang/$version/` prefix being redirected to. I'd rather not have to update these redirects as new releases occur either, as part of the draw of moving to RTD was that we could drop our custom bots and processes for building docs. @davidism your issue caught my attention because I thought that we supported this. I found a _hacky solution_, I think. This URL, * https://gh-rtd-project-a.readthedocs.io/redirect, will bring you to * https://gh-rtd-project-a.readthedocs.io/en/redirect/redirect.html (`en` being the default language and `redirect` the default version) -- which I suppose is what you want. How did I do it? I created a `Prefix Redirect` with `From URL` as just `/`. This brought other issues: * it produces an infinite redirection with a trailing `/`, https://gh-rtd-project-a.readthedocs.io/redirect/ * I found that the form can be submitted with `From URL` left empty, which doesn't make any sense and sends all 404 URLs into infinite redirection ---- From your examples, ``` location ~ ^/dev(.*)$ { return 301 https://click.palletsprojects.com/en/master$1; } ``` This one can be achieved by an `Exact Redirect` with the `$rest` attribute, I think. * From URL: `/dev/$rest` * To URL: `/en/master/` ---- A similar redirect can be done for this one, ``` location ~ ^/latest(.*)$ { return 301 https://click.palletsprojects.com/en/7.x$1; } ``` * From URL: `/latest/$rest` * To URL: `/en/7.x/` ---- The _hacky way_ that I found should solve this one (although it should be implemented in a better way), ``` location / { return 301 https://click.palletsprojects.com/en/7.x$request_uri; } ``` ---- Finally, maybe this can be achieved by * From URL: `/$rest` * To URL: `/en/` ``` location ~ ^/(\d)(.*)$ { return 301 https://click.palletsprojects.com/en/$1.x$2; } ``` So, accessing `/1.x/section/file.html` will redirect to `/en/1.x/section/file.html` ---- I'm listing all of this here, but I know it doesn't sound like a "good answer". I want to be sure whether, even in a _hacky way_, we can make this work or not. This will probably need some changes in our implementation of redirects -- which is kind of complicated. Finally, if you add a _namespace_ in the URLs you want to link, like `/redirect-to-default-version/`, and add a `Prefix Redirect`, this will work: https://gh-rtd-project-a.readthedocs.io/redirect-to-default-version/redirect.html With a shorter namespace, https://gh-rtd-project-a.readthedocs.io/r/redirect.html So, your URLs will be in the form https://click.palletsprojects.com/r/windows Does any of this make sense to you? :grin: Thanks, I started using prefix and exact redirects in an earlier comment. The problem with all these is that any 404 now produces an infinite redirect with a weird repeated URL. It's not end-user friendly.
> The problem with all these is that any 404 now produces an infinite redirect with a weird repeated URL. It's not end-user friendly. If that bug is fixed and just returns a 404, that would be enough for your use case? Yeah, I think that should cover it.
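The guard in the patch above comes down to refusing any redirect whose target equals the URL already being served, so a bad prefix redirect degrades to a plain 404 instead of a loop. Roughly (a sketch with a hypothetical helper name, not the exact view code):

```python
def apply_redirect(request, response):
    # A redirect pointing back at the URL we are serving would loop
    # forever; let the caller fall through to the regular 404 page.
    if response is not None and response.url != request.build_absolute_uri():
        return response
    return None
```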
2018-10-31T12:32:28
readthedocs/readthedocs.org
4,845
readthedocs__readthedocs.org-4845
[ "4844" ]
0f5d979c221da7140138359fc06660ebec6fff9b
diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -323,11 +323,11 @@ def save(self, *args, **kwargs): # pylint: disable=arguments-differ for owner in self.users.all(): assign('view_project', owner, self) try: - if self.default_branch: - latest = self.versions.get(slug=LATEST) - if latest.identifier != self.default_branch: - latest.identifier = self.default_branch - latest.save() + latest = self.versions.get(slug=LATEST) + default_branch = self.get_default_branch() + if latest.identifier != default_branch: + latest.identifier = default_branch + latest.save() except Exception: log.exception('Failed to update latest identifier')
diff --git a/readthedocs/rtd_tests/tests/test_project_forms.py b/readthedocs/rtd_tests/tests/test_project_forms.py --- a/readthedocs/rtd_tests/tests/test_project_forms.py +++ b/readthedocs/rtd_tests/tests/test_project_forms.py @@ -16,7 +16,13 @@ from readthedocs.builds.constants import LATEST from readthedocs.builds.models import Version -from readthedocs.projects.constants import PRIVATE, PROTECTED, PUBLIC +from readthedocs.projects.constants import ( + PRIVATE, + PROTECTED, + PUBLIC, + REPO_TYPE_GIT, + REPO_TYPE_HG, +) from readthedocs.projects.exceptions import ProjectSpamError from readthedocs.projects.forms import ( ProjectAdvancedForm, @@ -29,6 +35,7 @@ class TestProjectForms(TestCase): + @mock.patch.object(ClassifierValidator, '__call__') def test_form_spam(self, mocked_validator): """Form description field fails spam validation.""" @@ -83,7 +90,7 @@ def test_import_repo_url(self): ('ssh+git://github.com/humitos/foo', True), ('[email protected]:strangeuser/readthedocs.git', True), ('[email protected]:22/_ssh/docs', True), - ] + common_urls + ] + common_urls with override_settings(ALLOW_PRIVATE_REPOS=False): for url, valid in public_urls: @@ -115,6 +122,47 @@ def test_empty_slug(self): self.assertFalse(form.is_valid()) self.assertIn('name', form.errors) + def test_changing_vcs_should_change_latest(self): + """When changing the project's VCS, latest should be changed too.""" + project = get(Project, repo_type=REPO_TYPE_HG, default_branch=None) + latest = project.versions.get(slug=LATEST) + self.assertEqual(latest.identifier, 'default') + + form = ProjectBasicsForm( + { + 'repo': 'http://github.com/test/test', + 'name': 'name', + 'repo_type': REPO_TYPE_GIT, + }, + instance=project, + ) + self.assertTrue(form.is_valid()) + form.save() + latest.refresh_from_db() + self.assertEqual(latest.identifier, 'master') + + def test_changing_vcs_should_not_change_latest_is_not_none(self): + """ + When changing the project's VCS, + we should respect the custom default branch. + """ + project = get(Project, repo_type=REPO_TYPE_HG, default_branch='custom') + latest = project.versions.get(slug=LATEST) + self.assertEqual(latest.identifier, 'custom') + + form = ProjectBasicsForm( + { + 'repo': 'http://github.com/test/test', + 'name': 'name', + 'repo_type': REPO_TYPE_GIT, + }, + instance=project, + ) + self.assertTrue(form.is_valid()) + form.save() + latest.refresh_from_db() + self.assertEqual(latest.identifier, 'custom') + class TestProjectAdvancedForm(TestCase):
checking out wrong branch name ## Details * Read the Docs project URL: https://readthedocs.org/projects/flatland/ * Build URL (if applicable): https://readthedocs.org/projects/flatland/builds/8034952/ * Read the Docs username (if applicable): thomaswaldmann ## Expected Result Build docs from the master branch. ## Actual Result ``` git checkout --force default error: pathspec 'default' did not match any file(s) known to git. ``` So it tries to check out `default` instead of `master`. I tried changing the branch in the settings, but I cannot select `master`, and not selecting anything (`------`) does not help either.
Note: the project has recently switched from bitbucket/mercurial to github/git, in case that matters. Building the docs for the `0.8` tag does not work either. I already tried "cleaning" the build environment (as offered on the rtfd site); that did not help either. I think there was an open issue already for this (I'm searching now.) Did you try changing the default branch from the advanced settings? Yes, as I said: no `master` there. And not selecting anything (`------`) does obviously **not** choose the VCS default branch name as the help text there says (which would be `master` in the case of git). Smells like a bug. Found the ticket; it was closed recently because it was stale https://github.com/rtfd/readthedocs.org/issues/1170, and the current workaround is recreating the project. Yes, deleting and recreating the project solves the problem. Having to do that sucks, though. It's likely just a form field not getting updated / processed correctly, right? Note: deletion of the project invalidated the build URL given in the top post. Ok, I was able to reproduce without having a real mercurial repo; anyone who makes a mistake when creating the project and chooses the wrong repo type can hit this problem. > Having to do that sucks, though. It's likely just a form field not getting updated / processed correctly, right? Yeah, I'm investigating now. So, the problem is that we are relying on the `LATEST` slug rather than calling `get_default_branch`; we could update the latest version, or change the places where we are not using `get_default_branch`.
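The fix keeps the `latest` version's identifier in sync with `get_default_branch()`, which prefers the user's explicit choice and otherwise falls back to the VCS default. A simplified sketch of that logic (not the exact model code):

```python
def get_default_branch(project):
    # The explicit setting from Advanced Settings wins.
    if project.default_branch:
        return project.default_branch
    # Otherwise the VCS decides: e.g. 'master' for git, 'default' for hg.
    return project.vcs_repo().fallback_branch
```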
2018-11-01T05:08:27
readthedocs/readthedocs.org
4,851
readthedocs__readthedocs.org-4851
[ "1820" ]
bc248aa6a7d2bb54e88e779fe4f78b9f0266acf4
diff --git a/readthedocs/builds/models.py b/readthedocs/builds/models.py --- a/readthedocs/builds/models.py +++ b/readthedocs/builds/models.py @@ -2,36 +2,55 @@ """Models for the builds app.""" from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) import logging import os.path import re -from builtins import object from shutil import rmtree +from builtins import object from django.conf import settings from django.core.urlresolvers import reverse from django.db import models from django.utils.encoding import python_2_unicode_compatible -from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext +from django.utils.translation import ugettext_lazy as _ from guardian.shortcuts import assign from taggit.managers import TaggableManager from readthedocs.core.utils import broadcast from readthedocs.projects.constants import ( - BITBUCKET_URL, GITHUB_URL, GITLAB_URL, PRIVACY_CHOICES, PRIVATE) + BITBUCKET_URL, + GITHUB_URL, + GITLAB_URL, + PRIVACY_CHOICES, + PRIVATE, +) from readthedocs.projects.models import APIProject, Project from .constants import ( - BRANCH, BUILD_STATE, BUILD_STATE_FINISHED, BUILD_TYPES, LATEST, - NON_REPOSITORY_VERSIONS, STABLE, TAG, VERSION_TYPES) + BRANCH, + BUILD_STATE, + BUILD_STATE_FINISHED, + BUILD_TYPES, + LATEST, + NON_REPOSITORY_VERSIONS, + STABLE, + TAG, + VERSION_TYPES, +) from .managers import VersionManager from .querysets import BuildQuerySet, RelatedBuildQuerySet, VersionQuerySet from .utils import ( - get_bitbucket_username_repo, get_github_username_repo, - get_gitlab_username_repo) + get_bitbucket_username_repo, + get_github_username_repo, + get_gitlab_username_repo, +) from .version_slug import VersionSlugField DEFAULT_VERSION_PRIVACY_LEVEL = getattr( @@ -193,6 +212,10 @@ def identifier_friendly(self): return self.identifier[:8] return self.identifier + @property + def is_editable(self): + return self.type == BRANCH + def get_subdomain_url(self): private = self.privacy_level == PRIVATE return self.project.get_docs_url(
diff --git a/readthedocs/rtd_tests/tests/test_footer.py b/readthedocs/rtd_tests/tests/test_footer.py --- a/readthedocs/rtd_tests/tests/test_footer.py +++ b/readthedocs/rtd_tests/tests/test_footer.py @@ -1,6 +1,10 @@ # -*- coding: utf-8 -*- from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) import mock from django.test import TestCase @@ -11,13 +15,15 @@ from readthedocs.core.middleware import FooterNoSessionMiddleware from readthedocs.projects.models import Project from readthedocs.restapi.views.footer_views import ( - footer_html, get_version_compare_data) + footer_html, + get_version_compare_data, +) from readthedocs.rtd_tests.mocks.paths import fake_paths_by_regex class Testmaker(APITestCase): fixtures = ['test_data'] - url = '/api/v2/footer_html/?project=pip&version=latest&page=index' + url = '/api/v2/footer_html/?project=pip&version=latest&page=index&docroot=/' factory = APIRequestFactory() @classmethod @@ -99,6 +105,24 @@ def test_show_version_warning(self): response = self.render() self.assertTrue(response.data['show_version_warning']) + def test_show_edit_on_github(self): + version = self.pip.versions.get(slug=LATEST) + version.type = BRANCH + version.save() + response = self.render() + self.assertIn('On GitHub', response.data['html']) + self.assertIn('View', response.data['html']) + self.assertIn('Edit', response.data['html']) + + def test_not_show_edit_on_github(self): + version = self.pip.versions.get(slug=LATEST) + version.type = TAG + version.save() + response = self.render() + self.assertIn('On GitHub', response.data['html']) + self.assertIn('View', response.data['html']) + self.assertNotIn('Edit', response.data['html']) + class TestVersionCompareFooter(TestCase): fixtures = ['test_data']
"edit on github" / "view on github" links for "stable" -> 404 See there: http://borgbackup.readthedocs.org/en/stable/ Link at top right points to: https://github.com/borgbackup/borg/blob/55fef2457e483701254e5b2a7d4a1a60963971a8/docs/index.rst (which is 404) Same for the view/edit links in the bottom left box, they also point to same 404 location. OTOH, the docs shown for "stable" are (currently) correctly showing 0.28.2 release, which is the latest release. I tried a "stable" rebuild, did not help.
it would seem more reliable to show a link to a branch there... 55fef2457e483701254e5b2a7d4a1a60963971a8 is the hash of the 0.28.2 _tag_ on github: ``` $ git show 55fef2457e483701254e5b2a7d4a1a60963971a8 | head tag 0.28.2 Tagger: Thomas Waldmann <[email protected]> Date: Sun Nov 15 22:01:29 2015 +0100 tagged/signed release 0.28.2 -----BEGIN PGP SIGNATURE----- Version: GnuPG v1 iQIcBAABCgAGBQJWSPKpAAoJECQ6z6lR944BBt4P/2sRtiN7prKpK7dO8QAPuEiM Tb4gzaLJRf1ZjSGwFGQ+Ma3YXf3OBMG+ZAZIUtn7q4b3BoetuekCIWcGc9FPxOT5 ``` it seems that GH doesn't resolve that transparently, so it could be a github bug as well... Ah, interesting. I only looked in the git log output for the hash, but tags do not show up there. Could somebody please have a look? This ticket has not been triaged yet, as we have not had time to focus on this. Feel free to supply test cases or a patch that addresses the issue you are seeing. Could this be fixed please? We get new tickets on our tracker all the time because "edit on github" is broken... Could it be related to the fact that I gpg-sign my release tags in git? I'm seeing the same problem with files written in markdown in a project using a mix of markdown and rST (the "edit on GitHub" links go to e.g. foo.rst when the page was generated from e.g. foo.md): See e.g. http://zulip.readthedocs.org/en/latest/integration-guide.html @timabbott are you signing your release tags with gpg, too? Well, I don't know about others, but this _is_ fixed here. The original post here says that the [stable release page](http://borgbackup.readthedocs.org/en/stable/) leads to a [404 page](https://github.com/borgbackup/borg/blob/55fef2457e483701254e5b2a7d4a1a60963971a8/docs/index.rst) (which is still 404), but the stable page now points to a [different page](https://github.com/borgbackup/borg/blob/6a3f2d78644567f6b105414916b449eaa4409540/docs/index.rst) which works fine. Also, [tagged releases](http://borgbackup.readthedocs.org/en/1.0.1/) work fine too: they lead to a [working link](https://github.com/borgbackup/borg/blob/1.0.1/docs/index.rst). I wonder if [github's support for signed commits](https://github.com/blog/2144-gpg-signature-verification) fixed this or something. Anyways, it looks to me like the original issue is fixed. @timabbott's issue seems related not to git tags but to the markdown/rst mix, an issue that should be fixed on RTFD's side, not github. @anarcat right, it works now for our stable tag. \o/ Thus, I am closing this one - please file different bugs in a different issue. Sounds reasonable, opened https://github.com/rtfd/readthedocs.org/issues/2130 for the markdown issue -- I hadn't realized it was a different bug. Thanks! Hmm, the issue seems to have come back: http://borgbackup.readthedocs.io/en/stable/index.html "edit on github" points to: https://github.com/borgbackup/borg/blob/89707fc6084c196fa3f973a91937218288f59e03/docs/index.rst ... which is 404. I've also seen this. http://kinto.readthedocs.io/en/stable/ "edit on github" points to https://github.com/Kinto/kinto/blob/9c331e13643a53ca76208f4efa6c9bb651b165b4/docs/index.rst , which 404s. Like @anarcat said, the hash shown is the hash of the tag, rather than that of the commit. Looking at http://readthedocs.org/api/v1/version/1957996/?format=json, I see the problematic hash listed as the "identifier". BTW, how about a setting for whether one wants "edit on github" links or not? I'd just switch it off so we would never have to bother with it again.
We have a link to github in our docs, so we don't really need that extra link. ;-) @ThomasWaldmann your last comment should be in another issue I believe ;) Regarding the issue at hand, I've investigated a bit, and here are my findings: When checking for the "identifier" to use, there's a special case when the slug is `STABLE`: https://github.com/rtfd/readthedocs.org/blob/b5d06e856ec0eea6f340bc078ab747254b516cd0/readthedocs/builds/models.py#L117 This will return the `identifier`, which is the commit hash. This is what's causing the problem, because it's the hash of the tag, not of a commit, and thus GitHub doesn't know how to display it. As you can see from the comment there, the issue is that we don't have the original branch name. So maybe we could store the branch name instead of the tag hash (which seems useless in our case?) in the `identifier` field. This should be done in [`update_stable_version`](https://github.com/rtfd/readthedocs.org/blob/b5d06e856ec0eea6f340bc078ab747254b516cd0/readthedocs/projects/models.py#L698) I guess (specifically lines [716](https://github.com/rtfd/readthedocs.org/blob/b5d06e856ec0eea6f340bc078ab747254b516cd0/readthedocs/projects/models.py#L716) and [726](https://github.com/rtfd/readthedocs.org/blob/b5d06e856ec0eea6f340bc078ab747254b516cd0/readthedocs/projects/models.py#L726)). I'll submit a PR with this naive fix, to open up the discussion. This has been fixed (see http://mockingjay.readthedocs.io/en/stable/ -- it properly links to the hash of the commit on github) @ericholscher click on the first link in the top post, then on "edit on github". This is not fixed. I still see this problem all the time. It could be a bug with github that doesn't properly recognize certain hashes, but it's still a problem regardless. Where did the commit `55fef2457e483701254e5b2a7d4a1a60963971a8` come from for borg backup? GitHub doesn't seem to recognize it, but I imagine it came from the project at some point. Did you delete and retag a release or something? Either that, or we have a bug that is pulling tags in from random places.
it's not a commit, it's a tag, and it's the whole bug here - github doesn't display tag hashes correctly when you ask them as commits: ``` $ git show 55fef2457e483701254e5b2a7d4a1a60963971a8 tag 0.28.2 Tagger: Thomas Waldmann <[email protected]> Date: Sun Nov 15 22:01:29 2015 +0100 tagged/signed release 0.28.2 -----BEGIN PGP SIGNATURE----- Version: GnuPG v1 iQIcBAABCgAGBQJWSPKpAAoJECQ6z6lR944BBt4P/2sRtiN7prKpK7dO8QAPuEiM Tb4gzaLJRf1ZjSGwFGQ+Ma3YXf3OBMG+ZAZIUtn7q4b3BoetuekCIWcGc9FPxOT5 gL4ebFkAPXZ24KrU+c+QCHgPf80E2DfqNZYQsv2yaEPfr0W0c40vOP9bdhmOgZF4 z1GLUrE/36PbVkcneIar7fu/YRV5Rc0Ohl+5g4se+EOyhXQBCcAQkGFZefTpe4eq s9uVkBTx6eD15BSlEebv34Fxc0SPAj2+UIcrIPtDPrUVLv62wfNu5btwS+41FxXs 2m7QJKXWQAmsM24NGLePGsFP5RJLg+E3IF1ytAdYh3ecHk5xsvoiD3c/z0MAO7Cm 3ukiWQVVX6xpLjpM0iEzBrtaxTv0zPqh1r9n9ecIv2PNVxwmcJ27C8IgMqZ0SpLS w6whl2mimv7C4Ww7vG1qTzjRjBzLqAOOdZ9SIwVrzAjCYcCSS8wDSpJeIArjDroc mwbeZoIrWWz40XBFNOsoJQ2a2s98SvJoyO/y8wSnPrgSAlc+bKqGlKnmRLDEASml dDhGJMywJhS0U5H9fDkJcPCR0zlRk24RqUVISN63e0C6zjsWfwoOpZYFmLF4tIT2 OBbd64Ca7+OkIktQgXC5ujBioX3QBUF7Oris6yVJINYP/eC9fGNX0oYNasoLMiu0 h2eYx+ToARnXU8QFcWLU =cjuM -----END PGP SIGNATURE----- commit 3a72fbe418e075ec7af8695a50dee50603d09b0d Author: Thomas Waldmann <[email protected]> Date: Sun Nov 15 20:30:58 2015 +0100 update CHANGES diff --git a/docs/changes.rst b/docs/changes.rst index 9d215e63..f60edd59 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -16,8 +16,13 @@ Other changes: - do not create docs sources at build time (just have them in the repo), completely remove have_cython() hack, do not use the "mock" library at build time, #384 -- docs: explain how to regenerate usage and API files (build_api or - build_usage) and when to commit usage files directly into git, #384 +- avoid hidden import, make it easier for PyInstaller, easier fix for #218 +- docs: + + - add description of item flags / status output, fixes #402 + - explain how to regenerate usage and API files (build_api or + build_usage) and when to commit usage files directly into git, #384 + - minor install docs improvements Version 0.28.1 ``` so either you: 1. patch github to make it DTRT here (hahaha, just kidding, you're SOL there) 2. ask them nicely to fix this (couldn't resist) 3. handle those cases by hand and inspect the ref given to extract a valid commit... (looks like this is the only option) OMG. `commit hash` and `tag hash` are two different things? `tag` has them both? But why? A related problem is if the documentation is generated via `apidoc`, such that no ReST file exists on Github. This will always lead to a 404 error, see #2719. Please add an option to disable these links. @goerz, in that case, perhaps rtfd/sphinx_rtd_theme/issues/324? :p So can anybody explain what exactly happens? 1. rtfd clones project sources 2. rtfd tries to find branch? tag? for `/stable/` URL <-- how does it do this, is there a log for this operation 3. rtfd tries to find commit id for branch/tag name found in step 2 4. rtfd updates to specified commit id 5. rtfd builds the doc 6. rtfd inserts commit id into GitHub edit link URL The problem is that GitHub skips some commits? Or don't show them by commit id? It would be nice if build logs were a part of docs footer or something with stats etc. > OMG. commit hash and tag hash are two different things? tag has them both? But why? a tag is built on top of a commit, and has extra information, just like branches are not commits, tags are not just commits either. 
see [this document](https://git-scm.com/book/en/v2/Git-Internals-Git-References) for a good overview of references and tags in git. > 1. rtfd clones project sources > 2. rtfd tries to find branch? tag? for /stable/ URL <-- how does it do this, is there a log for this operation > 3. rtfd tries to find commit id for branch/tag name found in step 2 > 4. rtfd updates to specified commit id > 5. rtfd builds the doc > 6. rtfd inserts commit id into GitHub edit link URL > > The problem is that GitHub skips some commits? Or don't show them by commit id? The problem is that RTFD skips step 3 and 4, AFAIK - it just uses the tag hash instead of the commit hash. Sounds like ya'll have done some good work debugging this issue. The next step is to submit a patch that hopefully fixes the issue. I saw a previous attempt at this that I closed thinking it was fixed in my testing (#2428) -- that might be a good place to start. I don't get a feeling that we've debugged the issue. These are all assumptions. Can we have a command that we can checkout and run to repeat rtfd behaviour? https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/projects/models.py#L698 is probably a good place to start. > https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/projects/models.py#L698 is probably a good place to start. Lame Django question - where is `self.versions` defined in this file? I don't see any properties in `Project` object with such name. We're [having the exact same problem](/restic/restic/issues/1175) over at the [restic](/restic/restic) documentation, with e.g. http://restic.readthedocs.io/en/stable/installation.html - the below discussion is with this example page in mind. For the `latest` and `v0.7.1` versions, the "Edit on GitHub" link works just fine. It's just the `stable` version that has the problem this issue is about. Now, the [RTD documentation](https://docs.readthedocs.io/en/latest/versions.html#versions) states that "We also create a `stable` version, if your project has any tagged releases. `stable` will be automatically kept up to date to point at your highest version". To me this sounds like RTD is aware of what the highest version is. In restic's case, currently `v0.7.1` for which there is a tag with the same name. Considering that RTD also lists `latest`, `stable` and then every release tag in the sidebar where you select the version to display, it sure seems to understand what versions we have. To solve this issue, can't you simply put the highest tag's _name_ in the URLs of the links, instead of as now the highest tag's commit hash? To be clear, in our case the "Edit on GitHub" link in the example page above is currently https://github.com/restic/restic/blob/402b6f5f8d0171d7061cbeab67725fce794fc376/doc/installation.rst - the hash referencing the commit of the `v0.7.1` tag. If instead of this hash you would just use the highest tag name, the URL would instead be https://github.com/restic/restic/blob/v0.7.1/doc/installation.rst, and this works just fine. This would be in line with what the documentation states about the `stable` version referencing the highest version, so I think it seems rather straight forward to solve it this way. @ThomasWaldmann (and other people having this issue) can you try re-building your project and see if the problem is solved? According to https://github.com/rtfd/readthedocs.org/pull/3615#pullrequestreview-106594994 it should. 
i rebuilt the "stable" branch of the borg repo (not sure why i still have access to that, @ThomasWaldmann, btw ;), and we still see the issue. the tag links are a little better, but still ackward. e.,g. it goes to https://github.com/borgbackup/borg/edit/1.1.4/docs/usage/general.rst which says "Sorry, it looks like your fork is outdated!" Testing a version `stable`, the `Edit` link gives me a 404 still. The `View` link still works for me, but if I try to edit this file on GitHub, the edit button is disabled and tells me that "i must be on a branch" -- so perhaps this is a clue. Reopening. can we just remove (or disable by default) the annoying / buggy features like this or that "outdated library version" note? the problem is not just that readers encounter these bugs and report them HERE, they also frequently report them at the documented software project, causing work for the maintainers of these. and once you have explained it to one user who found it, the next one will find it and open another issue... @agjohnson I have the exact same symptoms in e.g. https://restic.readthedocs.io/en/stable/040_backup.html. I have asked the project owner of restic to trigger a rebuild of the docs, but haven't heard from him yet on whether or not he did that. Would be great if stable just pointed to the last release tag instead, that would have solved it by now. That's because commits nor tags can't be edited. So I think this is kind of another issue (hide the `Edit` button when a version isn't a branch). @stsewd has a good point, one cannot edit tags either. So at the GitHub end we're left with the branches that exist in the repo. Usually this is master and perhaps some other dev branches. In order for the edit link to have a purpose, perhaps it simply needs to point to the main branch at all times, regardless of which version you are looking at in RTD? How to deduct/define the "main" branch is another question though. Maybe the answer to it lies in just looking at what branch the Latest version corresponds to. EDIT: Then again; What happens on GitHub is a separate concern. Perhaps RTD did what it's supposed to do by linking to the proper file in the proper commit/tag/branch, and that's the end of it, in the sense that it's then up to the user to create a branch from that commit/tag if they want to edit. Just like the "Fork me on GitHub" links on project pages just goes to the original repository, instead of actually starting to fork the original repo into the user's GitHub account. The restic doc project was rebuilt in RTD, but the same state still applies, for obvious reasons. Not much else we can do about it suppose :) Thanks for your hard work! I am +1 on reticketing the issue. There were some changes since 2015 and the info needs to be researched again. Yeah, I don't think this is a bug, is more like an improvement, and it needs a design decision about how to deal with links to no editable versions (tags). On my deployment, it wouldn't even work on `master` branch? See http://how.bitshares.works/en/master/ @xeroc I think you are facing another issue https://github.com/rtfd/readthedocs.org/issues/1917 @stsewd are you sure looking at the BitShares docs: > © Copyright 2018, BitShares Blockchain Foundation. Revision 3c056912. > Built with Sphinx using a theme provided by Read the Docs. @techtonik @xeroc that's another bug, please see https://github.com/rtfd/readthedocs.org/issues/4671 thanks for the link. 
So, I just discovered that we have this `is_editable` thing (sphinx only): https://github.com/rtfd/readthedocs.org/blob/0f5d979c221da7140138359fc06660ebec6fff9b/readthedocs/doc_builder/backends/sphinx.py#L127-L132 It was added here: https://github.com/rtfd/readthedocs.org/commit/4619c8297780c1c0d3e557a9712045e279fd6e95#diff-4ade07b2b17f38e60bfcb27301c99934 The commit msg describes what we are seeing now, but it was never used in the conf.py context. Anyway, I think we can use that now in the flyout menu with github and friends: https://github.com/rtfd/readthedocs.org/blob/0f5d979c221da7140138359fc06660ebec6fff9b/readthedocs/restapi/templates/restapi/footer.html#L79-L88
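One way to "inspect the ref to extract a valid commit", as suggested earlier in the thread, is to let git peel an annotated tag down to the commit it points at. An illustrative sketch, not RTD code:

```python
import subprocess


def peel_to_commit(repo_path, ref):
    # '<ref>^{commit}' dereferences an annotated tag object to the commit
    # it tags; for a plain commit or branch it is a no-op.
    return subprocess.check_output(
        ['git', 'rev-parse', '%s^{commit}' % ref],
        cwd=repo_path, universal_newlines=True,
    ).strip()
```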
2018-11-01T21:00:54
readthedocs/readthedocs.org
4,852
readthedocs__readthedocs.org-4852
[ "4808" ]
bc248aa6a7d2bb54e88e779fe4f78b9f0266acf4
diff --git a/readthedocs/doc_builder/python_environments.py b/readthedocs/doc_builder/python_environments.py --- a/readthedocs/doc_builder/python_environments.py +++ b/readthedocs/doc_builder/python_environments.py @@ -215,8 +215,10 @@ def setup_base(self): site_packages, '--no-download', env_path, - bin_path=None, # Don't use virtualenv bin that doesn't exist yet - cwd=self.checkout_path, + # Don't use virtualenv bin that doesn't exist yet + bin_path=None, + # Don't use the project's root, some config files can interfere + cwd='$HOME', ) def install_core_requirements(self):
Builds failing with NamespacePath has no attribute sort ## Details * Read the Docs project URL: https://readthedocs.org/projects/jaracoclipboard/ * Build URL (if applicable): https://readthedocs.org/projects/jaracoclipboard/builds/7996994/ * Read the Docs username (if applicable): jaraco ## Expected Result Build should succeed without error. ## Actual Result Build is failing during the virtualenv creation. This issue started this evening sometime after [this commit](https://github.com/jaraco/jaraco.clipboard/commit/df8bfe63c5f4eccec6859d7d30bf34e59ddca6cb) (which succeeded). The code for configuring RTD is derived from the same code as many other similar projects. I suspected that something was cached incorrectly, as this error message is due to a [known issue](https://github.com/pypa/pip/issues/4216) that was fixed with later versions of pip, so I deleted and recreated the project, but the docs build is still failing. Can you suggest a way to work around the issue?
I see [another project exhibiting the same error](https://readthedocs.org/projects/jaracocollections/builds/7963873/). If this is a problem with pip `9.0.1` then I'm not sure what we can do to fix this soon :/ pip comes from the docker images, so it can't be changed now (I think we will be able to change this with v2 of the configuration file). So, our docker images are as follows: - v2 (the one that you are using), comes with pip `9.0.1` - latest, comes with `9.0.3` So, if this bug is fixed in that version of pip, you can solve this problem by adding this to your rtd configuration file: ```yaml build: image: latest ``` > I think we will be able to change this with v2 of the configuration file IMHO, I think that we should force installing the latest `pip` version as the initial step, by running: pip install -U pip So, if we are going to update the Docker versions more frequently, this won't have any effect. On the other hand, if we tend to make our Docker images stable and do not upgrade them very often (as we are doing now), it will always upgrade to the latest version, which is the maintained one. In the meantime, we could add a dated Feature flag for this so all new projects start installing the latest `pip`. > If this is a problem with pip `9.0.1` then I'm not sure what we can do to fix this soon :/ pip comes from the docker images, so it can't be changed now I was able to reproduce this issue by importing the `jaracocollections` project into my local instance and running two builds of `latest` with the `readthedocs/build:3.0` image. So, it seems that it's not a problem with the `pip` version itself but with "something else". If you take a closer look: the first command from this build https://readthedocs.org/projects/jaracocollections/builds/7963873/ is `git remote set-url`, which means that the builder already has the code and the virtualenv is already created. BUT after all the `git` commands are executed, the `python -mvirtualenv` command is executed again even though the virtualenv is already there. I think it fails because of this. > BUT after all the `git` commands are executed, the `python -mvirtualenv` command is executed again even though the virtualenv is already there. I think it fails because of this. It shouldn't fail because of this. We don't have any kind of check to run the command only if the venv doesn't exist, or to skip it if it does. Actually, I did a simple local test: ``` $ python -mvirtualenv --no-site-packages --no-download /tmp/testvenv Using base prefix '/home/humitos/.pyenv/versions/3.6.6' New python executable in /tmp/testvenv/bin/python Installing setuptools, pip, wheel...done. Time: 0h:00m:03s $ python -mvirtualenv --no-site-packages --no-download /tmp/testvenv Using base prefix '/home/humitos/.pyenv/versions/3.6.6' New python executable in /tmp/testvenv/bin/python Installing setuptools, pip, wheel...done. Time: 0h:00m:03s $ ``` In fact, upgrading `pip` before running the command that creates the venv doesn't work either. I added the command python3.6 -mpip install -U pip and it still fails with the same problem, and in the log I can see that it's still using pip 9.0.3, and I don't know why yet. I'm following loosely. Let me just say, thanks so much for the in-depth investigation. I think I found the problem.
Pip does support reading configuration from a `pyproject.toml` file https://pip.pypa.io/en/stable/reference/pip/#pep-518-support. When creating the virtualenv, pip installs some packages and the `pyproject.toml` file is read; by renaming/removing the file, I was able to build the project without issues. So, I'm not sure what we can do here; maybe the new deps require a new version of pip? The deps are incompatible somehow? This is weird: I'm installing the latest version of pip before the virtualenv is created, and the build passes. BUT, if I build it again, the build fails... Also I noticed that the first time the latest version of pip is only downloaded, while the second time the old pip version is uninstalled :/ I think we are messing up this https://github.com/rtfd/readthedocs.org/issues/4808#issuecomment-434012767 with the env variables https://github.com/rtfd/readthedocs.org/blob/9b2b17c0fc603267b4b20f5923862c2db82602da/readthedocs/projects/tasks.py#L576-L583 https://github.com/rtfd/readthedocs.org/blob/9b2b17c0fc603267b4b20f5923862c2db82602da/readthedocs/doc_builder/environments.py#L353-L365 So, I have some code that "fixes" this AND installs the latest version of pip. For the pyproject problem we can change the cwd of the command to something other than the project's root (probably `/`). I wasn't able to install the latest version of pip before the virtualenv creation, because the command would need to be run with sudo (we don't have sudo anyway...). We could use this option https://virtualenv.pypa.io/en/stable/userguide/#the-extra-search-dir-option or, more easily, create the virtualenv with the old pip version but update the pip version after the virtualenv creation; that way we are able to install the core and user requirements using the latest pip. I'm a little lost here. Can you explain in simple words/steps what the current issue/bug is and what your proposal to fix it is? It seems there is more than one specific problem (considering the `toml` case). The problem here is that we create the virtualenv with the cwd at the project's root; when creating the virtualenv, pip is triggered, pip reads the `.toml` file from the repo, and everything explodes. Since we don't need to create the virtualenv from the project's root, I'm proposing to change the cwd to `/` here https://github.com/rtfd/readthedocs.org/blob/9b2b17c0fc603267b4b20f5923862c2db82602da/readthedocs/doc_builder/python_environments.py#L207-L220 (instead of `checkout_path`, just `/`) My other change is to always install the latest pip, but that isn't part of this fix > The problem here is that we create the virtualenv with the cwd at the project's root; when creating the virtualenv, pip is triggered, pip reads the `.toml` file from the repo, and everything explodes. Since we don't need to create the virtualenv from the project's root, I'm proposing to change the cwd to / here If the project has a `pyproject.toml` in the root of the repository, I assume that the project wants us to read it and use it, right? If we change the CWD we will be fixing _this particular_ problem for this particular repository but breaking the general rule, which is to read that file (as pip's documentation says). Still, I don't understand why this fails. It seems to be an incompatibility between the `pip` + `virtualenv` versions installed on the docker image and the dependencies required from the `toml` file.
On the other hand, if the problem is just that CWD path, we should be able to reproduce it outside RTD, but I haven't found a way yet (I tried installing different versions of `pip`, `virtualenv` and `setuptools`) > If the project has a pyproject.toml in the root of the repository, I assume that the project wants us to read it and use it, right? Yeah, but when installing the packages, not when creating the virtualenv > If we change the CWD we will be fixing this particular problem for this particular repository but breaking the general rule, which is to read that file (as pip's documentation says). Only for the virtualenv creation; package installation still runs with the cwd at the project's root @stsewd you are right! It seems that you have been trying to say that in many comments but I wasn't getting it :) OK, so I suppose that a good place to point the CWD when creating the venv is the `HOME` directory instead of `/`, because if the command needs to write a temp file or whatever, it will fail on `/`. Pointing to home sounds better; not sure how to do that (yet), I think we don't allow shell expansions. I'll see what I can do
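Conceptually, the eventual fix (the `$HOME` patch above) just runs the virtualenv bootstrap from outside the checkout, so a `pyproject.toml` at the repo root can't leak into pip's bootstrap. A rough sketch with plain subprocess (the real code goes through RTD's command environment):

```python
import os
import subprocess


def create_virtualenv(python, env_path):
    subprocess.check_call(
        [python, '-m', 'virtualenv', '--no-site-packages',
         '--no-download', env_path],
        # Not the repository checkout: keeps pyproject.toml out of reach.
        cwd=os.path.expanduser('~'),
    )
```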
2018-11-02T03:47:45
readthedocs/readthedocs.org
4,853
readthedocs__readthedocs.org-4853
[ "4401" ]
bc248aa6a7d2bb54e88e779fe4f78b9f0266acf4
diff --git a/readthedocs/doc_builder/exceptions.py b/readthedocs/doc_builder/exceptions.py --- a/readthedocs/doc_builder/exceptions.py +++ b/readthedocs/doc_builder/exceptions.py @@ -43,7 +43,7 @@ class ProjectBuildsSkippedError(BuildEnvironmentError): class YAMLParseError(BuildEnvironmentError): GENERIC_WITH_PARSE_EXCEPTION = ugettext_noop( - 'Problem parsing YAML configuration. {exception}', + 'Problem in your project\'s configuration. {exception}', )
Confusing error message to end user In https://github.com/rtfd/readthedocs.org/issues/4071#issuecomment-405939492 I realized that we are saying that we have a problem parsing the YAML file, but the problem is in fact with one of the options set from the web admin dashboard. Example: ![captura de pantalla_2018-07-18_10-57-36](https://user-images.githubusercontent.com/244656/42886232-647bedbc-8a79-11e8-8aca-fe2430139fe0.png) There is no `requirements_file` entry in the YAML file (https://github.com/geopandas/geopandas/blob/master/readthedocs.yml), but it exists under the `Admin -> Advanced Settings` form field. We need to improve this error message to something more user-friendly that expresses the real error. It's not an error parsing the YAML file. The file was parsed properly, but the problem is with one of the values from one of the fields.
Previously we weren't checking for the requirements file set in the web admin, so an error would happen in the install requirements step. I think we can catch these cases when doing https://github.com/rtfd/readthedocs.org/issues/4388 This message comes from https://github.com/rtfd/readthedocs.org/blob/67dfae9de7be81af92bd88453427424111a6168f/readthedocs/projects/tasks.py#L424-L424; we should change it to be more generic, like _Problem in your project configuration_.
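The `{exception}` slot gets filled with whatever validation failure actually happened, which is why the old "parsing YAML" prefix was misleading. Roughly how it surfaces (the exception text here is made up, and the call site may differ):

```python
from readthedocs.doc_builder.exceptions import YAMLParseError

raise YAMLParseError(
    YAMLParseError.GENERIC_WITH_PARSE_EXCEPTION.format(
        exception='Requirements file does not exist.',
    )
)
```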
2018-11-02T04:23:43
readthedocs/readthedocs.org
4,876
readthedocs__readthedocs.org-4876
[ "4450" ]
4fa2746040aee0aafcdb09e0e5674dcb42ea9809
diff --git a/readthedocs/core/views/hooks.py b/readthedocs/core/views/hooks.py --- a/readthedocs/core/views/hooks.py +++ b/readthedocs/core/views/hooks.py @@ -1,21 +1,26 @@ """Views pertaining to builds.""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + import json +import logging import re from django.http import HttpResponse, HttpResponseNotFound from django.shortcuts import redirect from django.views.decorators.csrf import csrf_exempt -from readthedocs.core.utils import trigger_build from readthedocs.builds.constants import LATEST +from readthedocs.core.utils import trigger_build from readthedocs.projects import constants -from readthedocs.projects.models import Project, Feature +from readthedocs.projects.models import Feature, Project from readthedocs.projects.tasks import sync_repository_task -import logging - log = logging.getLogger(__name__) @@ -75,6 +80,35 @@ def build_branches(project, branch_list): return (to_build, not_building) +def sync_versions(project): + """ + Sync the versions of a repo using its latest version. + + This doesn't register a new build, + but clones the repo and syncs the versions. + Due that `sync_repository_task` is bound to a version, + we always pass the default version. + + :returns: The version slug that was used to trigger the clone. + :rtype: str + """ + try: + version_identifier = project.get_default_branch() + version = ( + project.versions + .filter(identifier=version_identifier) + .first() + ) + if not version: + log.info('Unable to sync from %s version', version_identifier) + return None + sync_repository_task.delay(version.pk) + return version.slug + except Exception: + log.exception('Unknown sync versions exception') + return None + + def get_project_from_url(url): if not url: return Project.objects.none() diff --git a/readthedocs/oauth/services/github.py b/readthedocs/oauth/services/github.py --- a/readthedocs/oauth/services/github.py +++ b/readthedocs/oauth/services/github.py @@ -172,7 +172,7 @@ def get_webhook_data(self, project, integration): ), 'content_type': 'json', }, - 'events': ['push', 'pull_request'], + 'events': ['push', 'pull_request', 'create', 'delete'], }) def setup_webhook(self, project): diff --git a/readthedocs/restapi/views/integrations.py b/readthedocs/restapi/views/integrations.py --- a/readthedocs/restapi/views/integrations.py +++ b/readthedocs/restapi/views/integrations.py @@ -1,32 +1,44 @@ """Endpoints integrating with Github, Bitbucket, and other webhooks.""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + import json import logging import re -from builtins import object +import six +from django.shortcuts import get_object_or_404 from rest_framework import permissions -from rest_framework.views import APIView +from rest_framework.exceptions import NotFound, ParseError from rest_framework.renderers import JSONRenderer from rest_framework.response import Response -from rest_framework.exceptions import ParseError, NotFound - -from django.shortcuts import get_object_or_404 +from rest_framework.views import APIView -from readthedocs.core.views.hooks import build_branches -from readthedocs.core.signals import (webhook_github, webhook_bitbucket, - webhook_gitlab) +from readthedocs.core.signals import ( + webhook_bitbucket, + webhook_github, + webhook_gitlab, +) +from readthedocs.core.views.hooks import build_branches, sync_versions from 
readthedocs.integrations.models import HttpExchange, Integration from readthedocs.integrations.utils import normalize_request_payload from readthedocs.projects.models import Project -import six - log = logging.getLogger(__name__) +GITHUB_EVENT_HEADER = 'HTTP_X_GITHUB_EVENT' GITHUB_PUSH = 'push' +GITHUB_CREATE = 'create' +GITHUB_DELETE = 'delete' GITLAB_PUSH = 'push' +GITLAB_NULL_HASH = '0' * 40 +GITLAB_TAG_PUSH = 'tag_push' +BITBUCKET_EVENT_HEADER = 'HTTP_X_EVENT_KEY' BITBUCKET_PUSH = 'repo:push' @@ -124,6 +136,14 @@ def get_response_push(self, project, branches): 'project': project.slug, 'versions': list(to_build)} + def sync_versions(self, project): + version = sync_versions(project) + return { + 'build_triggered': False, + 'project': project.slug, + 'versions': [version], + } + class GitHubWebhookView(WebhookMixin, APIView): @@ -140,6 +160,12 @@ class GitHubWebhookView(WebhookMixin, APIView): "ref": "branch-name", ... } + + See full payload here: + + - https://developer.github.com/v3/activity/events/types/#pushevent + - https://developer.github.com/v3/activity/events/types/#createevent + - https://developer.github.com/v3/activity/events/types/#deleteevent """ integration_type = Integration.GITHUB_WEBHOOK @@ -154,9 +180,13 @@ def get_data(self): def handle_webhook(self): # Get event and trigger other webhook events - event = self.request.META.get('HTTP_X_GITHUB_EVENT', 'push') - webhook_github.send(Project, project=self.project, - data=self.data, event=event) + event = self.request.META.get(GITHUB_EVENT_HEADER, GITHUB_PUSH) + webhook_github.send( + Project, + project=self.project, + data=self.data, + event=event + ) # Handle push events and trigger builds if event == GITHUB_PUSH: try: @@ -164,6 +194,9 @@ def handle_webhook(self): return self.get_response_push(self.project, branches) except KeyError: raise ParseError('Parameter "ref" is required') + if event in (GITHUB_CREATE, GITHUB_DELETE): + return self.sync_versions(self.project) + return None def _normalize_ref(self, ref): pattern = re.compile(r'^refs/(heads|tags)/') @@ -180,26 +213,55 @@ class GitLabWebhookView(WebhookMixin, APIView): Expects the following JSON:: { + "before": "95790bf891e76fee5e1747ab589903a6a1f80f22", + "after": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "object_kind": "push", "ref": "branch-name", ... } + + See full payload here: + + - https://docs.gitlab.com/ce/user/project/integrations/webhooks.html#push-events + - https://docs.gitlab.com/ce/user/project/integrations/webhooks.html#tag-events """ integration_type = Integration.GITLAB_WEBHOOK def handle_webhook(self): - # Get event and trigger other webhook events + """ + Handle GitLab events for push and tag_push. 
+ + GitLab doesn't have a separate event for creation/deletion, + instead, it sets the before/after field to + 0000000000000000000000000000000000000000 ('0' * 40) + """ event = self.request.data.get('object_kind', GITLAB_PUSH) - webhook_gitlab.send(Project, project=self.project, - data=self.request.data, event=event) + webhook_gitlab.send( + Project, + project=self.project, + data=self.request.data, + event=event + ) # Handle push events and trigger builds - if event == GITLAB_PUSH: + if event in (GITLAB_PUSH, GITLAB_TAG_PUSH): + data = self.request.data + before = data['before'] + after = data['after'] + # Tag/branch created/deleted + if GITLAB_NULL_HASH in (before, after): + return self.sync_versions(self.project) + # Normal push to master try: - branches = [self.request.data['ref'].replace('refs/heads/', '')] + branches = [self._normalize_ref(data['ref'])] return self.get_response_push(self.project, branches) except KeyError: raise ParseError('Parameter "ref" is required') + return None + + def _normalize_ref(self, ref): + pattern = re.compile(r'^refs/(heads|tags)/') + return pattern.sub('', ref) class BitbucketWebhookView(WebhookMixin, APIView): @@ -218,31 +280,60 @@ class BitbucketWebhookView(WebhookMixin, APIView): "name": "branch-name", ... }, + "old" { + "name": "branch-name", + ... + }, ... }], ... }, ... } + + See full payload here: + + - https://confluence.atlassian.com/bitbucket/event-payloads-740262817.html#EventPayloads-Push """ integration_type = Integration.BITBUCKET_WEBHOOK def handle_webhook(self): - # Get event and trigger other webhook events - event = self.request.META.get('HTTP_X_EVENT_KEY', BITBUCKET_PUSH) - webhook_bitbucket.send(Project, project=self.project, - data=self.request.data, event=event) - # Handle push events and trigger builds + """ + Handle BitBucket events for push. + + BitBucket doesn't have a separate event for creation/deletion, + instead it sets the new attribute (null if it is a deletion) + and the old attribute (null if it is a creation). + """ + event = self.request.META.get(BITBUCKET_EVENT_HEADER, BITBUCKET_PUSH) + webhook_bitbucket.send( + Project, + project=self.project, + data=self.request.data, + event=event + ) if event == BITBUCKET_PUSH: try: - changes = self.request.data['push']['changes'] - branches = [change['new']['name'] - for change in changes - if change.get('new')] - return self.get_response_push(self.project, branches) + data = self.request.data + changes = data['push']['changes'] + branches = [] + for change in changes: + old = change['old'] + new = change['new'] + # Normal push to master + if old is not None and new is not None: + branches.append(new['name']) + # BitBuck returns an array of changes rather than + # one webhook per change. If we have at least one normal push + # we don't trigger the sync versions, because that + # will be triggered with the normal push. + if branches: + return self.get_response_push(self.project, branches) + return self.sync_versions(self.project) except KeyError: raise ParseError('Invalid request') + return None class IsAuthenticatedOrHasToken(permissions.IsAuthenticated):
diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -26,8 +26,21 @@ from readthedocs.builds.models import Build, BuildCommandResult, Version from readthedocs.integrations.models import Integration from readthedocs.oauth.models import RemoteOrganization, RemoteRepository -from readthedocs.projects.models import APIProject, Feature, Project, EnvironmentVariable -from readthedocs.restapi.views.integrations import GitHubWebhookView +from readthedocs.projects.models import ( + APIProject, + EnvironmentVariable, + Feature, + Project, +) +from readthedocs.restapi.views.integrations import ( + GITHUB_CREATE, + GITHUB_DELETE, + GITHUB_EVENT_HEADER, + GITLAB_NULL_HASH, + GITLAB_PUSH, + GITLAB_TAG_PUSH, + GitHubWebhookView, +) from readthedocs.restapi.views.task_views import get_status_data super_auth = base64.b64encode(b'super:test').decode('utf-8') @@ -825,6 +838,29 @@ def setUp(self): Version, slug='v1.0', verbose_name='v1.0', active=True, project=self.project ) + self.github_payload = { + 'ref': 'master', + } + self.gitlab_payload = { + 'object_kind': GITLAB_PUSH, + 'ref': 'master', + 'before': '95790bf891e76fee5e1747ab589903a6a1f80f22', + 'after': '95790bf891e76fee5e1747ab589903a6a1f80f23', + } + self.bitbucket_payload = { + 'push': { + 'changes': [{ + 'new': { + 'type': 'branch', + 'name': 'master', + }, + 'old': { + 'type': 'branch', + 'name': 'master', + }, + }], + }, + } def test_github_webhook_for_branches(self, trigger_build): """GitHub webhook API.""" @@ -882,6 +918,44 @@ def test_github_webhook_for_tags(self, trigger_build): trigger_build.assert_has_calls( [mock.call(force=True, version=self.version_tag, project=self.project)]) + @mock.patch('readthedocs.core.views.hooks.sync_repository_task') + def test_github_create_event(self, sync_repository_task, trigger_build): + client = APIClient() + + headers = {GITHUB_EVENT_HEADER: GITHUB_CREATE} + resp = client.post( + '/api/v2/webhook/github/{}/'.format(self.project.slug), + self.github_payload, + format='json', + **headers + ) + self.assertEqual(resp.status_code, status.HTTP_200_OK) + self.assertFalse(resp.data['build_triggered']) + self.assertEqual(resp.data['project'], self.project.slug) + self.assertEqual(resp.data['versions'], [LATEST]) + trigger_build.assert_not_called() + latest_version = self.project.versions.get(slug=LATEST) + sync_repository_task.delay.assert_called_with(latest_version.pk) + + @mock.patch('readthedocs.core.views.hooks.sync_repository_task') + def test_github_delete_event(self, sync_repository_task, trigger_build): + client = APIClient() + + headers = {GITHUB_EVENT_HEADER: GITHUB_DELETE} + resp = client.post( + '/api/v2/webhook/github/{}/'.format(self.project.slug), + self.github_payload, + format='json', + **headers + ) + self.assertEqual(resp.status_code, status.HTTP_200_OK) + self.assertFalse(resp.data['build_triggered']) + self.assertEqual(resp.data['project'], self.project.slug) + self.assertEqual(resp.data['versions'], [LATEST]) + trigger_build.assert_not_called() + latest_version = self.project.versions.get(slug=LATEST) + sync_repository_task.delay.assert_called_with(latest_version.pk) + def test_github_parse_ref(self, trigger_build): wh = GitHubWebhookView() @@ -904,23 +978,153 @@ def test_github_invalid_webhook(self, trigger_build): self.assertEqual(resp.status_code, 200) self.assertEqual(resp.data['detail'], 'Unhandled webhook event') - def test_gitlab_webhook(self, 
trigger_build): + def test_gitlab_webhook_for_branches(self, trigger_build): """GitLab webhook API.""" client = APIClient() client.post( - '/api/v2/webhook/gitlab/{0}/'.format(self.project.slug), - {'object_kind': 'push', 'ref': 'master'}, + '/api/v2/webhook/gitlab/{}/'.format(self.project.slug), + self.gitlab_payload, format='json', ) - trigger_build.assert_has_calls( - [mock.call(force=True, version=mock.ANY, project=self.project)]) + trigger_build.assert_called_with( + force=True, version=mock.ANY, project=self.project + ) + + trigger_build.reset_mock() + self.gitlab_payload.update( + ref='non-existent', + ) client.post( - '/api/v2/webhook/gitlab/{0}/'.format(self.project.slug), - {'object_kind': 'push', 'ref': 'non-existent'}, + '/api/v2/webhook/gitlab/{}/'.format(self.project.slug), + self.gitlab_payload, format='json', ) - trigger_build.assert_has_calls( - [mock.call(force=True, version=mock.ANY, project=self.project)]) + trigger_build.assert_not_called() + + def test_gitlab_webhook_for_tags(self, trigger_build): + client = APIClient() + self.gitlab_payload.update( + object_kind=GITLAB_TAG_PUSH, + ref='v1.0', + ) + client.post( + '/api/v2/webhook/gitlab/{}/'.format(self.project.slug), + self.gitlab_payload, + format='json', + ) + trigger_build.assert_called_with( + force=True, version=self.version_tag, project=self.project + ) + + trigger_build.reset_mock() + self.gitlab_payload.update( + ref='refs/tags/v1.0', + ) + client.post( + '/api/v2/webhook/gitlab/{}/'.format(self.project.slug), + self.gitlab_payload, + format='json', + ) + trigger_build.assert_called_with( + force=True, version=self.version_tag, project=self.project + ) + + trigger_build.reset_mock() + self.gitlab_payload.update( + ref='refs/heads/non-existent', + ) + client.post( + '/api/v2/webhook/gitlab/{}/'.format(self.project.slug), + self.gitlab_payload, + format='json', + ) + trigger_build.assert_not_called() + + @mock.patch('readthedocs.core.views.hooks.sync_repository_task') + def test_gitlab_push_hook_creation( + self, sync_repository_task, trigger_build): + client = APIClient() + self.gitlab_payload.update( + before=GITLAB_NULL_HASH, + after='95790bf891e76fee5e1747ab589903a6a1f80f22', + ) + resp = client.post( + '/api/v2/webhook/gitlab/{}/'.format(self.project.slug), + self.gitlab_payload, + format='json', + ) + self.assertEqual(resp.status_code, status.HTTP_200_OK) + self.assertFalse(resp.data['build_triggered']) + self.assertEqual(resp.data['project'], self.project.slug) + self.assertEqual(resp.data['versions'], [LATEST]) + trigger_build.assert_not_called() + latest_version = self.project.versions.get(slug=LATEST) + sync_repository_task.delay.assert_called_with(latest_version.pk) + + @mock.patch('readthedocs.core.views.hooks.sync_repository_task') + def test_gitlab_push_hook_deletion( + self, sync_repository_task, trigger_build): + client = APIClient() + self.gitlab_payload.update( + before='95790bf891e76fee5e1747ab589903a6a1f80f22', + after=GITLAB_NULL_HASH, + ) + resp = client.post( + '/api/v2/webhook/gitlab/{}/'.format(self.project.slug), + self.gitlab_payload, + format='json', + ) + self.assertEqual(resp.status_code, status.HTTP_200_OK) + self.assertFalse(resp.data['build_triggered']) + self.assertEqual(resp.data['project'], self.project.slug) + self.assertEqual(resp.data['versions'], [LATEST]) + trigger_build.assert_not_called() + latest_version = self.project.versions.get(slug=LATEST) + sync_repository_task.delay.assert_called_with(latest_version.pk) + + 
@mock.patch('readthedocs.core.views.hooks.sync_repository_task') + def test_gitlab_tag_push_hook_creation( + self, sync_repository_task, trigger_build): + client = APIClient() + self.gitlab_payload.update( + object_kind=GITLAB_TAG_PUSH, + before=GITLAB_NULL_HASH, + after='95790bf891e76fee5e1747ab589903a6a1f80f22', + ) + resp = client.post( + '/api/v2/webhook/gitlab/{}/'.format(self.project.slug), + self.gitlab_payload, + format='json', + ) + self.assertEqual(resp.status_code, status.HTTP_200_OK) + self.assertFalse(resp.data['build_triggered']) + self.assertEqual(resp.data['project'], self.project.slug) + self.assertEqual(resp.data['versions'], [LATEST]) + trigger_build.assert_not_called() + latest_version = self.project.versions.get(slug=LATEST) + sync_repository_task.delay.assert_called_with(latest_version.pk) + + @mock.patch('readthedocs.core.views.hooks.sync_repository_task') + def test_gitlab_tag_push_hook_deletion( + self, sync_repository_task, trigger_build): + client = APIClient() + self.gitlab_payload.update( + object_kind=GITLAB_TAG_PUSH, + before='95790bf891e76fee5e1747ab589903a6a1f80f22', + after=GITLAB_NULL_HASH, + ) + resp = client.post( + '/api/v2/webhook/gitlab/{}/'.format(self.project.slug), + self.gitlab_payload, + format='json', + ) + self.assertEqual(resp.status_code, status.HTTP_200_OK) + self.assertFalse(resp.data['build_triggered']) + self.assertEqual(resp.data['project'], self.project.slug) + self.assertEqual(resp.data['versions'], [LATEST]) + trigger_build.assert_not_called() + latest_version = self.project.versions.get(slug=LATEST) + sync_repository_task.delay.assert_called_with(latest_version.pk) def test_gitlab_invalid_webhook(self, trigger_build): """GitLab webhook unhandled event.""" @@ -937,27 +1141,20 @@ def test_bitbucket_webhook(self, trigger_build): """Bitbucket webhook API.""" client = APIClient() client.post( - '/api/v2/webhook/bitbucket/{0}/'.format(self.project.slug), - { - 'push': { - 'changes': [{ - 'new': { - 'name': 'master', - }, - }], - }, - }, + '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug), + self.bitbucket_payload, format='json', ) trigger_build.assert_has_calls( [mock.call(force=True, version=mock.ANY, project=self.project)]) client.post( - '/api/v2/webhook/bitbucket/{0}/'.format(self.project.slug), + '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug), { 'push': { 'changes': [ { 'new': {'name': 'non-existent'}, + 'old': {'name': 'master'}, }, ], }, @@ -969,7 +1166,7 @@ def test_bitbucket_webhook(self, trigger_build): trigger_build_call_count = trigger_build.call_count client.post( - '/api/v2/webhook/bitbucket/{0}/'.format(self.project.slug), + '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug), { 'push': { 'changes': [ @@ -983,6 +1180,42 @@ def test_bitbucket_webhook(self, trigger_build): ) self.assertEqual(trigger_build_call_count, trigger_build.call_count) + @mock.patch('readthedocs.core.views.hooks.sync_repository_task') + def test_bitbucket_push_hook_creation( + self, sync_repository_task, trigger_build): + client = APIClient() + self.bitbucket_payload['push']['changes'][0]['old'] = None + resp = client.post( + '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug), + self.bitbucket_payload, + format='json', + ) + self.assertEqual(resp.status_code, status.HTTP_200_OK) + self.assertFalse(resp.data['build_triggered']) + self.assertEqual(resp.data['project'], self.project.slug) + self.assertEqual(resp.data['versions'], [LATEST]) + trigger_build.assert_not_called() + latest_version = 
self.project.versions.get(slug=LATEST) + sync_repository_task.delay.assert_called_with(latest_version.pk) + + @mock.patch('readthedocs.core.views.hooks.sync_repository_task') + def test_bitbucket_push_hook_deletion( + self, sync_repository_task, trigger_build): + client = APIClient() + self.bitbucket_payload['push']['changes'][0]['new'] = None + resp = client.post( + '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug), + self.bitbucket_payload, + format='json', + ) + self.assertEqual(resp.status_code, status.HTTP_200_OK) + self.assertFalse(resp.data['build_triggered']) + self.assertEqual(resp.data['project'], self.project.slug) + self.assertEqual(resp.data['versions'], [LATEST]) + trigger_build.assert_not_called() + latest_version = self.project.versions.get(slug=LATEST) + sync_repository_task.delay.assert_called_with(latest_version.pk) + def test_bitbucket_invalid_webhook(self, trigger_build): """Bitbucket webhook unhandled event.""" client = APIClient()
Sync versions when creating a new branch/tag

Currently RTD doesn't detect when a branch/tag is deleted; we only detect when a commit is pushed to a branch or a tag is re-tagged. Ref https://github.com/rtfd/readthedocs.org/issues/3192#issuecomment-408704631

## Current status

### GitHub

We only listen to push events for branches and tags in GitHub https://developer.github.com/v3/activity/events/types/#pushevent. We can listen to Create and Delete events https://developer.github.com/v3/activity/events/types/#createevent https://developer.github.com/v3/activity/events/types/#deleteevent

### GitLab

We only listen to push events, which exclude tags https://docs.gitlab.com/ce/user/project/integrations/webhooks.html#push-events. We can listen to tag events with https://docs.gitlab.com/ce/user/project/integrations/webhooks.html#tag-events GitLab doesn't have a separate event for creation/deletion; instead, it sets the `before`/`after` field to `0000000000000000000000000000000000000000`.

### BitBucket

We currently listen to push events, which work for branches and tags https://confluence.atlassian.com/bitbucket/event-payloads-740262817.html#EventPayloads-Push. BitBucket doesn't have a separate event for creation/deletion; instead, it sets the `new` attribute (null if it is a deletion) and the `old` attribute (null if it is a creation).

## Solution

First, we need to listen to (or modify) the above events to have the information about the creations and deletions.

Our sync step is bound to a specific version (and it should exist in the database) https://github.com/rtfd/readthedocs.org/blob/b04b11298f5a45d6f6c6ec3f9bbbf368b942295b/readthedocs/projects/tasks.py#L223-L226 so we can't call it directly. That said, we have 2 options:

1. Trigger a new build on the default branch
2. Call `SyncRepositoryTask` with the default branch https://github.com/rtfd/readthedocs.org/blob/b04b11298f5a45d6f6c6ec3f9bbbf368b942295b/readthedocs/projects/tasks.py#L223-L226 This doesn't trigger a new build, but clones the repo at the default branch and updates the version list.

## Conclusion

The changes for getting the information about new/removed branches and tags are relatively easy, but we need to decide how to sync the versions. With 1), the user can see the build process and that _something is happening_, but we waste resources building the documentation; with 2), we save resources, but the user doesn't see anything (it can be checked in the webhook response anyway).
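As a rough sketch of the detection logic described above (the helper names here are illustrative, not the final view code):

```python
GITLAB_NULL_HASH = '0' * 40


def is_gitlab_create_or_delete(payload):
    # GitLab zeroes out one side of the push on creation/deletion.
    return GITLAB_NULL_HASH in (payload['before'], payload['after'])


def is_bitbucket_create_or_delete(change):
    # Bitbucket nulls `old` on creation and `new` on deletion.
    return change['old'] is None or change['new'] is None
```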
> First, we need to listen to (or modify) the above events to have the information about the creations and deletions

On GitHub, for example, we are only listening for `push` and `pull_request` events. Here is the code where this lives and should be updated to add more events: https://github.com/rtfd/readthedocs.org/blob/b04b11298f5a45d6f6c6ec3f9bbbf368b942295b/readthedocs/oauth/services/github.py#L175 (the `get_webhook_data` method on the other service classes has the same info for GitLab and Bitbucket)

> 2) we save resources but the user doesn't see anything (it can be checked in the webhook response anyway)

I prefer this way. Why? Because I think the user shouldn't care about this at all. This process of syncing the VCS repo with our Versions should be transparent and automatic. I'd say that we can trigger the `SyncRepositoryTask` with the `default_branch` of the project, as you mentioned.

I just realized that we have an inconsistency in our code: even though we are subscribed to `push` and `pull_request` in GitHub, when we receive an event we only consider `push`: https://github.com/rtfd/readthedocs.org/blob/b04b11298f5a45d6f6c6ec3f9bbbf368b942295b/readthedocs/restapi/views/integrations.py#L161

@humitos maybe for a future implementation (#1340); we don't need that at the moment

I meant that we are subscribing webhooks for two different events and processing only one.

> Call to SyncRepositoryTask with the default branch

Feels like the right implementation. That was the general idea behind that bit of code to begin with, so we're mostly just expanding when it gets called to include `delete` events, I think?
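The direction the discussion settles on (trigger a repository sync rather than a build) can be sketched roughly like this, assuming the project's default branch maps to an existing Version row:

```python
from readthedocs.projects.tasks import sync_repository_task


def sync_versions(project):
    # Clone using the project's default branch and refresh the version
    # list, without registering a new build.
    version = (
        project.versions
        .filter(identifier=project.get_default_branch())
        .first()
    )
    if version:
        sync_repository_task.delay(version.pk)
        return version.slug
    return None
```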
2018-11-07T00:54:27
readthedocs/readthedocs.org
4,879
readthedocs__readthedocs.org-4879
[ "4791" ]
ce0948fcbc902c4d870d278399bb0a2c83bd917d
diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -390,6 +390,9 @@ def USE_PROMOS(self): # noqa 'filename': os.path.join(LOGS_ROOT, 'debug.log'), 'formatter': 'default', }, + 'null': { + 'class': 'logging.NullHandler', + }, }, 'loggers': { '': { # root logger @@ -403,5 +406,9 @@ def USE_PROMOS(self): # noqa # Don't double log at the root logger for these. 'propagate': False, }, + 'django.security.DisallowedHost': { + 'handlers': ['null'], + 'propagate': False, + }, }, }
Disable django.security.DisallowedHost from logging

We have properly configured our `ALLOWED_HOSTS` setting and it's blocking these attacks properly. However, this is spamming Sentry with many error logs. Since this is not an error and we don't really need to take any action, we should disable this logger, as mentioned here: https://docs.djangoproject.com/en/1.11/topics/logging/#django-security

Sentry query: https://sentry.io/read-the-docs/readthedocs-org/?query=logger%3A%22django.security.DisallowedHost%22
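A minimal sketch of the settings change the Django docs suggest, assuming a standard dictConfig-style `LOGGING` setting: route `django.security.DisallowedHost` records to a `NullHandler` so they never reach the handlers that report to Sentry.

```python
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        # Handler that silently drops every record sent to it.
        'null': {
            'class': 'logging.NullHandler',
        },
    },
    'loggers': {
        'django.security.DisallowedHost': {
            'handlers': ['null'],
            'propagate': False,
        },
    },
}
```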
@humitos I would like to work on this issue.
2018-11-08T09:30:45
readthedocs/readthedocs.org
4,883
readthedocs__readthedocs.org-4883
[ "4706" ]
cb0dedf0da844240e4ed15b777074c0a72f63129
diff --git a/readthedocs/config/config.py b/readthedocs/config/config.py --- a/readthedocs/config/config.py +++ b/readthedocs/config/config.py @@ -6,7 +6,6 @@ from __future__ import division, print_function, unicode_literals import os -import re from contextlib import contextmanager import six @@ -22,7 +21,6 @@ validate_bool, validate_choice, validate_dict, - validate_directory, validate_file, validate_list, validate_string, @@ -43,12 +41,8 @@ CONFIG_NOT_SUPPORTED = 'config-not-supported' VERSION_INVALID = 'version-invalid' -BASE_INVALID = 'base-invalid' -BASE_NOT_A_DIR = 'base-not-a-directory' CONFIG_SYNTAX_INVALID = 'config-syntax-invalid' CONFIG_REQUIRED = 'config-required' -NAME_REQUIRED = 'name-required' -NAME_INVALID = 'name-invalid' CONF_FILE_REQUIRED = 'conf-file-required' PYTHON_INVALID = 'python-invalid' SUBMODULES_INVALID = 'submodules-invalid' @@ -263,12 +257,6 @@ class BuildConfigV1(BuildConfigBase): """Version 1 of the configuration file.""" - BASE_INVALID_MESSAGE = 'Invalid value for base: {base}' - BASE_NOT_A_DIR_MESSAGE = '"base" is not a directory: {base}' - NAME_REQUIRED_MESSAGE = 'Missing key "name"' - NAME_INVALID_MESSAGE = ( - 'Invalid name "{name}". Valid values must match {name_re}' - ) CONF_FILE_REQUIRED_MESSAGE = 'Missing key "conf_file"' PYTHON_INVALID_MESSAGE = '"python" section must be a mapping.' PYTHON_EXTRA_REQUIREMENTS_INVALID_MESSAGE = ( @@ -306,63 +294,17 @@ def validate(self): ``readthedocs.yml`` config file if not set """ # Validate env_config. - # TODO: this isn't used - self._config['output_base'] = self.validate_output_base() - # Validate the build environment first # Must happen before `validate_python`! self._config['build'] = self.validate_build() # Validate raw_config. Order matters. - # TODO: this isn't used - self._config['name'] = self.validate_name() - # TODO: this isn't used - self._config['base'] = self.validate_base() self._config['python'] = self.validate_python() self._config['formats'] = self.validate_formats() self._config['conda'] = self.validate_conda() self._config['requirements_file'] = self.validate_requirements_file() - def validate_output_base(self): - """Validates that ``output_base`` exists and set its absolute path.""" - assert 'output_base' in self.env_config, ( - '"output_base" required in "env_config"') - output_base = os.path.abspath( - os.path.join( - self.env_config.get('output_base', self.base_path), - ) - ) - return output_base - - def validate_name(self): - """Validates that name exists.""" - name = self.raw_config.get('name', None) - if not name: - name = self.env_config.get('name', None) - if not name: - self.error('name', self.NAME_REQUIRED_MESSAGE, code=NAME_REQUIRED) - name_re = r'^[-_.0-9a-zA-Z]+$' - if not re.match(name_re, name): - self.error( - 'name', - self.NAME_INVALID_MESSAGE.format( - name=name, - name_re=name_re), - code=NAME_INVALID) - - return name - - def validate_base(self): - """Validates that path is a valid directory.""" - if 'base' in self.raw_config: - base = self.raw_config['base'] - else: - base = self.base_path - with self.catch_validation_error('base'): - base = validate_directory(base, self.base_path) - return base - def validate_build(self): """ Validate the build config settings. 
@@ -536,21 +478,6 @@ def validate_formats(self): return formats - @property - def name(self): - """The project name.""" - return self._config['name'] - - @property - def base(self): - """The base directory.""" - return self._config['base'] - - @property - def output_base(self): - """The output base.""" - return self._config['output_base'] - @property def formats(self): """The documentation formats to be built.""" diff --git a/readthedocs/doc_builder/config.py b/readthedocs/doc_builder/config.py --- a/readthedocs/doc_builder/config.py +++ b/readthedocs/doc_builder/config.py @@ -43,8 +43,6 @@ def load_yaml_config(version): 'build': { 'image': img_name, }, - 'output_base': '', - 'name': version.slug, 'defaults': { 'install_project': project.install_project, 'formats': get_default_formats(project),
diff --git a/readthedocs/config/tests/test_config.py b/readthedocs/config/tests/test_config.py --- a/readthedocs/config/tests/test_config.py +++ b/readthedocs/config/tests/test_config.py @@ -24,8 +24,6 @@ CONFIG_NOT_SUPPORTED, CONFIG_REQUIRED, INVALID_KEY, - NAME_INVALID, - NAME_REQUIRED, PYTHON_INVALID, VERSION_INVALID, ) @@ -34,49 +32,19 @@ INVALID_BOOL, INVALID_CHOICE, INVALID_LIST, - INVALID_PATH, - INVALID_STRING, VALUE_NOT_FOUND, ValidationError, ) from .utils import apply_fs -env_config = { - 'output_base': '/tmp', -} - -minimal_config = { - 'name': 'docs', -} - -config_with_explicit_empty_list = { - 'readthedocs.yml': ''' -name: docs -formats: [] -''', -} - -minimal_config_dir = { - 'readthedocs.yml': '''\ -name: docs -''', -} - -multiple_config_dir = { - 'readthedocs.yml': ''' -name: first ---- -name: second - ''', - 'nested': minimal_config_dir, -} - -yaml_extension_config_dir = { - 'readthedocs.yaml': '''\ -name: docs -type: sphinx -''' +yaml_config_dir = { + 'readthedocs.yml': textwrap.dedent( + ''' + formats: + - pdf + ''' + ), } @@ -88,18 +56,6 @@ def get_build_config(config, env_config=None, source_file='readthedocs.yml'): ) -def get_env_config(extra=None): - """Get the minimal env_config for the configuration object.""" - defaults = { - 'output_base': '', - 'name': 'name', - } - if extra is None: - extra = {} - defaults.update(extra) - return defaults - - @pytest.mark.parametrize('files', [ {'readthedocs.ymlmore': ''}, {'first': {'readthedocs.yml': ''}}, {'startreadthedocs.yml': ''}, {'second': {'confuser.txt': 'content'}}, @@ -111,7 +67,7 @@ def test_load_no_config_file(tmpdir, files): apply_fs(tmpdir, files) base = str(tmpdir) with raises(ConfigError) as e: - load(base, env_config) + load(base, {}) assert e.value.code == CONFIG_REQUIRED @@ -121,13 +77,13 @@ def test_load_empty_config_file(tmpdir): }) base = str(tmpdir) with raises(ConfigError): - load(base, env_config) + load(base, {}) def test_minimal_config(tmpdir): - apply_fs(tmpdir, minimal_config_dir) + apply_fs(tmpdir, yaml_config_dir) base = str(tmpdir) - build = load(base, env_config) + build = load(base, {}) assert isinstance(build, BuildConfigV1) @@ -138,7 +94,7 @@ def test_load_version1(tmpdir): ''') }) base = str(tmpdir) - build = load(base, get_env_config({'allow_v2': True})) + build = load(base, {'allow_v2': True}) assert isinstance(build, BuildConfigV1) @@ -149,7 +105,7 @@ def test_load_version2(tmpdir): ''') }) base = str(tmpdir) - build = load(base, get_env_config({'allow_v2': True})) + build = load(base, {'allow_v2': True}) assert isinstance(build, BuildConfigV2) @@ -161,83 +117,70 @@ def test_load_unknow_version(tmpdir): }) base = str(tmpdir) with raises(ConfigError) as excinfo: - load(base, get_env_config({'allow_v2': True})) + load(base, {'allow_v2': True}) assert excinfo.value.code == VERSION_INVALID def test_yaml_extension(tmpdir): """Make sure it's capable of loading the 'readthedocs' file with a 'yaml' extension.""" - apply_fs(tmpdir, yaml_extension_config_dir) + apply_fs(tmpdir, { + 'readthedocs.yaml': textwrap.dedent( + ''' + python: + version: 3 + ''' + ), + }) base = str(tmpdir) - config = load(base, env_config) + config = load(base, {}) assert isinstance(config, BuildConfigV1) def test_build_config_has_source_file(tmpdir): - base = str(apply_fs(tmpdir, minimal_config_dir)) - build = load(base, env_config) + base = str(apply_fs(tmpdir, yaml_config_dir)) + build = load(base, {}) assert build.source_file == os.path.join(base, 'readthedocs.yml') def 
test_build_config_has_list_with_single_empty_value(tmpdir): - base = str(apply_fs(tmpdir, config_with_explicit_empty_list)) - build = load(base, env_config) + base = str(apply_fs(tmpdir, { + 'readthedocs.yml': textwrap.dedent( + ''' + formats: [] + ''' + ) + })) + build = load(base, {}) assert isinstance(build, BuildConfigV1) assert build.formats == [] -def test_config_requires_name(): - build = BuildConfigV1( - {'output_base': ''}, - {}, - source_file='readthedocs.yml', - ) - with raises(InvalidConfig) as excinfo: - build.validate() - assert excinfo.value.key == 'name' - assert excinfo.value.code == NAME_REQUIRED - - -def test_build_requires_valid_name(): - build = BuildConfigV1( - {'output_base': ''}, - {'name': 'with/slashes'}, - source_file='readthedocs.yml', - ) - with raises(InvalidConfig) as excinfo: - build.validate() - assert excinfo.value.key == 'name' - assert excinfo.value.code == NAME_INVALID - - def test_version(): - build = get_build_config({}, get_env_config()) + build = get_build_config({}) assert build.version == '1' def test_doc_type(): build = get_build_config( {}, - get_env_config( - { - 'defaults': { - 'doctype': 'sphinx', - }, - } - ) + { + 'defaults': { + 'doctype': 'sphinx', + }, + } ) build.validate() assert build.doctype == 'sphinx' def test_empty_python_section_is_valid(): - build = get_build_config({'python': {}}, get_env_config()) + build = get_build_config({'python': {}}) build.validate() assert build.python def test_python_section_must_be_dict(): - build = get_build_config({'python': 123}, get_env_config()) + build = get_build_config({'python': 123}) with raises(InvalidConfig) as excinfo: build.validate() assert excinfo.value.key == 'python' @@ -245,7 +188,7 @@ def test_python_section_must_be_dict(): def test_use_system_site_packages_defaults_to_false(): - build = get_build_config({'python': {}}, get_env_config()) + build = get_build_config({'python': {}}) build.validate() # Default is False. assert not build.python.use_system_site_packages @@ -256,13 +199,13 @@ def test_use_system_site_packages_repects_default_value(value): defaults = { 'use_system_packages': value, } - build = get_build_config({}, get_env_config({'defaults': defaults})) + build = get_build_config({}, {'defaults': defaults}) build.validate() assert build.python.use_system_site_packages is value def test_python_pip_install_default(): - build = get_build_config({'python': {}}, get_env_config()) + build = get_build_config({'python': {}}) build.validate() # Default is False. assert build.python.install_with_pip is False @@ -271,7 +214,7 @@ def test_python_pip_install_default(): class TestValidatePythonExtraRequirements(object): def test_it_defaults_to_list(self): - build = get_build_config({'python': {}}, get_env_config()) + build = get_build_config({'python': {}}) build.validate() # Default is an empty list. 
assert build.python.extra_requirements == [] @@ -279,7 +222,6 @@ def test_it_defaults_to_list(self): def test_it_validates_is_a_list(self): build = get_build_config( {'python': {'extra_requirements': 'invalid'}}, - get_env_config(), ) with raises(InvalidConfig) as excinfo: build.validate() @@ -296,7 +238,6 @@ def test_it_uses_validate_string(self, validate_string): 'extra_requirements': ['tests'], }, }, - get_env_config(), ) build.validate() validate_string.assert_any_call('tests') @@ -305,14 +246,13 @@ def test_it_uses_validate_string(self, validate_string): class TestValidateUseSystemSitePackages(object): def test_it_defaults_to_false(self): - build = get_build_config({'python': {}}, get_env_config()) + build = get_build_config({'python': {}}) build.validate() assert build.python.use_system_site_packages is False def test_it_validates_value(self): build = get_build_config( {'python': {'use_system_site_packages': 'invalid'}}, - get_env_config(), ) with raises(InvalidConfig) as excinfo: build.validate() @@ -324,7 +264,6 @@ def test_it_uses_validate_bool(self, validate_bool): validate_bool.return_value = True build = get_build_config( {'python': {'use_system_site_packages': 'to-validate'}}, - get_env_config(), ) build.validate() validate_bool.assert_any_call('to-validate') @@ -333,14 +272,13 @@ def test_it_uses_validate_bool(self, validate_bool): class TestValidateSetupPyInstall(object): def test_it_defaults_to_false(self): - build = get_build_config({'python': {}}, get_env_config()) + build = get_build_config({'python': {}}) build.validate() assert build.python.install_with_setup is False def test_it_validates_value(self): build = get_build_config( {'python': {'setup_py_install': 'this-is-string'}}, - get_env_config(), ) with raises(InvalidConfig) as excinfo: build.validate() @@ -352,7 +290,6 @@ def test_it_uses_validate_bool(self, validate_bool): validate_bool.return_value = True build = get_build_config( {'python': {'setup_py_install': 'to-validate'}}, - get_env_config(), ) build.validate() validate_bool.assert_any_call('to-validate') @@ -361,7 +298,7 @@ def test_it_uses_validate_bool(self, validate_bool): class TestValidatePythonVersion(object): def test_it_defaults_to_a_valid_version(self): - build = get_build_config({'python': {}}, get_env_config()) + build = get_build_config({'python': {}}) build.validate() assert build.python.version == 2 assert build.python_interpreter == 'python2.7' @@ -370,7 +307,6 @@ def test_it_defaults_to_a_valid_version(self): def test_it_supports_other_versions(self): build = get_build_config( {'python': {'version': 3.5}}, - get_env_config(), ) build.validate() assert build.python.version == 3.5 @@ -380,7 +316,6 @@ def test_it_supports_other_versions(self): def test_it_validates_versions_out_of_range(self): build = get_build_config( {'python': {'version': 1.0}}, - get_env_config(), ) with raises(InvalidConfig) as excinfo: build.validate() @@ -390,7 +325,6 @@ def test_it_validates_versions_out_of_range(self): def test_it_validates_wrong_type(self): build = get_build_config( {'python': {'version': 'this-is-string'}}, - get_env_config(), ) with raises(InvalidConfig) as excinfo: build.validate() @@ -400,7 +334,6 @@ def test_it_validates_wrong_type(self): def test_it_validates_wrong_type_right_value(self): build = get_build_config( {'python': {'version': '3.5'}}, - get_env_config(), ) build.validate() assert build.python.version == 3.5 @@ -409,7 +342,6 @@ def test_it_validates_wrong_type_right_value(self): build = get_build_config( {'python': {'version': '3'}}, - 
get_env_config(), ) build.validate() assert build.python.version == 3 @@ -419,12 +351,10 @@ def test_it_validates_wrong_type_right_value(self): def test_it_validates_env_supported_versions(self): build = get_build_config( {'python': {'version': 3.6}}, - env_config=get_env_config( - { - 'python': {'supported_versions': [3.5]}, - 'build': {'image': 'custom'}, - } - ) + env_config={ + 'python': {'supported_versions': [3.5]}, + 'build': {'image': 'custom'}, + }, ) with raises(InvalidConfig) as excinfo: build.validate() @@ -433,12 +363,10 @@ def test_it_validates_env_supported_versions(self): build = get_build_config( {'python': {'version': 3.6}}, - env_config=get_env_config( - { - 'python': {'supported_versions': [3.5, 3.6]}, - 'build': {'image': 'custom'}, - } - ) + env_config={ + 'python': {'supported_versions': [3.5, 3.6]}, + 'build': {'image': 'custom'}, + }, ) build.validate() assert build.python.version == 3.6 @@ -452,7 +380,7 @@ def test_it_respects_default_value(self, value): } build = get_build_config( {}, - get_env_config({'defaults': defaults}), + {'defaults': defaults}, ) build.validate() assert build.python.version == value @@ -461,34 +389,33 @@ def test_it_respects_default_value(self, value): class TestValidateFormats(object): def test_it_defaults_to_empty(self): - build = get_build_config({}, get_env_config()) + build = get_build_config({}) build.validate() assert build.formats == [] def test_it_gets_set_correctly(self): - build = get_build_config({'formats': ['pdf']}, get_env_config()) + build = get_build_config({'formats': ['pdf']}) build.validate() assert build.formats == ['pdf'] def test_formats_can_be_null(self): - build = get_build_config({'formats': None}, get_env_config()) + build = get_build_config({'formats': None}) build.validate() assert build.formats == [] def test_formats_with_previous_none(self): - build = get_build_config({'formats': ['none']}, get_env_config()) + build = get_build_config({'formats': ['none']}) build.validate() assert build.formats == [] def test_formats_can_be_empty(self): - build = get_build_config({'formats': []}, get_env_config()) + build = get_build_config({'formats': []}) build.validate() assert build.formats == [] def test_all_valid_formats(self): build = get_build_config( {'formats': ['pdf', 'htmlzip', 'epub']}, - get_env_config() ) build.validate() assert build.formats == ['pdf', 'htmlzip', 'epub'] @@ -496,7 +423,6 @@ def test_all_valid_formats(self): def test_cant_have_none_as_format(self): build = get_build_config( {'formats': ['htmlzip', None]}, - get_env_config() ) with raises(InvalidConfig) as excinfo: build.validate() @@ -506,7 +432,6 @@ def test_cant_have_none_as_format(self): def test_formats_have_only_allowed_values(self): build = get_build_config( {'formats': ['htmlzip', 'csv']}, - get_env_config() ) with raises(InvalidConfig) as excinfo: build.validate() @@ -514,7 +439,7 @@ def test_formats_have_only_allowed_values(self): assert excinfo.value.code == INVALID_CHOICE def test_only_list_type(self): - build = get_build_config({'formats': 'no-list'}, get_env_config()) + build = get_build_config({'formats': 'no-list'}) with raises(InvalidConfig) as excinfo: build.validate() assert excinfo.value.key == 'format' @@ -523,75 +448,23 @@ def test_only_list_type(self): def test_valid_build_config(): build = BuildConfigV1( - env_config, - minimal_config, + {}, + {}, source_file='readthedocs.yml', ) build.validate() - assert build.name == 'docs' - assert build.base assert build.python assert build.python.install_with_setup is False assert 
build.python.install_with_pip is False assert build.python.use_system_site_packages is False - assert build.output_base - - -class TestValidateBase(object): - - def test_it_validates_to_abspath(self, tmpdir): - apply_fs(tmpdir, {'configs': minimal_config, 'docs': {}}) - with tmpdir.as_cwd(): - source_file = str(tmpdir.join('configs', 'readthedocs.yml')) - build = BuildConfigV1( - get_env_config(), - {'base': '../docs'}, - source_file=source_file, - ) - build.validate() - assert build.base == str(tmpdir.join('docs')) - - @patch('readthedocs.config.config.validate_directory') - def test_it_uses_validate_directory(self, validate_directory): - validate_directory.return_value = 'path' - build = get_build_config({'base': '../my-path'}, get_env_config()) - build.validate() - # Test for first argument to validate_directory - args, kwargs = validate_directory.call_args - assert args[0] == '../my-path' - - def test_it_fails_if_base_is_not_a_string(self, tmpdir): - apply_fs(tmpdir, minimal_config) - with tmpdir.as_cwd(): - build = BuildConfigV1( - get_env_config(), - {'base': 1}, - source_file=str(tmpdir.join('readthedocs.yml')), - ) - with raises(InvalidConfig) as excinfo: - build.validate() - assert excinfo.value.key == 'base' - assert excinfo.value.code == INVALID_STRING - - def test_it_fails_if_base_does_not_exist(self, tmpdir): - apply_fs(tmpdir, minimal_config) - build = BuildConfigV1( - get_env_config(), - {'base': 'docs'}, - source_file=str(tmpdir.join('readthedocs.yml')), - ) - with raises(InvalidConfig) as excinfo: - build.validate() - assert excinfo.value.key == 'base' - assert excinfo.value.code == INVALID_PATH class TestValidateBuild(object): def test_it_fails_if_build_is_invalid_option(self, tmpdir): - apply_fs(tmpdir, minimal_config) + apply_fs(tmpdir, yaml_config_dir) build = BuildConfigV1( - get_env_config(), + {}, {'build': {'image': 3.0}}, source_file=str(tmpdir.join('readthedocs.yml')), ) @@ -601,7 +474,7 @@ def test_it_fails_if_build_is_invalid_option(self, tmpdir): assert excinfo.value.code == INVALID_CHOICE def test_it_fails_on_python_validation(self, tmpdir): - apply_fs(tmpdir, minimal_config) + apply_fs(tmpdir, yaml_config_dir) build = BuildConfigV1( {}, { @@ -617,7 +490,7 @@ def test_it_fails_on_python_validation(self, tmpdir): assert excinfo.value.code == INVALID_CHOICE def test_it_works_on_python_validation(self, tmpdir): - apply_fs(tmpdir, minimal_config) + apply_fs(tmpdir, yaml_config_dir) build = BuildConfigV1( {}, { @@ -630,9 +503,9 @@ def test_it_works_on_python_validation(self, tmpdir): build.validate_python() def test_it_works(self, tmpdir): - apply_fs(tmpdir, minimal_config) + apply_fs(tmpdir, yaml_config_dir) build = BuildConfigV1( - get_env_config(), + {}, {'build': {'image': 'latest'}}, source_file=str(tmpdir.join('readthedocs.yml')), ) @@ -640,9 +513,9 @@ def test_it_works(self, tmpdir): assert build.build.image == 'readthedocs/build:latest' def test_default(self, tmpdir): - apply_fs(tmpdir, minimal_config) + apply_fs(tmpdir, yaml_config_dir) build = BuildConfigV1( - get_env_config(), + {}, {}, source_file=str(tmpdir.join('readthedocs.yml')), ) @@ -652,12 +525,12 @@ def test_default(self, tmpdir): @pytest.mark.parametrize( 'image', ['latest', 'readthedocs/build:3.0', 'rtd/build:latest']) def test_it_priorities_image_from_env_config(self, tmpdir, image): - apply_fs(tmpdir, minimal_config) + apply_fs(tmpdir, yaml_config_dir) defaults = { 'build_image': image, } build = BuildConfigV1( - get_env_config({'defaults': defaults}), + {'defaults': defaults}, {'build': 
{'image': 'latest'}}, source_file=str(tmpdir.join('readthedocs.yml')), ) @@ -666,7 +539,7 @@ def test_it_priorities_image_from_env_config(self, tmpdir, image): def test_use_conda_default_false(): - build = get_build_config({}, get_env_config()) + build = get_build_config({}) build.validate() assert build.conda is None @@ -674,7 +547,6 @@ def test_use_conda_default_false(): def test_use_conda_respects_config(): build = get_build_config( {'conda': {}}, - get_env_config(), ) build.validate() assert isinstance(build.conda, Conda) @@ -684,7 +556,6 @@ def test_validates_conda_file(tmpdir): apply_fs(tmpdir, {'environment.yml': ''}) build = get_build_config( {'conda': {'file': 'environment.yml'}}, - get_env_config(), source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() @@ -693,7 +564,7 @@ def test_validates_conda_file(tmpdir): def test_requirements_file_empty(): - build = get_build_config({}, get_env_config()) + build = get_build_config({}) build.validate() assert build.python.requirements is None @@ -705,7 +576,7 @@ def test_requirements_file_repects_default_value(tmpdir): } build = get_build_config( {}, - get_env_config({'defaults': defaults}), + {'defaults': defaults}, source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() @@ -716,7 +587,6 @@ def test_requirements_file_respects_configuration(tmpdir): apply_fs(tmpdir, {'requirements.txt': ''}) build = get_build_config( {'requirements_file': 'requirements.txt'}, - get_env_config(), source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() @@ -726,7 +596,6 @@ def test_requirements_file_respects_configuration(tmpdir): def test_requirements_file_is_null(tmpdir): build = get_build_config( {'requirements_file': None}, - get_env_config(), source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() @@ -736,7 +605,6 @@ def test_requirements_file_is_null(tmpdir): def test_requirements_file_is_blank(tmpdir): build = get_build_config( {'requirements_file': ''}, - get_env_config(), source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() @@ -744,7 +612,7 @@ def test_requirements_file_is_blank(tmpdir): def test_build_validate_calls_all_subvalidators(tmpdir): - apply_fs(tmpdir, minimal_config) + apply_fs(tmpdir, {}) build = BuildConfigV1( {}, {}, @@ -752,28 +620,22 @@ def test_build_validate_calls_all_subvalidators(tmpdir): ) with patch.multiple( BuildConfigV1, - validate_base=DEFAULT, - validate_name=DEFAULT, validate_python=DEFAULT, - validate_output_base=DEFAULT, ): build.validate() - BuildConfigV1.validate_base.assert_called_with() - BuildConfigV1.validate_name.assert_called_with() BuildConfigV1.validate_python.assert_called_with() - BuildConfigV1.validate_output_base.assert_called_with() def test_load_calls_validate(tmpdir): - apply_fs(tmpdir, minimal_config_dir) + apply_fs(tmpdir, yaml_config_dir) base = str(tmpdir) with patch.object(BuildConfigV1, 'validate') as build_validate: - load(base, env_config) + load(base, {}) assert build_validate.call_count == 1 def test_raise_config_not_supported(): - build = get_build_config({}, get_env_config()) + build = get_build_config({}) build.validate() with raises(ConfigOptionNotSupportedError) as excinfo: build.redirects @@ -799,12 +661,12 @@ def test_as_dict(tmpdir): }, 'requirements_file': 'requirements.txt', }, - get_env_config({ + { 'defaults': { 'doctype': 'sphinx', 'sphinx_configuration': None, }, - }), + }, source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() diff --git a/readthedocs/rtd_tests/tests/test_config_integration.py 
b/readthedocs/rtd_tests/tests/test_config_integration.py --- a/readthedocs/rtd_tests/tests/test_config_integration.py +++ b/readthedocs/rtd_tests/tests/test_config_integration.py @@ -88,8 +88,6 @@ def test_python_supported_versions_default_image_1_0(self, load_config): env_config={ 'allow_v2': mock.ANY, 'build': {'image': 'readthedocs/build:1.0'}, - 'output_base': '', - 'name': mock.ANY, 'defaults': { 'install_project': self.project.install_project, 'formats': [
Remove unused validations from v1 config

`validate_base` and `validate_name` don't have any purpose here, and we'd rather re-purpose those for v2.

Related code: https://github.com/rtfd/readthedocs.org/blob/a98441717ea50b19fe6bb0f1ea52df092dbc8190/readthedocs/config/config.py#L282-L309
2018-11-09T03:57:02
readthedocs/readthedocs.org
4,891
readthedocs__readthedocs.org-4891
[ "4789" ]
af867c1d85a157b7106734fe45a0fabde9385083
diff --git a/readthedocs/core/symlink.py b/readthedocs/core/symlink.py --- a/readthedocs/core/symlink.py +++ b/readthedocs/core/symlink.py @@ -152,10 +152,10 @@ def symlink_cnames(self, domain=None): if domain: domains = [domain] else: - domains = Domain.objects.filter(project=self.project) + domains = Domain.objects.filter(project=self.project).values_list('domain', flat=True) for dom in domains: log_msg = 'Symlinking CNAME: {} -> {}'.format( - dom.domain, + dom, self.project.slug, ) log.info( @@ -167,13 +167,13 @@ def symlink_cnames(self, domain=None): ) # CNAME to doc root - symlink = os.path.join(self.CNAME_ROOT, dom.domain) + symlink = os.path.join(self.CNAME_ROOT, dom) self.environment.run('ln', '-nsf', self.project_root, symlink) # Project symlink project_cname_symlink = os.path.join( self.PROJECT_CNAME_ROOT, - dom.domain, + dom, ) self.environment.run( 'ln', @@ -183,16 +183,21 @@ def symlink_cnames(self, domain=None): ) def remove_symlink_cname(self, domain): - """Remove CNAME symlink.""" - log_msg = 'Removing symlink for CNAME {}'.format(domain.domain) + """ + Remove CNAME symlink. + + :param domain: domain for which symlink is to be removed + :type domain: str + """ + log_msg = 'Removing symlink for CNAME {}'.format(domain) log.info( constants.LOG_TEMPLATE.format( project=self.project.slug, version='', - msg=log_msg, + msg=log_msg ), ) - symlink = os.path.join(self.CNAME_ROOT, domain.domain) + symlink = os.path.join(self.CNAME_ROOT, domain) safe_unlink(symlink) def symlink_subprojects(self): diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -1199,7 +1199,7 @@ def save(self, *args, **kwargs): # pylint: disable=arguments-differ broadcast( type='app', task=tasks.symlink_domain, - args=[self.project.pk, self.pk], + args=[self.project.pk, self.domain], ) def delete(self, *args, **kwargs): # pylint: disable=arguments-differ @@ -1207,7 +1207,7 @@ def delete(self, *args, **kwargs): # pylint: disable=arguments-differ broadcast( type='app', task=tasks.symlink_domain, - args=[self.project.pk, self.pk, True], + args=[self.project.pk, self.domain, True], ) super().delete(*args, **kwargs) diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py --- a/readthedocs/projects/tasks.py +++ b/readthedocs/projects/tasks.py @@ -1074,9 +1074,16 @@ def symlink_project(project_pk): @app.task(queue='web', throws=(BuildEnvironmentWarning,)) -def symlink_domain(project_pk, domain_pk, delete=False): +def symlink_domain(project_pk, domain, delete=False): + """ + Symlink domain. + + :param project_pk: project's pk + :type project_pk: int + :param domain: domain for the symlink + :type domain: str + """ project = Project.objects.get(pk=project_pk) - domain = Domain.objects.get(pk=domain_pk) for symlink in [PublicSymlink, PrivateSymlink]: sym = symlink(project=project) if delete:
Domain deletion task is failing

This method, https://github.com/rtfd/readthedocs.org/blob/df85fefc5f59c7a00bcc1ffe23965efe75e8c895/readthedocs/projects/models.py#L1030-L1034 deletes the Domain from the database and triggers a task called `symlink_domain`. This task tries to get the Domain object from the database using the `pk`, and it fails because the object was already removed.

I see two different approaches here:

1. do not trigger this task and rely on the task that removes orphan symlinks, implemented in https://github.com/rtfd/readthedocs.org/pull/3543
2. refactor the task and the helper methods (`Symlink.symlink_cnames` and `Symlink.remove_symlink_cname`) to depend only on the `Domain.domain` string instead of the whole `Domain` object

Currently, we are depending on 1), but many errors are being generated in our Sentry when the Domain is deleted: https://sentry.io/read-the-docs/readthedocs-org/issues/235601094/

> I saw this code before, but I didn't notice that the Domain object was deleted and then fetched from the DB inside the task: https://github.com/rtfd/readthedocs.org/issues/3493#issuecomment-359579064
@humitos I would like to work on this issue. From my understanding, `Domain.domain` should be passed instead of `Domain.pk` in the `delete` method, like this:

```python3
def delete(self, *args, **kwargs):  # pylint: disable=arguments-differ
    from readthedocs.projects import tasks
    broadcast(
        type='app',
        task=tasks.symlink_domain,
        args=[self.project.pk, self.domain, True],
    )
    super(Domain, self).delete(*args, **kwargs)
```

and then the task and the other helper methods should be corrected accordingly.
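On the task side, the counterpart change would make `symlink_domain` accept the domain string directly, so nothing has to be fetched from the (possibly already deleted) `Domain` row. A sketch along those lines, assuming the imports used by the surrounding tasks module:

```python
from readthedocs.core.symlink import PrivateSymlink, PublicSymlink
from readthedocs.projects.models import Project
from readthedocs.worker import app


@app.task(queue='web')
def symlink_domain(project_pk, domain, delete=False):
    # `domain` is now a plain string, so the task no longer depends on
    # the Domain row still existing in the database.
    project = Project.objects.get(pk=project_pk)
    for symlink_cls in [PublicSymlink, PrivateSymlink]:
        sym = symlink_cls(project=project)
        if delete:
            sym.remove_symlink_cname(domain)
        else:
            sym.symlink_cnames(domain)
```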
2018-11-11T17:35:56
readthedocs/readthedocs.org
4,892
readthedocs__readthedocs.org-4892
[ "4861" ]
164800694a25d769234c6e7019c483f347fe9226
diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -365,7 +365,7 @@ def USE_PROMOS(self): # noqa 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', # NOQA 'PAGE_SIZE': 10, } - SILENCED_SYSTEM_CHECKS = ['fields.W342'] + SILENCED_SYSTEM_CHECKS = ['fields.W342', 'guardian.W001'] # Logging LOG_FORMAT = '%(name)s:%(lineno)s[%(process)d]: %(levelname)s %(message)s'
Warnings on build

## Details

* Read the Docs project URL: [https://readthedocs.org/](https://readthedocs.org/)
* Build URL (if applicable): [http://127.0.0.1:8000/](http://127.0.0.1:8000/)
* Read the Docs username (if applicable): NA
* When I am building the project, a warning is shown.

## Expected Result

Guardian authentication backend must be hooked.

## Actual Result

When I am building the database, loading test data, creating a superuser, or running the server, I'm shown the following warning:

```
System check identified some issues:

WARNINGS:
?: (guardian.W001) Guardian authentication backend is not hooked. You can add this in settings as eg: `AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend', 'guardian.backends.ObjectPermissionBackend')`.
```

## Notes

* Operating System: macOS Mojave

Do you want me to work on this issue?
@rajdeepbharati I don't think RTD is using `django-guardian` as one of AUTHENTICATION_BACKENDS. It is being used for its other functions/classes: `assign`, `get_object_for_user`, and the `GuardedModelAdmin` class. I grepped all the files for `guardian` and found only these uses of it.

Also, AUTHENTICATION_BACKENDS was introduced during the configuration of `django-allauth` ([#commit](https://github.com/rtfd/readthedocs.org/commit/9fd05097527735c25b039d543ec3fd71e59517da)), long after the configuration of `django-guardian` (related commits: [#commit1](https://github.com/rtfd/readthedocs.org/commit/29c982994b0ce9421d5f0a396f2c4f44f1b1bba6), [#commit2](https://github.com/rtfd/readthedocs.org/commit/449efcde8a0fa4861d6083d329fa1ca8111812e8))

I think I saw the same warning on production, so this shouldn't be causing any problems.
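Since `django-guardian` is only used for those object-permission helpers, a pragmatic option is to silence the check rather than hook the backend. In Django settings, something like:

```python
# Silence guardian's system check: ObjectPermissionBackend is not
# actually used for authentication in this project.
SILENCED_SYSTEM_CHECKS = ['fields.W342', 'guardian.W001']
```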
2018-11-11T18:30:50
readthedocs/readthedocs.org
4,900
readthedocs__readthedocs.org-4900
[ "4803" ]
3304193202a388720b6093915f65a19f17f6deeb
diff --git a/readthedocs/builds/managers.py b/readthedocs/builds/managers.py --- a/readthedocs/builds/managers.py +++ b/readthedocs/builds/managers.py @@ -2,7 +2,10 @@ """Build and Version class model Managers.""" +import logging + from django.db import models +from django.core.exceptions import ObjectDoesNotExist from readthedocs.core.utils.extend import ( SettingsOverrideObject, @@ -19,6 +22,8 @@ ) from .querysets import VersionQuerySet +log = logging.getLogger(__name__) + __all__ = ['VersionManager'] @@ -67,6 +72,18 @@ def create_latest(self, **kwargs): defaults.update(kwargs) return self.create(**defaults) + def get_object_or_log(self, **kwargs): + """ + Returns Version object or log. + + It will return the Version object if found for the given kwargs, + otherwise it will log a warning along with all provided kwargs. + """ + try: + return super().get(**kwargs) + except ObjectDoesNotExist: + log.warning('Version not found for given kwargs. %s' % kwargs) + class VersionManager(SettingsOverrideObject): _default_class = VersionManagerBase diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py --- a/readthedocs/projects/tasks.py +++ b/readthedocs/projects/tasks.py @@ -899,7 +899,9 @@ def sync_files( synchronization of build artifacts on each application instance. """ # Clean up unused artifacts - version = Version.objects.get(pk=version_pk) + version = Version.objects.get_object_or_log(pk=version_pk) + if not version: + return if not pdf: remove_dirs([ version.project.get_production_media_path( @@ -959,7 +961,9 @@ def move_files( :param epub: Sync ePub files :type epub: bool """ - version = Version.objects.get(pk=version_pk) + version = Version.objects.get_object_or_log(pk=version_pk) + if not version: + return log.debug( LOG_TEMPLATE.format( project=version.project.slug, @@ -1035,7 +1039,9 @@ def update_search(version_pk, commit, delete_non_commit_files=True): :param commit: Commit that updated index :param delete_non_commit_files: Delete files not in commit from index """ - version = Version.objects.get(pk=version_pk) + version = Version.objects.get_object_or_log(pk=version_pk) + if not version: + return page_list = process_all_json_files(version, build_dir=False) @@ -1130,7 +1136,9 @@ def fileify(version_pk, commit): This is so we have an idea of what files we have in the database. """ - version = Version.objects.get(pk=version_pk) + version = Version.objects.get_object_or_log(pk=version_pk) + if not version: + return project = version.project if not commit: @@ -1220,7 +1228,9 @@ def _manage_imported_files(version, path, commit): @app.task(queue='web') def send_notifications(version_pk, build_pk): - version = Version.objects.get(pk=version_pk) + version = Version.objects.get_object_or_log(pk=version_pk) + if not version: + return build = Build.objects.get(pk=build_pk) for hook in version.project.webhook_notifications.all():
diff --git a/readthedocs/rtd_tests/tests/test_build_notifications.py b/readthedocs/rtd_tests/tests/test_build_notifications.py --- a/readthedocs/rtd_tests/tests/test_build_notifications.py +++ b/readthedocs/rtd_tests/tests/test_build_notifications.py @@ -17,6 +17,13 @@ def setUp(self): self.project = fixture.get(Project) self.version = fixture.get(Version, project=self.project) self.build = fixture.get(Build, version=self.version) + + @patch('readthedocs.builds.managers.log') + def test_send_notification_none_if_wrong_version_pk(self, mock_logger): + self.assertFalse(Version.objects.filter(pk=345343).exists()) + send_notifications(version_pk=345343, build_pk=None) + mock_logger.warning.assert_called_with("Version not found for given kwargs. {'pk': 345343}") + def test_send_notification_none(self): send_notifications(self.version.pk, self.build.pk) diff --git a/readthedocs/rtd_tests/tests/test_celery.py b/readthedocs/rtd_tests/tests/test_celery.py --- a/readthedocs/rtd_tests/tests/test_celery.py +++ b/readthedocs/rtd_tests/tests/test_celery.py @@ -12,6 +12,7 @@ from readthedocs.builds.models import Build from readthedocs.doc_builder.exceptions import VersionLockedError from readthedocs.projects import tasks +from readthedocs.builds.models import Version from readthedocs.projects.exceptions import RepositoryError from readthedocs.projects.models import Project from readthedocs.rtd_tests.base import RTDTestCase @@ -250,3 +251,27 @@ def public_task_exception(): 'error': 'Something bad happened', }, ) + + @patch('readthedocs.builds.managers.log') + def test_sync_files_logging_when_wrong_version_pk(self, mock_logger): + self.assertFalse(Version.objects.filter(pk=345343).exists()) + tasks.sync_files(project_pk=None, version_pk=345343) + mock_logger.warning.assert_called_with("Version not found for given kwargs. {'pk': 345343}") + + @patch('readthedocs.builds.managers.log') + def test_move_files_logging_when_wrong_version_pk(self, mock_logger): + self.assertFalse(Version.objects.filter(pk=345343).exists()) + tasks.move_files(version_pk=345343, hostname=None) + mock_logger.warning.assert_called_with("Version not found for given kwargs. {'pk': 345343}") + + @patch('readthedocs.builds.managers.log') + def test_update_search_logging_when_wrong_version_pk(self, mock_logger): + self.assertFalse(Version.objects.filter(pk=345343).exists()) + tasks.update_search(version_pk=345343, commit=None) + mock_logger.warning.assert_called_with("Version not found for given kwargs. {'pk': 345343}") + + @patch('readthedocs.builds.managers.log') + def test_fileify_logging_when_wrong_version_pk(self, mock_logger): + self.assertFalse(Version.objects.filter(pk=345343).exists()) + tasks.fileify(version_pk=345343, commit=None) + mock_logger.warning.assert_called_with("Version not found for given kwargs. {'pk': 345343}")
The move_files task depends on the Version from the db and fails Similar to #4789, we pass the `Version.pk` to the `move_files` task. If the version is removed before the task is executed, the task fails because the row is no longer in the database. The problem is at this line: https://github.com/rtfd/readthedocs.org/blob/3fbb0c1a7c94bcc979f54d67fad8dc94f74854c8/readthedocs/projects/tasks.py#L838 We should handle this exception properly and not fail the task. We need to think about this a little more, but I suppose that if there is no version in the database, there is nothing to move or do at that point. So, I think that just adding a `try/except` block with a `log.warning` will do the trick. This happens frequently: https://sentry.io/read-the-docs/readthedocs-org/issues/235379529/?query=is:unresolved
@humitos Would it be okay if I take this issue? I think you want something like this ```python3 try: version = Version.objects.get(pk=version_pk) except Version.DoesNotExist: log.warning(f'Version with pk {version_pk} does not exist') ``` I just realized that this also happens at https://github.com/rtfd/readthedocs.org/blob/dfc8fc9eba8dc9caae171ca0b3e8f6a71594e088/readthedocs/projects/tasks.py#L1085-L1087, probably for the same reason: the Version/Project was deleted manually by the user. We should think about a _generic way_ of handling this in all the tasks that depend on objects being in the database. @humitos We can probably implement a custom `get()` method in the model manager of `Version`. I found it here https://github.com/rtfd/readthedocs.org/blob/9b2b17c0fc603267b4b20f5923862c2db82602da/readthedocs/builds/managers.py#L17 There, we can write our own `get()` method, which would probably look something like this ```python3 from django.core.exceptions import ObjectDoesNotExist def get(self, **kwargs): try: return super().get(**kwargs) except ObjectDoesNotExist: # warning or something else ``` Sounds good to me. We may have to name it differently, though. I'd like the name to be more explicit about the use case: `get_object_from_task` or `get_object_or_log`. I'm not sure, but something like that. We will also need to handle the case when the query returns `None` from the task, so we stop the task there without failing. `get_object_or_log` sounds good to me. I am not sure what you meant by **stop the task**; do you mean to `return None` from the task like this: ```python3 @app.task(queue='web') def task_name(version_pk, *args, **kwargs): # code version = Version.objects.get_object_or_log(pk=version_pk) if not version: return None # code ```
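Put together, the thread converges on a small manager method plus a guard at the top of each task. A minimal sketch of that pattern, assuming it lands on the `Version` default manager as discussed (the merged patch above follows the same shape):

```python
import logging

from django.core.exceptions import ObjectDoesNotExist
from django.db import models

log = logging.getLogger(__name__)


class VersionManagerBase(models.Manager):

    def get_object_or_log(self, **kwargs):
        """Return the Version matching ``kwargs``, or log a warning and return None."""
        try:
            return super().get(**kwargs)
        except ObjectDoesNotExist:
            log.warning('Version not found for given kwargs. %s', kwargs)


# Each task receiving a version_pk then guards itself the same way:
#
#     version = Version.objects.get_object_or_log(pk=version_pk)
#     if not version:
#         return  # nothing in the database, so nothing to move or index
```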
2018-11-13T17:49:54
readthedocs/readthedocs.org
4,902
readthedocs__readthedocs.org-4902
[ "2258" ]
4fa2746040aee0aafcdb09e0e5674dcb42ea9809
diff --git a/readthedocs/restapi/client.py b/readthedocs/restapi/client.py --- a/readthedocs/restapi/client.py +++ b/readthedocs/restapi/client.py @@ -13,6 +13,7 @@ import requests from django.conf import settings +from requests.packages.urllib3.util.retry import Retry # noqa from requests_toolbelt.adapters import host_header_ssl from rest_framework.renderers import JSONRenderer from slumber import API, serialize @@ -44,9 +45,21 @@ def setup_api(): else: adapter_class = requests.adapters.HTTPAdapter + # Define a retry mechanism trying to attempt to not fail in the first + # error. Builders hit this issue frequently because the webs are high loaded + retry = Retry( + total=3, + read=3, + connect=3, + status=3, + backoff_factor=0.5, # 0.5, 1, 2 seconds + method_whitelist=('GET', 'PUT', 'PATCH', 'POST'), + status_forcelist=(408, 413, 429, 500, 502, 503, 504), + ) + session.mount( API_HOST, - adapter_class(max_retries=3), + adapter_class(max_retries=retry), ) session.headers.update({'Host': PRODUCTION_DOMAIN}) api_config = {
Protect against web/api 5xx responses When a build is attempted but a web/api instance throws a 5xx response, a number of strange behaviors can result: - Builds will get stuck in the triggered state - Builds will fail randomly when updating the api fails - 5xx responses will be returned from the web servers to users Part of the resolution to this may be defensive protection around intermittent 5xx responses. This may take some operational changes as well, though, as our load balancer should really treat a 5xx response as enough to dislodge the server from the LB group. Raised from #2255
The first one from the list is solved at #3312 I'm not sure I understand the resolution for this. If the request from the builder fails when hitting the API with a 5xx, what can we do? I believe we should be able to have the build HTTP calls do a smart retry backoff. e.g.: * Try the call * If it fails, try again * If that fails, try again in 5s * If that fails, try again in 10s * If that fails, raise an exception If that's the way to go, I suppose we could modify our [slumber client API class](https://github.com/rtfd/readthedocs.org/blob/afd7dd98c1d35d5f826ace6b29b1a89803523939/readthedocs/restapi/client.py#L39-L72) to do this magic. It would be IO blocking, but I suppose that would be fine. @agjohnson labeled this as `Operations`, so maybe he is thinking of something else. Yeah, my original take on this is that if a server is overloaded, we should be detecting this and limiting traffic to the server. It's probably a sign that our web servers are overloaded. Looks like this is still an issue after the Azure move: https://sentry.io/read-the-docs/readthedocs-org/issues/712687013/ Retrying is probably a good first place to be defensive. :+1: Requests might even do this natively.
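For reference, urllib3's `Retry` class (bundled with requests) can express exactly this kind of backoff when mounted on a session adapter; the merged patch above takes this route. A minimal sketch with illustrative totals and status codes; note that `method_whitelist` was the parameter name in the urllib3 releases of this era (newer ones call it `allowed_methods`):

```python
import requests
from urllib3.util.retry import Retry

session = requests.Session()
retry = Retry(
    total=3,
    backoff_factor=0.5,  # sleeps of roughly 0.5s, 1s, 2s between attempts
    status_forcelist=(408, 429, 500, 502, 503, 504),
    method_whitelist=('GET', 'PUT', 'PATCH', 'POST'),
)
# Mount the retrying adapter for every URL under this prefix.
session.mount('https://', requests.adapters.HTTPAdapter(max_retries=retry))
```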
2018-11-14T13:19:47
readthedocs/readthedocs.org
4,910
readthedocs__readthedocs.org-4910
[ "4887" ]
0b16360010e933f6235452411d7c533ba27c14fd
diff --git a/readthedocs/core/forms.py b/readthedocs/core/forms.py --- a/readthedocs/core/forms.py +++ b/readthedocs/core/forms.py @@ -18,8 +18,8 @@ class UserProfileForm(forms.ModelForm): - first_name = CharField(label=_('First name'), required=False) - last_name = CharField(label=_('Last name'), required=False) + first_name = CharField(label=_('First name'), required=False, max_length=30) + last_name = CharField(label=_('Last name'), required=False, max_length=30) class Meta(object): model = UserProfile
diff --git a/readthedocs/rtd_tests/tests/test_profile_views.py b/readthedocs/rtd_tests/tests/test_profile_views.py --- a/readthedocs/rtd_tests/tests/test_profile_views.py +++ b/readthedocs/rtd_tests/tests/test_profile_views.py @@ -35,6 +35,27 @@ def test_edit_profile(self): self.assertEqual(self.user.last_name, 'Docs') self.assertEqual(self.user.profile.homepage, 'readthedocs.org') + def test_edit_profile_with_invalid_values(self): + resp = self.client.get( + reverse('profiles_profile_edit'), + ) + self.assertTrue(resp.status_code, 200) + + resp = self.client.post( + reverse('profiles_profile_edit'), + data={ + 'first_name': 'a' * 31, + 'last_name': 'b' * 31, + 'homepage': 'c' * 101, + } + ) + + FORM_ERROR_FORMAT = 'Ensure this value has at most {} characters (it has {}).' + + self.assertFormError(resp, form='form', field='first_name', errors=FORM_ERROR_FORMAT.format(30, 31)) + self.assertFormError(resp, form='form', field='last_name', errors=FORM_ERROR_FORMAT.format(30, 31)) + self.assertFormError(resp, form='form', field='homepage', errors=FORM_ERROR_FORMAT.format(100, 101)) + def test_delete_account(self): resp = self.client.get( reverse('delete_account')
Validate profile fields on form Related code https://github.com/rtfd/readthedocs.org/blob/164800694a25d769234c6e7019c483f347fe9226/readthedocs/core/forms.py#L20-L46 This will raise an exception if the input length is greater than what the model fields allow. Sentry issue https://sentry.io/read-the-docs/readthedocs-org/issues/666774301/
@stsewd Can I take this issue? As of now, I think there is no limit for `first_name` and the other fields. I am able to set my first_name and last_name to a very long string. ![screenshot from 2018-11-11 13-33-29](https://user-images.githubusercontent.com/29149191/48310531-aa5f9700-e5b6-11e8-900c-440a49f1b1e4.png) I guess you are using SQLite; this can be replicated using Postgres. @stsewd I am unable to set up Postgres as of now, but I found in the Django documentation that there is a limit of 30 characters for both first_name and last_name in Django 1.11 ([reference](https://docs.djangoproject.com/en/1.11/ref/contrib/auth/#django.contrib.auth.models.User.first_name)). Also, the limit for homepage is set to 100 characters. @stsewd Can I start working on this? @dojutsu-user If you can craft a PR that seems to make a substantive difference towards resolving this issue, go ahead. You don't need to ask permission for that. We'll review it when we can. If you're asking for more help in figuring out how to resolve this issue, there's no need to ask if you can start working on it - just ask for help. :)
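The eventual fix mirrors the model limits on the form fields so over-long input becomes a clean form error instead of a database exception. A condensed sketch; the import path for `UserProfile` and the `Meta.fields` list are assumptions for illustration, while the 30/100 bounds come from Django 1.11's `User` model and the profile's `homepage` field:

```python
from django import forms
from django.utils.translation import ugettext_lazy as _

from readthedocs.core.models import UserProfile  # import path assumed


class UserProfileForm(forms.ModelForm):
    # Match the 30-character limit of User.first_name/last_name in Django 1.11.
    first_name = forms.CharField(label=_('First name'), required=False, max_length=30)
    last_name = forms.CharField(label=_('Last name'), required=False, max_length=30)

    class Meta:
        model = UserProfile
        fields = ['first_name', 'last_name', 'homepage']  # illustrative field list
```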
2018-11-16T18:59:57
readthedocs/readthedocs.org
4,915
readthedocs__readthedocs.org-4915
[ "4875" ]
4031b9078efba9948301e89c948a236671086d26
diff --git a/readthedocs/restapi/utils.py b/readthedocs/restapi/utils.py --- a/readthedocs/restapi/utils.py +++ b/readthedocs/restapi/utils.py @@ -2,16 +2,26 @@ """Utility functions that are used by both views and celery tasks.""" from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) import hashlib import logging from rest_framework.pagination import PageNumberPagination -from readthedocs.builds.constants import (LATEST, LATEST_VERBOSE_NAME, - NON_REPOSITORY_VERSIONS, STABLE, - STABLE_VERBOSE_NAME) +from readthedocs.builds.constants import ( + BRANCH, + LATEST, + LATEST_VERBOSE_NAME, + NON_REPOSITORY_VERSIONS, + STABLE, + STABLE_VERBOSE_NAME, + TAG, +) from readthedocs.builds.models import Version from readthedocs.search.indexes import PageIndex, ProjectIndex, SectionIndex @@ -133,15 +143,25 @@ def set_or_create_version(project, slug, version_id, verbose_name, type_): def delete_versions(project, version_data): """Delete all versions not in the current repo.""" - current_versions = [] - if 'tags' in version_data: - for version in version_data['tags']: - current_versions.append(version['identifier']) - if 'branches' in version_data: - for version in version_data['branches']: - current_versions.append(version['identifier']) + # We use verbose_name for tags + # because several tags can point to the same identifier. + versions_tags = [ + version['verbose_name'] + for version in version_data.get('tags', []) + ] + versions_branches = [ + version['identifier'] + for version in version_data.get('branches', []) + ] to_delete_qs = project.versions.all() - to_delete_qs = to_delete_qs.exclude(identifier__in=current_versions) + to_delete_qs = to_delete_qs.exclude( + type=TAG, + verbose_name__in=versions_tags, + ) + to_delete_qs = to_delete_qs.exclude( + type=BRANCH, + identifier__in=versions_branches, + ) to_delete_qs = to_delete_qs.exclude(uploaded=True) to_delete_qs = to_delete_qs.exclude(active=True) to_delete_qs = to_delete_qs.exclude(slug__in=NON_REPOSITORY_VERSIONS)
diff --git a/readthedocs/rtd_tests/tests/test_sync_versions.py b/readthedocs/rtd_tests/tests/test_sync_versions.py --- a/readthedocs/rtd_tests/tests/test_sync_versions.py +++ b/readthedocs/rtd_tests/tests/test_sync_versions.py @@ -105,12 +105,13 @@ def test_new_tag_update_active(self): self.pip.get_stable_version().identifier, ) - def test_new_tag_update_inactive(self): + def test_new_tag_dont_update_inactive(self): Version.objects.create( project=self.pip, identifier='0.8.3', verbose_name='0.8.3', + type=TAG, active=False, ) @@ -142,13 +143,13 @@ def test_new_tag_update_inactive(self): data=json.dumps(version_post_data), content_type='application/json', ) - # Version 0.9 becomes the stable version and active - version_9 = Version.objects.get(slug='0.9') + # Version 0.9 becomes the stable version, but it's inactive + version_9 = self.pip.versions.get(slug='0.9') self.assertEqual( version_9.identifier, self.pip.get_stable_version().identifier, ) - self.assertTrue(version_9.active) + self.assertFalse(version_9.active) # Version 0.8.3 is still inactive version_8 = Version.objects.get(slug='0.8.3') @@ -651,6 +652,97 @@ def test_machine_attr_when_user_define_latest_branch_and_delete_it(self): ) self.assertTrue(version_latest.machine) + def test_deletes_version_with_same_identifier(self): + version_post_data = { + 'branches': [ + { + 'identifier': 'origin/master', + 'verbose_name': 'master', + }, + ], + 'tags': [ + { + 'identifier': '1234', + 'verbose_name': 'one', + }, + ], + } + + resp = self.client.post( + reverse('project-sync-versions', args=[self.pip.pk]), + data=json.dumps(version_post_data), + content_type='application/json', + ) + self.assertEqual(resp.status_code, 200) + + # We only have one version with an identifier `1234` + self.assertEqual( + self.pip.versions.filter(identifier='1234').count(), + 1 + ) + + # We add a new tag with the same identifier + version_post_data = { + 'branches': [ + { + 'identifier': 'origin/master', + 'verbose_name': 'master', + }, + ], + 'tags': [ + { + 'identifier': '1234', + 'verbose_name': 'two', + }, + { + 'identifier': '1234', + 'verbose_name': 'one', + }, + ], + } + + resp = self.client.post( + reverse('project-sync-versions', args=[self.pip.pk]), + data=json.dumps(version_post_data), + content_type='application/json', + ) + self.assertEqual(resp.status_code, 200) + + # We have two versions with an identifier `1234` + self.assertEqual( + self.pip.versions.filter(identifier='1234').count(), + 2 + ) + + # We delete one version with identifier `1234` + version_post_data = { + 'branches': [ + { + 'identifier': 'origin/master', + 'verbose_name': 'master', + }, + ], + 'tags': [ + { + 'identifier': '1234', + 'verbose_name': 'one', + }, + ], + } + + resp = self.client.post( + reverse('project-sync-versions', args=[self.pip.pk]), + data=json.dumps(version_post_data), + content_type='application/json', + ) + self.assertEqual(resp.status_code, 200) + + # We have only one version with an identifier `1234` + self.assertEqual( + self.pip.versions.filter(identifier='1234').count(), + 1 + ) + class TestStableVersion(TestCase): fixtures = ['eric', 'test_data']
Versions aren't deleted if there is one with the same commit If we create two versions pointing to the same commit (say, master and a tag on master) and then delete the tag, the tag isn't deleted from the versions list when syncing the repository (because master has the same commit identifier). This is also seen when creating two tags that point to the same commit.
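The fix in the patch above boils down to matching tags and branches separately: tags by `verbose_name` (several tags can point at the same commit identifier) and branches by `identifier`. A condensed sketch of that queryset logic, omitting the extra exclusions (uploaded, active, non-repository versions) the real code keeps:

```python
from readthedocs.builds.constants import BRANCH, TAG


def delete_versions(project, version_data):
    # Tags are matched by name because several tags can share one commit;
    # branches keep matching by commit identifier.
    versions_tags = [v['verbose_name'] for v in version_data.get('tags', [])]
    versions_branches = [v['identifier'] for v in version_data.get('branches', [])]

    to_delete_qs = project.versions.all()
    to_delete_qs = to_delete_qs.exclude(type=TAG, verbose_name__in=versions_tags)
    to_delete_qs = to_delete_qs.exclude(type=BRANCH, identifier__in=versions_branches)
    return to_delete_qs
```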
2018-11-19T20:02:13
readthedocs/readthedocs.org
4,929
readthedocs__readthedocs.org-4929
[ "3540" ]
b170de972be8dfe99318ab7533a72c8bf6ec32a1
diff --git a/readthedocs/core/symlink.py b/readthedocs/core/symlink.py --- a/readthedocs/core/symlink.py +++ b/readthedocs/core/symlink.py @@ -52,21 +52,27 @@ fabric -> rtd-builds/fabric/en/latest/ # single version """ -from __future__ import absolute_import, unicode_literals -from builtins import object +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +import logging import os import shutil -import logging from collections import OrderedDict +from builtins import object from django.conf import settings from readthedocs.builds.models import Version -from readthedocs.core.utils.extend import SettingsOverrideObject from readthedocs.core.utils import safe_makedirs, safe_unlink +from readthedocs.core.utils.extend import SettingsOverrideObject +from readthedocs.doc_builder.environments import LocalEnvironment from readthedocs.projects import constants from readthedocs.projects.models import Domain -from readthedocs.projects.utils import run log = logging.getLogger(__name__) @@ -83,6 +89,7 @@ def __init__(self, project): self.subproject_root = os.path.join( self.project_root, 'projects' ) + self.environment = LocalEnvironment(project) self.sanity_check() def sanity_check(self): @@ -146,17 +153,27 @@ def symlink_cnames(self, domain=None): else: domains = Domain.objects.filter(project=self.project) for dom in domains: - log_msg = 'Symlinking CNAME: {0} -> {1}'.format(dom.domain, self.project.slug) - log.info(constants.LOG_TEMPLATE.format(project=self.project.slug, - version='', msg=log_msg)) + log_msg = 'Symlinking CNAME: {} -> {}'.format( + dom.domain, self.project.slug + ) + log.info( + constants.LOG_TEMPLATE.format( + project=self.project.slug, + version='', msg=log_msg + ) + ) # CNAME to doc root symlink = os.path.join(self.CNAME_ROOT, dom.domain) - run(['ln', '-nsf', self.project_root, symlink]) + self.environment.run('ln', '-nsf', self.project_root, symlink) # Project symlink - project_cname_symlink = os.path.join(self.PROJECT_CNAME_ROOT, dom.domain) - run(['ln', '-nsf', self.project.doc_path, project_cname_symlink]) + project_cname_symlink = os.path.join( + self.PROJECT_CNAME_ROOT, dom.domain + ) + self.environment.run( + 'ln', '-nsf', self.project.doc_path, project_cname_symlink + ) def remove_symlink_cname(self, domain): """Remove CNAME symlink.""" @@ -201,10 +218,12 @@ def symlink_subprojects(self): # TODO this should use os.symlink, not a call to shell. For now, # this passes command as a list to be explicit about escaping # characters like spaces. 
- status, _, stderr = run(['ln', '-nsf', docs_dir, symlink]) - if status > 0: - log.error('Could not symlink path: status=%d error=%s', - status, stderr) + result = self.environment.run('ln', '-nsf', docs_dir, symlink) + if result.exit_code > 0: + log.error( + 'Could not symlink path: status=%d error=%s', + result.exit_code, result.error + ) # Remove old symlinks if os.path.exists(self.subproject_root): @@ -233,12 +252,16 @@ def symlink_translations(self): for (language, slug) in list(translations.items()): - log_msg = 'Symlinking translation: {0}->{1}'.format(language, slug) - log.info(constants.LOG_TEMPLATE.format(project=self.project.slug, - version='', msg=log_msg)) + log_msg = 'Symlinking translation: {}->{}'.format(language, slug) + log.info( + constants.LOG_TEMPLATE.format( + project=self.project.slug, + version='', msg=log_msg + ) + ) symlink = os.path.join(self.project_root, language) docs_dir = os.path.join(self.WEB_ROOT, slug, language) - run(['ln', '-nsf', docs_dir, symlink]) + self.environment.run('ln', '-nsf', docs_dir, symlink) # Remove old symlinks for lang in os.listdir(self.project_root): @@ -268,9 +291,13 @@ def symlink_single_version(self): # Create symlink if version is not None: - docs_dir = os.path.join(settings.DOCROOT, self.project.slug, - 'rtd-builds', version.slug) - run(['ln', '-nsf', docs_dir, symlink]) + docs_dir = os.path.join( + settings.DOCROOT, + self.project.slug, + 'rtd-builds', + version.slug + ) + self.environment.run('ln', '-nsf', docs_dir, symlink) def symlink_versions(self): """ @@ -280,7 +307,9 @@ def symlink_versions(self): HOME/user_builds/<project>/rtd-builds/<version> """ versions = set() - version_dir = os.path.join(self.WEB_ROOT, self.project.slug, self.project.language) + version_dir = os.path.join( + self.WEB_ROOT, self.project.slug, self.project.language + ) # Include active public versions, # as well as public versions that are built but not active, for archived versions version_queryset = self.get_version_queryset() @@ -289,11 +318,21 @@ def symlink_versions(self): safe_makedirs(version_dir) for version in version_queryset: log_msg = 'Symlinking Version: {}'.format(version) - log.info(constants.LOG_TEMPLATE.format(project=self.project.slug, - version='', msg=log_msg)) + log.info( + constants.LOG_TEMPLATE.format( + project=self.project.slug, + version='', + msg=log_msg + ) + ) symlink = os.path.join(version_dir, version.slug) - docs_dir = os.path.join(settings.DOCROOT, self.project.slug, 'rtd-builds', version.slug) - run(['ln', '-nsf', docs_dir, symlink]) + docs_dir = os.path.join( + settings.DOCROOT, + self.project.slug, + 'rtd-builds', + version.slug + ) + self.environment.run('ln', '-nsf', docs_dir, symlink) versions.add(version.slug) # Remove old symlinks diff --git a/readthedocs/projects/utils.py b/readthedocs/projects/utils.py --- a/readthedocs/projects/utils.py +++ b/readthedocs/projects/utils.py @@ -2,16 +2,16 @@ """Utility functions used by projects.""" from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) -import fnmatch import logging import os -import subprocess -import traceback -import six -from builtins import object, open +from builtins import open from django.conf import settings log = logging.getLogger(__name__) @@ -32,82 +32,6 @@ def version_from_slug(slug, version): return v -def find_file(filename): - """ - Recursively find matching file from the current working path. 
- - :param file: Filename to match - :returns: A list of matching filenames. - """ - matches = [] - for root, __, filenames in os.walk('.'): - for match in fnmatch.filter(filenames, filename): - matches.append(os.path.join(root, match)) - return matches - - -def run(*commands): - """ - Run one or more commands. - - Each argument in `commands` can be passed as a string or as a list. Passing - as a list is the preferred method, as space escaping is more explicit and it - avoids the need for executing anything in a shell. - - If more than one command is given, then this is equivalent to - chaining them together with ``&&``; if all commands succeed, then - ``(status, out, err)`` will represent the last successful command. - If one command failed, then ``(status, out, err)`` will represent - the failed command. - - :returns: ``(status, out, err)`` - """ - environment = os.environ.copy() - environment['READTHEDOCS'] = 'True' - if 'DJANGO_SETTINGS_MODULE' in environment: - del environment['DJANGO_SETTINGS_MODULE'] - if 'PYTHONPATH' in environment: - del environment['PYTHONPATH'] - # Remove PYTHONHOME env variable if set, otherwise pip install of requirements - # into virtualenv will install incorrectly - if 'PYTHONHOME' in environment: - del environment['PYTHONHOME'] - cwd = os.getcwd() - if not commands: - raise ValueError('run() requires one or more command-line strings') - - for command in commands: - # If command is a string, split it up by spaces to pass into Popen. - # Otherwise treat the command as an iterable. - if isinstance(command, six.string_types): - run_command = command.split() - else: - try: - run_command = list(command) - command = ' '.join(command) - except TypeError: - run_command = command - log.debug('Running command: cwd=%s command=%s', cwd, command) - try: - p = subprocess.Popen( - run_command, - cwd=cwd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=environment, - ) - - out, err = p.communicate() - ret = p.returncode - except OSError: - out = '' - err = traceback.format_exc() - ret = -1 - log.exception('Command failed') - - return (ret, out, err) - - def safe_write(filename, contents): """ Normalize and write to filename. @@ -126,9 +50,3 @@ def safe_write(filename, contents): with open(filename, 'w', encoding='utf-8', errors='ignore') as fh: fh.write(contents) fh.close() - - -class DictObj(object): - - def __getattr__(self, attr): - return self.__dict__.get(attr)
Remove yet another command execution pattern `projects/utils.py` has yet another command execution pattern. We should remove this and move it to the `BuildEnvironment` pattern. Raised in rtfd/readthedocs.org#3520
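In practice, the migration means the symlink code stops calling the module-level `run()` helper and routes commands through an environment object, which returns a result object with `exit_code`/`error` instead of a `(status, out, err)` tuple. A rough sketch of the shape; the `link` helper is hypothetical, and `LocalEnvironment` is the class the patch above uses:

```python
from readthedocs.doc_builder.environments import LocalEnvironment


class Symlink:

    def __init__(self, project):
        self.project = project
        # All shell commands go through a single environment abstraction.
        self.environment = LocalEnvironment(project)

    def link(self, docs_dir, symlink):  # hypothetical helper for illustration
        result = self.environment.run('ln', '-nsf', docs_dir, symlink)
        if result.exit_code != 0:
            raise RuntimeError(result.error)
```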
2018-11-27T18:17:36
readthedocs/readthedocs.org
4,937
readthedocs__readthedocs.org-4937
[ "3743" ]
4cf6a2ee8a86d6366e726ac53d696f3bae7dbdba
diff --git a/readthedocs/builds/models.py b/readthedocs/builds/models.py --- a/readthedocs/builds/models.py +++ b/readthedocs/builds/models.py @@ -213,10 +213,18 @@ def save(self, *args, **kwargs): # pylint: disable=arguments-differ def delete(self, *args, **kwargs): # pylint: disable=arguments-differ from readthedocs.projects import tasks log.info('Removing files for version %s', self.slug) - broadcast(type='app', task=tasks.clear_artifacts, args=[self.get_artifact_paths()]) broadcast( - type='app', task=tasks.symlink_project, args=[self.project.pk]) + type='app', + task=tasks.clear_artifacts, + args=[self.get_artifact_paths()], + ) + project_pk = self.project.pk super(Version, self).delete(*args, **kwargs) + broadcast( + type='app', + task=tasks.symlink_project, + args=[project_pk], + ) @property def identifier_friendly(self):
Clean deletion of a Version When a Version is deleted, we are triggering `clear_artifacts` _and_ `symlink_project`, but the latter is triggered _before_ the object deletion; so, I think if the task is run before the object is effectively deleted, the symlink won't be properly synced: https://github.com/rtfd/readthedocs.org/blob/5472906a067adc60f42556108993255393408410/readthedocs/builds/models.py#L181-L187 To fix this we could chain some Celery tasks in this order: 1. trigger `clear_artifacts` 1. delete the Version object 1. trigger `symlink_project` Take into account that the first and last have to be executed in a broadcast call, but the second one should be executed just once since it affects the database. > This method `Version.delete` is not used in the codebase at all, so I'm not sure if this is a priority. I found that all the Versions are activated/deactivated but not deleted. Ref: https://github.com/rtfd/readthedocs.org/pull/3649#issuecomment-370915276
We can probably use Celery task chaining to accomplish this. > This method Version.delete is not used in the codebase at all, so I'm not sure if this is a priority. I found that all the Versions are activated/deactivated but not deleted. Actually, we delete the versions when syncing the versions https://github.com/rtfd/readthedocs.org/blob/4fbe0afde6bd0fc9fc14baee79dd361d68f714cb/readthedocs/restapi/utils.py#L134-L154 https://github.com/rtfd/readthedocs.org/blob/4fbe0afde6bd0fc9fc14baee79dd361d68f714cb/readthedocs/restapi/views/model_views.py#L196-L196 I'm not sure about chaining the tasks. What I understand is that we access the versions from the `symlink_project` task, which is in a race condition with the deletion of the current version. So, given that `clear_artifacts` is in a race condition with `symlink_project`, we can just change the call order and that would be enough. > which is in a race condition with the deletion of the current version. What would the race condition here be? If we remove a version and try to symlink, does the symlink stay? `symlink_project` accesses all versions while a version is being deleted at the same time. So the symlink would probably stay.
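Condensed, the ordering fix amounts to capturing what the broadcast needs before the row disappears and symlinking only after the deletion has happened. A minimal sketch of the `Version.delete` method body (with `broadcast` and `tasks` imported as in `builds/models.py`):

```python
def delete(self, *args, **kwargs):
    # Clear artifacts first, while self still knows its paths.
    broadcast(type='app', task=tasks.clear_artifacts, args=[self.get_artifact_paths()])
    # Remember the project before the version row is gone...
    project_pk = self.project.pk
    super().delete(*args, **kwargs)
    # ...and re-symlink only afterwards, so symlink_project no longer
    # races the deletion of the version it would otherwise still see.
    broadcast(type='app', task=tasks.symlink_project, args=[project_pk])
```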
2018-11-29T15:45:45
readthedocs/readthedocs.org
4,938
readthedocs__readthedocs.org-4938
[ "4823" ]
46b5e0681392230601539257e0ed811060e219c5
diff --git a/readthedocs/doc_builder/python_environments.py b/readthedocs/doc_builder/python_environments.py --- a/readthedocs/doc_builder/python_environments.py +++ b/readthedocs/doc_builder/python_environments.py @@ -2,16 +2,21 @@ """An abstraction over virtualenv and Conda environments.""" from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) +import copy import itertools import json import logging import os import shutil -from builtins import object, open import six +from builtins import object, open from django.conf import settings from readthedocs.doc_builder.config import load_yaml_config @@ -223,6 +228,24 @@ def setup_base(self): def install_core_requirements(self): """Install basic Read the Docs requirements into the virtualenv.""" + pip_install_cmd = [ + 'python', + self.venv_bin(filename='pip'), + 'install', + '--upgrade', + '--cache-dir', + self.project.pip_cache_path, + ] + + # Install latest pip first, + # so it is used when installing the other requirements. + cmd = pip_install_cmd + ['pip'] + self.build_env.run( + *cmd, + bin_path=self.venv_bin(), + cwd=self.checkout_path + ) + requirements = [ 'Pygments==2.2.0', # Assume semver for setuptools version, support up to next backwards @@ -255,14 +278,7 @@ def install_core_requirements(self): 'readthedocs-sphinx-ext<0.6' ]) - cmd = [ - 'python', - self.venv_bin(filename='pip'), - 'install', - '--upgrade', - '--cache-dir', - self.project.pip_cache_path, - ] + cmd = copy.copy(pip_install_cmd) if self.config.python.use_system_site_packages: # Other code expects sphinx-build to be installed inside the # virtualenv. Using the -I option makes sure it gets installed
diff --git a/readthedocs/rtd_tests/tests/test_doc_building.py b/readthedocs/rtd_tests/tests/test_doc_building.py --- a/readthedocs/rtd_tests/tests/test_doc_building.py +++ b/readthedocs/rtd_tests/tests/test_doc_building.py @@ -1193,7 +1193,7 @@ def test_install_core_requirements_sphinx(self, checkout_path): ] requirements = self.base_requirements + requirements_sphinx args = self.pip_install_args + requirements - self.build_env_mock.run.assert_called_once() + self.assertEqual(self.build_env_mock.run.call_count, 2) self.assertArgsStartsWith(args, self.build_env_mock.run) @patch('readthedocs.projects.models.Project.checkout_path') @@ -1212,7 +1212,7 @@ def test_install_core_requirements_mkdocs(self, checkout_path): ] requirements = self.base_requirements + requirements_mkdocs args = self.pip_install_args + requirements - self.build_env_mock.run.assert_called_once() + self.assertEqual(self.build_env_mock.run.call_count, 2) self.assertArgsStartsWith(args, self.build_env_mock.run) @patch('readthedocs.projects.models.Project.checkout_path')
Always install the latest pip version Currently, the pip version comes from our Docker images; since we don't release them too often, the pip version gets outdated very fast. Here @humitos proposed installing the latest pip as a build step https://github.com/rtfd/readthedocs.org/issues/4808#issuecomment-433740753 I already have some code that makes that real (a description here https://github.com/rtfd/readthedocs.org/issues/4808#issuecomment-434119310) Do you agree with this? Should we add a feature flag first? I don't think any project depends on a specific pip version to install its packages; and if a project does depend on a pip version to build its docs, specifying that in its requirements will work.
> I don't think any project depends on a specific pip version I agree with this. > if they depend on a pip version to build their docs, specifying that on their requirements will work. This is not true. If *I need* (for some reason) `pip==9.0.0` and I pin it in my requirements, all the requirements will be installed with the `pip` version already installed in the virtualenv (`18.0.0`, for example). At the end of the command execution, all my packages will have been installed with `pip==18.0.0`, and I'll have `pip==9.0.0` available in the virtualenv, but it was not used to install anything. > Should we add a feature flag first? I don't see too much value here beyond a precaution. I mean, depending on an unpinned package is kind of dangerous because it's something that we don't control and could be anything. Also, if a new release of `pip` breaks, all of our builds will break. I think it's preferable to add a Feature flag here that will install the latest `pip` by default for all the projects. If we notice that this fails for some reason, we can easily go back by picking those packages that are failing. > At the end of the command execution, all my packages were installed with pip==18.0.0 and now I have pip==9.0.0 available on the virtualenv, but it was not used to install anything. What I mean is that the project itself may depend on pip as a dependency, not to install its packages. > Also, if the new release of pip breaks, all of our builds will break. I think it's preferable to add a Feature flag here that will install the latest pip by default for all the projects. If we notice that this fails for some reason, we can easily go back by picking those packages that are failing. What about pinning pip and then updating it on each release? > What about pinning pip and then updating it on each release? Yes. I think this is the best approach. It has been easier to update some Python code than the Docker image (I would like to release more frequently, though). So, I propose to run pip install -U pip==18.1 as the first step before creating the virtualenv (this can be added as a method in the VirtualEnvironment class and run first). That `18.1` can be updated just by changing the code (similar to the other reqs that we install in the venv after creating it). Also, I'd add a feature flag around that pinning so we can mark some projects to always upgrade to the latest without pinning, for example. Once there, we can take a different approach, maybe a better one, but we have something already working that does not require tons of work. > as the first step before creating the virtualenv I wasn't able to do that https://github.com/rtfd/readthedocs.org/issues/4808#issuecomment-434119310 I wasn't able to install the latest version of pip before the virtualenv creation, because the command would need to be run with sudo (we don't have sudo anyway...). We could use this option https://virtualenv.pypa.io/en/stable/userguide/#the-extra-search-dir-option or, more easily, create the virtualenv with the old pip version but update pip right after the virtualenv creation; that way we are able to install the core and user requirements using the latest pip. Either way works for me. Using `--extra-search-dir` is more robust to me, but there is a small problem with it: we will need to download the new version of `pip`/`setuptools` first. So, it would be pip download pip setuptools and then python -m virtualenv --extra-search-dir .
Upgrading `pip` after creating the virtualenv is easier, but if the first `pip` is broken for whatever reason, we won't be able to upgrade it. If the first pip is broken, all docs will be broken anyway, and `--extra-search-dir` wouldn't help because `pip download` wouldn't work either :P I can say that it will be easier for the user to see what's going on if we install pip after the env creation.
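The simpler route the thread converges on, upgrading pip right after the virtualenv exists and before anything else is installed, looks roughly like this inside the Python environment class (a sketch only; `self.venv_bin`, `self.build_env`, and `self.project.pip_cache_path` come from the surrounding code):

```python
pip_install_cmd = [
    'python',
    self.venv_bin(filename='pip'),
    'install',
    '--upgrade',
    '--cache-dir',
    self.project.pip_cache_path,
]

# Upgrade pip itself first, so the newest pip is the one that installs
# all the core and user requirements afterwards.
self.build_env.run(
    *(pip_install_cmd + ['pip']),
    bin_path=self.venv_bin(),
    cwd=self.checkout_path,
)
```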
2018-11-29T18:35:06
readthedocs/readthedocs.org
4,939
readthedocs__readthedocs.org-4939
[ "1888" ]
b15e5d39703b1f110ae0091be204cf29bc92eb4a
diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -35,6 +35,7 @@ class Backend(BaseVCS): supports_branches = True supports_submodules = True fallback_branch = 'master' # default branch + repo_depth = 50 def __init__(self, *args, **kwargs): super(Backend, self).__init__(*args, **kwargs) @@ -131,7 +132,8 @@ def validate_submodules(self, config): def fetch(self): code, stdout, stderr = self.run( - 'git', 'fetch', '--tags', '--prune', '--prune-tags', + 'git', 'fetch', '--depth', str(self.repo_depth), + '--tags', '--prune', '--prune-tags', ) if code != 0: raise RepositoryError @@ -150,7 +152,8 @@ def checkout_revision(self, revision=None): def clone(self): """Clones the repository.""" code, stdout, stderr = self.run( - 'git', 'clone', self.repo_url, '.' + 'git', 'clone', '--depth', str(self.repo_depth), + '--no-single-branch', self.repo_url, '.' ) if code != 0: raise RepositoryError
Optional use of shallow clone for git repos It would be great if there were an option to enable shallow cloning for repositories. If set, RtD should use a shallow clone (the `--depth 1` option of `git clone`) instead of a full clone where possible. This will result in huge disk space savings for projects with a large history. E.g., for one of our projects the `/www/readthedocs.org/user_builds/{project}/checkouts/latest` folder takes up to 400 MB; when cloned with `--depth 1`, it takes up only 12 MB.
This would definitely be worth looking into; it might save a sizable chunk of storage. I haven't looked into this at all, but the `--depth` option would need to be supported by `git fetch`, as we don't actually run a git clone except on initial import, afaik. Looks like we can now support this with the `--no-single-branch` option on `git clone`. There doesn't appear to be a similar option for `git fetch`, though, so we might not be able to reliably update the list of branches while keeping a shallow clone on disk. If someone could test that this is possible, it would be a pretty sizable win in terms of disk space needed. What exactly do you mean by _update the list of branches_? If it is simply obtaining a list of all branches and tags, it can be done using the `git ls-remote` command. ``` bash git ls-remote --heads --tags https://github.com/rtfd/readthedocs.org.git ``` You don't even need to clone the repository to do so. See https://www.kernel.org/pub/software/scm/git/docs/git-ls-remote.html Travis uses `--depth=50`. We have the depth option in the fetch command too https://git-scm.com/docs/git-fetch/2.17.0#git-fetch---depthltdepthgt; I'm testing it, and it looks like something that could work for every project. Any reason not to do that on every project? Also, depth=1 looks fine. Do we still get branch/tag info with a shallow clone? Yeah, we do; actually, I just have a POC for this. So, we could add a feature flag to test it and force everyone to do a shallow clone, or make it an option for the users. The only problem I can see is where a custom extension needs info from other commits, I guess?
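Putting those pieces together, a shallow-but-multi-branch setup is a couple of flags on the existing clone and fetch commands. A sketch of what the git backend could run, with a Travis-style depth of 50 (the exact depth is a judgment call, and the import paths are taken from the surrounding codebase):

```python
from readthedocs.projects.exceptions import RepositoryError
from readthedocs.vcs_support.base import BaseVCS


class Backend(BaseVCS):
    repo_depth = 50  # deep enough for most history lookups, shallow enough to save disk

    def clone(self):
        # --no-single-branch keeps every remote branch visible even though
        # the history itself is truncated to repo_depth commits.
        code, stdout, stderr = self.run(
            'git', 'clone', '--depth', str(self.repo_depth),
            '--no-single-branch', self.repo_url, '.',
        )
        if code != 0:
            raise RepositoryError

    def fetch(self):
        # git fetch accepts --depth as well, so updates stay shallow too.
        code, stdout, stderr = self.run(
            'git', 'fetch', '--depth', str(self.repo_depth), '--tags', '--prune',
        )
        if code != 0:
            raise RepositoryError
```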
2018-11-29T21:00:11
readthedocs/readthedocs.org
4,951
readthedocs__readthedocs.org-4951
[ "4934" ]
46b5e0681392230601539257e0ed811060e219c5
diff --git a/readthedocs/vcs_support/backends/svn.py b/readthedocs/vcs_support/backends/svn.py --- a/readthedocs/vcs_support/backends/svn.py +++ b/readthedocs/vcs_support/backends/svn.py @@ -58,7 +58,7 @@ def up(self): def co(self, identifier=None): self.make_clean_working_dir() if identifier: - url = self.base_url + identifier + url = self.get_url(self.base_url, identifier) else: url = self.repo_url retcode, out, err = self.run('svn', 'checkout', url, '.') @@ -104,3 +104,9 @@ def commit(self): def checkout(self, identifier=None): super(Backend, self).checkout() return self.co(identifier) + + def get_url(self, base_url, identifier): + base = base_url.rstrip('/') + tag = identifier.lstrip('/') + url = '{}/{}'.format(base, tag) + return url
diff --git a/readthedocs/rtd_tests/tests/test_backend_svn.py b/readthedocs/rtd_tests/tests/test_backend_svn.py new file mode 100644 --- /dev/null +++ b/readthedocs/rtd_tests/tests/test_backend_svn.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +"""Tests For SVN""" + +from __future__ import ( + absolute_import, + division, + print_function, + unicode_literals, +) + +from mock import patch +from django_dynamic_fixture import get + +from readthedocs.rtd_tests.base import RTDTestCase +from readthedocs.projects.models import Project +from readthedocs.builds.models import Version +from readthedocs.vcs_support.backends.svn import Backend as SvnBackend + +class TestSvnBackend(RTDTestCase): + + def test_get_url(self): + project = get(Project) + version = get(Version, project=project) + backend_obj = SvnBackend(project, version.slug) + + base = 'http://example.com/' + tag = 'xyz/' + self.assertEqual(backend_obj.get_url(base, tag), 'http://example.com/xyz/') + + base = 'http://example.com/' + tag = '/xyz/' + self.assertEqual(backend_obj.get_url(base, tag), 'http://example.com/xyz/') + + base = 'http://example.com' + tag = '/xyz/' + self.assertEqual(backend_obj.get_url(base, tag), 'http://example.com/xyz/')
Remove trailing slashes on svn checkout If someone enters an svn repo like `https://svn.code.sf.net/p/docutils/code/`, it is concatenated with the version `/trunk/`, so we end up with `https://svn.code.sf.net/p/docutils/code//trunk` Related code https://github.com/rtfd/readthedocs.org/blob/7f2b831ac18ee839535254e9b14e595621ebf653/readthedocs/vcs_support/backends/svn.py#L61
@stsewd Working on it. Is this a problem when checking out? I don't think so. Does this cause any other issue? @humitos I don't think so, but at the same time I'm not so familiar with svn :P when testing yesterday, it didn't fail. @stsewd I also didn't get the issue mentioned. Is the PR still required? I think so; it feels weird for the user to see `//` in the URL.
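The eventual fix normalizes both sides of the join so the base URL and the tag path always meet at exactly one slash. A minimal sketch, written as a standalone function, with the case from the report as a check:

```python
def get_url(base_url, identifier):
    # Strip the trailing slash from the base and the leading slash from the
    # tag, then join them with exactly one separator.
    return '{}/{}'.format(base_url.rstrip('/'), identifier.lstrip('/'))


assert get_url('https://svn.code.sf.net/p/docutils/code/', '/trunk/') == \
    'https://svn.code.sf.net/p/docutils/code/trunk/'
```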
2018-12-04T08:47:26
readthedocs/readthedocs.org
4,964
readthedocs__readthedocs.org-4964
[ "4025" ]
af867c1d85a157b7106734fe45a0fabde9385083
diff --git a/readthedocs/projects/notifications.py b/readthedocs/projects/notifications.py --- a/readthedocs/projects/notifications.py +++ b/readthedocs/projects/notifications.py @@ -2,9 +2,12 @@ """Project notifications.""" +from django.urls import reverse from django.http import HttpRequest +from django.utils.translation import ugettext_lazy as _ +from messages_extends.constants import ERROR_PERSISTENT -from readthedocs.notifications import Notification +from readthedocs.notifications import Notification, SiteNotification from readthedocs.notifications.constants import REQUIREMENT @@ -16,6 +19,20 @@ class ResourceUsageNotification(Notification): level = REQUIREMENT +class EmailConfirmNotification(SiteNotification): + + failure_level = ERROR_PERSISTENT + failure_message = _( + 'Your primary email address is not verified. ' + 'Please <a href="{{account_email_url}}">verify it here</a>.', + ) + + def get_context_data(self): + context = super(EmailConfirmNotification, self).get_context_data() + context.update({'account_email_url': reverse('account_email')}) + return context + + class DeprecatedViewNotification(Notification): """Notification to alert user of a view that is going away.""" diff --git a/readthedocs/projects/views/private.py b/readthedocs/projects/views/private.py --- a/readthedocs/projects/views/private.py +++ b/readthedocs/projects/views/private.py @@ -60,6 +60,7 @@ ) from readthedocs.projects.signals import project_import from readthedocs.projects.views.base import ProjectAdminMixin, ProjectSpamMixin +from readthedocs.projects.notifications import EmailConfirmNotification from ..tasks import retry_domain_verification @@ -78,9 +79,27 @@ class ProjectDashboard(PrivateViewMixin, ListView): model = Project template_name = 'projects/project_dashboard.html' + def validate_primary_email(self, user): + """ + Sends a persistent error notification. + + Checks if the user has a primary email or if the primary email + is verified or not. Sends a persistent error notification if + either of the condition is False. + """ + email_qs = user.emailaddress_set.filter(primary=True) + email = email_qs.first() + if not email or not email.verified: + notification = EmailConfirmNotification(user=user, success=False) + notification.send() + def get_queryset(self): return Project.objects.dashboard(self.request.user) + def get(self, request, *args, **kwargs): + self.validate_primary_email(request.user) + return super(ProjectDashboard, self).get(self, request, *args, **kwargs) + def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs)
diff --git a/readthedocs/rtd_tests/tests/test_notifications.py b/readthedocs/rtd_tests/tests/test_notifications.py --- a/readthedocs/rtd_tests/tests/test_notifications.py +++ b/readthedocs/rtd_tests/tests/test_notifications.py @@ -9,6 +9,7 @@ from django.test import TestCase from django.test.utils import override_settings from messages_extends.models import Message as PersistentMessage +from allauth.account.models import EmailAddress from readthedocs.builds.models import Build from readthedocs.notifications import Notification, SiteNotification @@ -166,6 +167,9 @@ class TestNotification(SiteNotification): success_level = INFO_NON_PERSISTENT user = fixture.get(User) + # Setting the primary and verified email address of the user + email = fixture.get(EmailAddress, user=user, primary=True, verified=True) + n = TestNotification(user, True) backend = SiteBackend(request=None) diff --git a/readthedocs/rtd_tests/tests/test_project_views.py b/readthedocs/rtd_tests/tests/test_project_views.py --- a/readthedocs/rtd_tests/tests/test_project_views.py +++ b/readthedocs/rtd_tests/tests/test_project_views.py @@ -1,6 +1,8 @@ # -*- coding: utf-8 -*- + from datetime import timedelta +from mock import patch from django.contrib.auth.models import User from django.contrib.messages import constants as message_const from django.http.response import HttpResponseRedirect @@ -9,7 +11,7 @@ from django.utils import timezone from django.views.generic.base import ContextMixin from django_dynamic_fixture import get, new -from mock import patch +from allauth.account.models import EmailAddress from readthedocs.builds.models import Build, Version from readthedocs.oauth.models import RemoteRepository @@ -342,6 +344,10 @@ def test_import_demo_imported_duplicate(self): project.repo = 'file:///foobar' project.save() + # Setting the primary and verified email of the test user. + user = User.objects.get(username='eric') + user_email = get(EmailAddress, user=user, primary=True, verified=True) + resp = self.client.get('/dashboard/import/manual/demo/') self.assertEqual(resp.status_code, 302) self.assertEqual(resp['Location'], '/dashboard/')
Verify all our emails Currently, email verification is optional for users. This means that when they sign up, they get a verification email, but they can still sign in without it. We should eventually get all our active users verified. It feels like the process should be the following: * On login, check if the email address is verified; if not, resend the verification email and show a message in the UI about verifying it * After some amount of time, turn on email verification as a requirement, so that we can be more confident our users have real emails.
Hi @ericholscher I would love to work on it. :) @rajujha373 please, go ahead and submit a pull request! :) I suppose the description should be clear enough, but if you have any questions, please ask. @humitos This issue seems interesting to me; I would like to take it. Can you please guide me on how to get started with it? @dojutsu-user you need to start by setting up your environment. Check this guide: https://docs.readthedocs.io/en/latest/install.html @humitos thanks, but the development environment is already up. Can you please tell me how to check whether the email is verified or not? Actually, I am not able to find the model for this ![screenshot from 2018-10-15 14-56-52](https://user-images.githubusercontent.com/29149191/46942410-e8869b00-d08a-11e8-9d5f-db13dd232bfa.png) EDIT: I found it.
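To answer the question about checking verification: allauth stores addresses in its `EmailAddress` model, so the check is a small query. A minimal sketch (the helper name is illustrative; the merged patch above wraps the same logic in the dashboard view):

```python
from allauth.account.models import EmailAddress


def has_verified_primary_email(user):
    """Return True when the user has a primary, verified email address."""
    email = EmailAddress.objects.filter(user=user, primary=True).first()
    return bool(email and email.verified)
```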
2018-12-06T09:49:05
readthedocs/readthedocs.org
4,967
readthedocs__readthedocs.org-4967
[ "4935" ]
cf071ed66c10beacbb5e46a8832186013d5a1ea9
diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py --- a/readthedocs/projects/forms.py +++ b/readthedocs/projects/forms.py @@ -182,6 +182,15 @@ class Meta(object): widget=forms.Textarea, ) + def clean_tags(self): + tags = self.cleaned_data.get('tags', []) + for tag in tags: + if len(tag) > 100: + raise forms.ValidationError( + _('Length of each tag must be less than or equal to 100 characters.') + ) + return tags + class ProjectAdvancedForm(ProjectTriggerBuildMixin, ProjectForm):
diff --git a/readthedocs/rtd_tests/tests/test_project_forms.py b/readthedocs/rtd_tests/tests/test_project_forms.py --- a/readthedocs/rtd_tests/tests/test_project_forms.py +++ b/readthedocs/rtd_tests/tests/test_project_forms.py @@ -163,6 +163,26 @@ def test_changing_vcs_should_not_change_latest_is_not_none(self): latest.refresh_from_db() self.assertEqual(latest.identifier, 'custom') + def test_length_of_tags(self): + data = { + 'documentation_type': 'sphinx', + 'language': 'en' + } + data['tags'] = '{},{}'.format('a'*50, 'b'*99) + form = ProjectExtraForm(data) + self.assertTrue(form.is_valid()) + + data['tags'] = '{},{}'.format('a'*90, 'b'*100) + form = ProjectExtraForm(data) + self.assertTrue(form.is_valid()) + + data['tags'] = '{},{}'.format('a'*99, 'b'*101) + form = ProjectExtraForm(data) + self.assertFalse(form.is_valid()) + self.assertTrue(form.has_error('tags')) + error_msg = 'Length of each tag must be less than or equal to 100 characters.' + self.assertDictEqual(form.errors, {'tags': [error_msg]}) + class TestProjectAdvancedForm(TestCase):
Validate tags > 100 characters Not sure how to do this validation yet; the best option would be for django-taggit to handle it. Related issue https://github.com/alex/django-taggit/issues/510 Sentry issue https://sentry.io/read-the-docs/readthedocs-org/issues/661257950/
We should probably add a `clean_tags` method in this form https://github.com/rtfd/readthedocs.org/blob/7f2b831ac18ee839535254e9b14e595621ebf653/readthedocs/projects/forms.py#L164 This is low priority, though, since it only breaks with spam projects. @humitos Thanks for pointing to the code. @stsewd Will submit a PR soon. Just to inform you: the stack trace/error log cannot be seen by everyone with the given Sentry link; it requires logging in to RTD. Yes, I know; it can only be seen by the core team, as it may contain secrets.
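Since django-taggit's `Tag.name` column is 100 characters wide, the form-level guard suggested above is short. A minimal sketch of a `clean_tags` method meant to drop into the project form linked above:

```python
from django import forms
from django.utils.translation import ugettext_lazy as _


def clean_tags(self):
    # taggit stores tag names in a 100-character column; reject anything
    # longer here so saving never raises at the database layer.
    tags = self.cleaned_data.get('tags', [])
    for tag in tags:
        if len(tag) > 100:
            raise forms.ValidationError(
                _('Length of each tag must be less than or equal to 100 characters.'),
            )
    return tags
```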
2018-12-06T17:37:43
readthedocs/readthedocs.org
4,983
readthedocs__readthedocs.org-4983
[ "4981" ]
d196fc7148d631218ef9fa250de69986c7167368
diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py --- a/readthedocs/projects/forms.py +++ b/readthedocs/projects/forms.py @@ -564,6 +564,14 @@ def save(self, commit=True): self.project.webhook_notifications.add(self.webhook) return self.project + def clean_url(self): + url = self.cleaned_data.get('url') + if not url: + raise forms.ValidationError( + _('This field is required.') + ) + return url + class Meta: model = WebHook fields = ['url'] diff --git a/readthedocs/projects/views/private.py b/readthedocs/projects/views/private.py --- a/readthedocs/projects/views/private.py +++ b/readthedocs/projects/views/private.py @@ -537,19 +537,18 @@ def project_notifications(request, project_slug): slug=project_slug, ) - email_form = EmailHookForm(data=request.POST or None, project=project) - webhook_form = WebHookForm(data=request.POST or None, project=project) + email_form = EmailHookForm(data=None, project=project) + webhook_form = WebHookForm(data=None, project=project) if request.method == 'POST': - if email_form.is_valid(): - email_form.save() - if webhook_form.is_valid(): - webhook_form.save() - project_dashboard = reverse( - 'projects_notifications', - args=[project.slug], - ) - return HttpResponseRedirect(project_dashboard) + if 'email' in request.POST.keys(): + email_form = EmailHookForm(data=request.POST, project=project) + if email_form.is_valid(): + email_form.save() + elif 'url' in request.POST.keys(): + webhook_form = WebHookForm(data=request.POST, project=project) + if webhook_form.is_valid(): + webhook_form.save() emails = project.emailhook_notifications.all() urls = project.webhook_notifications.all()
diff --git a/readthedocs/rtd_tests/tests/test_project_forms.py b/readthedocs/rtd_tests/tests/test_project_forms.py --- a/readthedocs/rtd_tests/tests/test_project_forms.py +++ b/readthedocs/rtd_tests/tests/test_project_forms.py @@ -13,6 +13,7 @@ from django.test.utils import override_settings from django_dynamic_fixture import get from textclassifier.validators import ClassifierValidator +from django.core.exceptions import ValidationError from readthedocs.builds.constants import LATEST from readthedocs.builds.models import Version @@ -31,6 +32,8 @@ ProjectExtraForm, TranslationForm, UpdateProjectForm, + WebHookForm, + EmailHookForm ) from readthedocs.projects.models import Project, EnvironmentVariable @@ -514,6 +517,72 @@ def test_can_change_language_to_self_lang(self): self.assertTrue(form.is_valid()) +class TestNotificationForm(TestCase): + + def setUp(self): + self.project = get(Project) + + def test_webhookform(self): + self.assertEqual(self.project.webhook_notifications.all().count(), 0) + + data = { + 'url': 'http://www.example.com/' + } + form = WebHookForm(data=data, project=self.project) + self.assertTrue(form.is_valid()) + form.save() + self.assertEqual(self.project.webhook_notifications.all().count(), 1) + + def test_wrong_inputs_in_webhookform(self): + self.assertEqual(self.project.webhook_notifications.all().count(), 0) + + data = { + 'url': '' + } + form = WebHookForm(data=data, project=self.project) + self.assertFalse(form.is_valid()) + self.assertDictEqual(form.errors, {'url': ['This field is required.']}) + self.assertEqual(self.project.webhook_notifications.all().count(), 0) + + data = { + 'url': 'wrong-url' + } + form = WebHookForm(data=data, project=self.project) + self.assertFalse(form.is_valid()) + self.assertDictEqual(form.errors, {'url': ['Enter a valid URL.']}) + self.assertEqual(self.project.webhook_notifications.all().count(), 0) + + def test_emailhookform(self): + self.assertEqual(self.project.emailhook_notifications.all().count(), 0) + + data = { + 'email': '[email protected]' + } + form = EmailHookForm(data=data, project=self.project) + self.assertTrue(form.is_valid()) + form.save() + self.assertEqual(self.project.emailhook_notifications.all().count(), 1) + + def test_wrong_inputs_in_emailhookform(self): + self.assertEqual(self.project.emailhook_notifications.all().count(), 0) + + data = { + 'email': 'wrong_email@' + } + form = EmailHookForm(data=data, project=self.project) + self.assertFalse(form.is_valid()) + self.assertDictEqual(form.errors, {'email': ['Enter a valid email address.']}) + self.assertEqual(self.project.emailhook_notifications.all().count(), 0) + + data = { + 'email': '' + } + form = EmailHookForm(data=data, project=self.project) + self.assertFalse(form.is_valid()) + self.assertDictEqual(form.errors, {'email': ['This field is required.']}) + self.assertEqual(self.project.emailhook_notifications.all().count(), 0) + + class TestProjectEnvironmentVariablesForm(TestCase): def setUp(self):
Validate url from Webhook notification We are allowing `''` as the URL for webhook notifications. We need to add better validation to this form. ### Steps to reproduce it 1. Go to Admin -> Notifications 2. Click the Add button under the Webhook Notifications Sentry: https://sentry.io/read-the-docs/readthedocs-org/issues/789512412/?referrer=github_plugin
Working on it.
2018-12-10T17:13:42
readthedocs/readthedocs.org
4,984
readthedocs__readthedocs.org-4984
[ "4980" ]
cf071ed66c10beacbb5e46a8832186013d5a1ea9
diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -15,6 +15,7 @@ import git from builtins import str from django.core.exceptions import ValidationError +from django.conf import settings from git.exc import BadName from readthedocs.config import ALL @@ -177,12 +178,13 @@ def tags(self): def branches(self): repo = git.Repo(self.working_dir) versions = [] + branches = [] - # ``repo.branches`` returns local branches and - branches = repo.branches # ``repo.remotes.origin.refs`` returns remote branches if repo.remotes: branches += repo.remotes.origin.refs + if getattr(settings, 'LOCAL_GIT_BRANCHES', False): + branches += repo.branches for branch in branches: verbose_name = branch.name
diff --git a/readthedocs/settings/test.py b/readthedocs/settings/test.py --- a/readthedocs/settings/test.py +++ b/readthedocs/settings/test.py @@ -16,6 +16,7 @@ class CommunityTestSettings(CommunityDevSettings): DEBUG = False TEMPLATE_DEBUG = False + LOCAL_GIT_BRANCHES = True @property def LOGGING(self): # noqa - avoid pep8 N802
Stable builds are triggered recursively On Sunday we found that a build for the `stable` version was triggered constantly (example: http://readthedocs.org/projects/bugzilla/builds/). @ericholscher hotfixed this with commit https://github.com/rtfd/readthedocs.org/commit/83caf8fe8 to avoid the "Sunday problem", but we need to research it and fix it properly. We suspect that this could have been introduced in #4433 and/or #4876.
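For reference, a minimal GitPython sketch of the distinction the patch above toggles: `repo.branches` lists *local* heads, while `repo.remotes.origin.refs` lists the remote-tracking ones, and only the latter are wanted in production (the `LOCAL_GIT_BRANCHES` setting adds the local ones back for tests). The path below is a placeholder:

```python
import git

repo = git.Repo('/path/to/checkout')  # placeholder path

local_heads = list(repo.branches)  # e.g. [<Head "master">] in a fresh clone
remote_refs = list(repo.remotes.origin.refs) if repo.remotes else []

# Mixing both sources can yield duplicate or stale version names such as
# ``stable``, which is one plausible way a stable build could keep
# re-triggering itself.
print(sorted({ref.name for ref in local_heads + remote_refs}))
```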
2018-12-10T17:17:16
readthedocs/readthedocs.org
4,990
readthedocs__readthedocs.org-4990
[ "4698" ]
cd6e82118541b1612b24540efebdefb7af92827e
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -28,6 +28,7 @@ 'djangodocs', 'doc_extensions', 'sphinx_tabs.tabs', + 'sphinx-prompt', ] templates_path = ['_templates'] @@ -82,3 +83,7 @@ # Activate autosectionlabel plugin autosectionlabel_prefix_document = True + + +def setup(app): + app.add_stylesheet('css/sphinx_prompt_css.css')
Remove $ before shell commands in docs The developers have a [preference](https://github.com/rtfd/readthedocs.org/pull/4676#discussion_r221400605) to not have a `$` before shell commands in the docs. This makes it easier to copy and paste from our docs. We should remove it everywhere. The following command should find every occurrence: `grep -Ri " $ " docs/*.rst docs/*/*.rst`
@davidfischer I can take a stab at this! EDIT: Pull request is in for this issue @davidfischer is this still open? I would like to take this issue up. I think #4699 is almost there and there are just a few things left. I think @houllette can finish it up. Is this issue closed? I am looking for a beginner issue! This issue is already taken, there is a PR open: https://github.com/rtfd/readthedocs.org/pull/4699 OK. I would like to take any beginner issues but most of them have open PRs on them. I'll figure something else out, I guess. @himanshuc3 If you can't find any that are open, let us know. We can probably make some more. Can I take up this issue? @rajdeepbharati sorry, there is an open PR already https://github.com/rtfd/readthedocs.org/pull/4699 @RichardLitt It would be great if some good first-time issues could be introduced for the project. There are, but some are already taken https://github.com/rtfd/readthedocs.org/issues?q=is%3Aissue+is%3Aopen+label%3A%22Good+First+Issue%22; we try to label introductory issues with that label. When watching the repo, GitHub shows issues on your "timeline/dashboard" when they are labeled.
2018-12-12T10:33:47
readthedocs/readthedocs.org
4,993
readthedocs__readthedocs.org-4993
[ "4985" ]
cd6e82118541b1612b24540efebdefb7af92827e
diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -183,8 +183,6 @@ def branches(self): # ``repo.remotes.origin.refs`` returns remote branches if repo.remotes: branches += repo.remotes.origin.refs - if getattr(settings, 'LOCAL_GIT_BRANCHES', False): - branches += repo.branches for branch in branches: verbose_name = branch.name
diff --git a/readthedocs/rtd_tests/tests/test_backend.py b/readthedocs/rtd_tests/tests/test_backend.py --- a/readthedocs/rtd_tests/tests/test_backend.py +++ b/readthedocs/rtd_tests/tests/test_backend.py @@ -13,6 +13,7 @@ import django_dynamic_fixture as fixture import pytest +import six from django.contrib.auth.models import User from mock import Mock, patch @@ -49,7 +50,8 @@ def setUp(self): self.dummy_conf.submodules.include = ALL self.dummy_conf.submodules.exclude = [] - def test_git_branches(self): + @patch('readthedocs.projects.models.Project.checkout_path') + def test_git_branches(self, checkout_path): repo_path = self.project.repo default_branches = [ # comes from ``make_test_git`` function @@ -63,15 +65,47 @@ def test_git_branches(self): '2.0.X', 'release/2.0.0', 'release/foo/bar', + ] + for branch in branches: + create_git_branch(repo_path, branch) + + # Create dir where to clone the repo + local_repo = os.path.join(mkdtemp(), 'local') + os.mkdir(local_repo) + checkout_path.return_value = local_repo + + repo = self.project.vcs_repo() + repo.clone() + + self.assertEqual( + set(branches + default_branches), + {branch.verbose_name for branch in repo.branches}, + ) + + @pytest.mark.skipif(six.PY2, reason='Only for python3') + @patch('readthedocs.projects.models.Project.checkout_path') + def test_git_branches_unicode(self, checkout_path): + repo_path = self.project.repo + default_branches = [ + # comes from ``make_test_git`` function + 'submodule', + 'relativesubmodule', + 'invalidsubmodule', + ] + branches = [ + 'master', 'release-ünîø∂é', ] for branch in branches: create_git_branch(repo_path, branch) + # Create dir where to clone the repo + local_repo = os.path.join(mkdtemp(), 'local') + os.mkdir(local_repo) + checkout_path.return_value = local_repo + repo = self.project.vcs_repo() - # We aren't cloning the repo, - # so we need to hack the repo path - repo.working_dir = repo_path + repo.clone() self.assertEqual( set(branches + default_branches), diff --git a/readthedocs/rtd_tests/tests/test_celery.py b/readthedocs/rtd_tests/tests/test_celery.py --- a/readthedocs/rtd_tests/tests/test_celery.py +++ b/readthedocs/rtd_tests/tests/test_celery.py @@ -126,10 +126,14 @@ def test_sync_repository(self): @patch('readthedocs.projects.tasks.api_v2') @patch('readthedocs.projects.models.Project.checkout_path') def test_check_duplicate_reserved_version_latest(self, checkout_path, api_v2): - checkout_path.return_value = self.project.repo create_git_branch(self.repo, 'latest') create_git_tag(self.repo, 'latest') + # Create dir where to clone the repo + local_repo = os.path.join(mkdtemp(), 'local') + os.mkdir(local_repo) + checkout_path.return_value = local_repo + version = self.project.versions.get(slug=LATEST) sync_repository = tasks.UpdateDocsTaskStep() sync_repository.version = version @@ -148,10 +152,14 @@ def test_check_duplicate_reserved_version_latest(self, checkout_path, api_v2): @patch('readthedocs.projects.tasks.api_v2') @patch('readthedocs.projects.models.Project.checkout_path') def test_check_duplicate_reserved_version_stable(self, checkout_path, api_v2): - checkout_path.return_value = self.project.repo create_git_branch(self.repo, 'stable') create_git_tag(self.repo, 'stable') + # Create dir where to clone the repo + local_repo = os.path.join(mkdtemp(), 'local') + os.mkdir(local_repo) + checkout_path.return_value = local_repo + version = self.project.versions.get(slug=LATEST) sync_repository = tasks.UpdateDocsTaskStep() sync_repository.version = version diff --git 
a/readthedocs/settings/test.py b/readthedocs/settings/test.py --- a/readthedocs/settings/test.py +++ b/readthedocs/settings/test.py @@ -16,7 +16,6 @@ class CommunityTestSettings(CommunityDevSettings): DEBUG = False TEMPLATE_DEBUG = False - LOCAL_GIT_BRANCHES = True @property def LOGGING(self): # noqa - avoid pep8 N802
Remove the `LOCAL_GIT_BRANCHES` setting We introduced this here to make tests work, but it isn't how things work in production: https://github.com/rtfd/readthedocs.org/pull/4984 We should remove this setting, and make our tests work properly with remote repos. I see a couple ways we could handle this: * The `make_get_branch` function could manipulate the git repo so that it has a *remote* branch instead of a local branch * We could mock `repo.branches` in the places we use it. I went down this road briefly, but it was a little bit complex. I think the first approach is best, but I'm not sure how to do it easily.
> I think the first approach is best, but I'm not sure how to do it easily. We should follow the same workflow that we do in production: 1. The repository is created by another person with all its branches: we can create a temporary repository 1. We clone it from there (localhost) 1. We run our tests on the cloned one instead of the originally created one (see the sketch below) That way, I suppose the branches will be remote and the structure of the repository will be pretty similar to what we have in production. There are some tests that use the clone step; I guess we only need to put that step in our tests.
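A rough pytest-style sketch of that workflow, assuming GitPython; all paths and names here are made up for illustration:

```python
import os
import tempfile

import git


def test_branches_are_remote_after_clone():
    # 1. "Another person" creates the origin repository with a branch.
    origin_dir = tempfile.mkdtemp()
    origin = git.Repo.init(origin_dir)
    open(os.path.join(origin_dir, 'README'), 'w').close()
    origin.index.add(['README'])
    origin.index.commit('initial commit')
    origin.create_head('release-1.0')

    # 2. Clone it from localhost, the same way the builders do.
    clone_dir = tempfile.mkdtemp()
    clone = git.Repo.clone_from(origin_dir, clone_dir)

    # 3. Assert against the clone: the branch now shows up as a remote ref.
    remote_names = {ref.remote_head for ref in clone.remotes.origin.refs}
    assert 'release-1.0' in remote_names
```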
2018-12-12T17:19:33
readthedocs/readthedocs.org
5,002
readthedocs__readthedocs.org-5002
[ "3839" ]
4cf6a2ee8a86d6366e726ac53d696f3bae7dbdba
diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -15,8 +15,7 @@ import git from builtins import str from django.core.exceptions import ValidationError -from django.conf import settings -from git.exc import BadName +from git.exc import BadName, InvalidGitRepositoryError from readthedocs.config import ALL from readthedocs.projects.exceptions import RepositoryError @@ -67,8 +66,11 @@ def update(self): return self.clone() def repo_exists(self): - code, _, _ = self.run('git', 'status', record=False) - return code == 0 + try: + git.Repo(self.working_dir) + except InvalidGitRepositoryError: + return False + return True def are_submodules_available(self, config): """Test whether git submodule checkout step should be performed.""" @@ -83,8 +85,8 @@ def are_submodules_available(self, config): return False # Keep compatibility with previous projects - code, out, _ = self.run('git', 'submodule', 'status', record=False) - return code == 0 and bool(out) + repo = git.Repo(self.working_dir) + return bool(repo.submodules) def validate_submodules(self, config): """
Simplify vcs_support backend git by using GitPython We already started using GitPython for some submodule pieces. We should continue porting some of our hand-rolled logic to use GitPython instead. Pieces of `readthedocs.vcs_support.backend.git` we should definitely port: * `parse_branches()` -- this supersedes a branch I started at #2997 to replace parsing this with a csvreader * `parse_tags()` -- same silly code using a csvreader * `find_ref()` * `ref_exists()` * `repo_exists()` Maybe: * `tags()` * `branches()` These would be good first targets to port over, as we are executing these commands to get data out of the repository. It's not important that these messages are surfaced to users in build command output. I'm going to block on getting a feature out that executes these commands in docker first though, as we need to do the clone and submodule checkout inside the docker container to isolate these calls. I can't quite work out how relying on gitpython for some of these calls, but not all, works with regard to docker vcs checkouts.
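A hedged sketch of what the first items on that list might look like ported to GitPython. The function names mirror the issue, but the bodies are assumptions, not the eventual implementation:

```python
import git


def parse_tags(working_dir):
    # Each TagReference carries its name and target commit, so there is
    # no CLI output (or csvreader) to parse.
    repo = git.Repo(working_dir)
    return [(tag.commit.hexsha, tag.name) for tag in repo.tags]


def parse_branches(working_dir):
    repo = git.Repo(working_dir)
    return [(head.commit.hexsha, head.name) for head in repo.heads]


def ref_exists(working_dir, ref):
    try:
        git.Repo(working_dir).commit(ref)
        return True
    except git.exc.BadName:
        return False
```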
Just want to note here that I'm starting to feel uncomfortable with `gitpython` because it has several issues around unicode that cause our builds to fail and make them hard to debug, write tests for, etc. I mentioned some of these problems at: https://github.com/rtfd/readthedocs.org/pull/4318#discussion_r214176024 I'd like to start thinking about better alternatives, although I don't have one in mind yet. We are only missing `git status` and `git submodule status` here. But I'm not sure if we want to replace those; the code looks more complex with gitpython :P ```diff - code, _, _ = self.run('git', 'status', record=False) - return code == 0 + try: + git.Repo(self.working_dir) + except InvalidGitRepositoryError: + return False + return True ``` ```diff - code, out, _ = self.run('git', 'submodule', 'status', record=False) - return code == 0 and bool(out) + repo = git.Repo(self.working_dir) + return bool(repo.submodules) ``` If you think that we are OK with git status and git submodule status, we can close this issue. I like the consistency: all `git` commands or all `gitpython`. A mixture of both is hard to maintain and to keep in mind. On the other hand, there are `git` commands that we can't replace: those with `record=True`. The code maybe looks longer, but it's more Pythonic, I'd say, and we are relying on an external library instead of a local custom command. I'm :+1: on migrating them also.
2018-12-14T02:43:16
readthedocs/readthedocs.org
5,015
readthedocs__readthedocs.org-5015
[ "4957" ]
5d4da21fbee93b6859dba3a0369374f4fe138afb
diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py --- a/readthedocs/doc_builder/environments.py +++ b/readthedocs/doc_builder/environments.py @@ -3,10 +3,13 @@ """Documentation Builder Environments.""" from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) import logging -import json import os import re import socket @@ -32,13 +35,28 @@ from readthedocs.restapi.client import api as api_v2 from .constants import ( - DOCKER_HOSTNAME_MAX_LEN, DOCKER_IMAGE, DOCKER_LIMITS, DOCKER_OOM_EXIT_CODE, - DOCKER_SOCKET, DOCKER_TIMEOUT_EXIT_CODE, DOCKER_VERSION, - MKDOCS_TEMPLATE_DIR, SPHINX_TEMPLATE_DIR) + DOCKER_HOSTNAME_MAX_LEN, + DOCKER_IMAGE, + DOCKER_LIMITS, + DOCKER_OOM_EXIT_CODE, + DOCKER_SOCKET, + DOCKER_TIMEOUT_EXIT_CODE, + DOCKER_VERSION, + MKDOCS_TEMPLATE_DIR, + SPHINX_TEMPLATE_DIR, +) from .exceptions import ( - BuildEnvironmentCreationFailed, BuildEnvironmentError, - BuildEnvironmentException, BuildEnvironmentWarning, BuildTimeoutError, - ProjectBuildsSkippedError, VersionLockedError, YAMLParseError, MkDocsYAMLParseError) + BuildEnvironmentCreationFailed, + BuildEnvironmentError, + BuildEnvironmentException, + BuildEnvironmentWarning, + BuildTimeoutError, + MkDocsYAMLParseError, + ProjectBuildsSkippedError, + VersionLockedError, + YAMLParseError, +) + log = logging.getLogger(__name__) @@ -295,8 +313,9 @@ def run(self): # is in the last 15 lines of the command's output killed_in_output = 'Killed' in '\n'.join(self.output.splitlines()[-15:]) if self.exit_code == DOCKER_OOM_EXIT_CODE or (self.exit_code == 1 and killed_in_output): - self.output = _('Command killed due to excessive memory ' - 'consumption\n') + self.output += str(_( + '\n\nCommand killed due to excessive memory consumption\n' + )) except DockerAPIError: self.exit_code = -1 if self.output is None or not self.output:
diff --git a/readthedocs/rtd_tests/tests/test_doc_building.py b/readthedocs/rtd_tests/tests/test_doc_building.py --- a/readthedocs/rtd_tests/tests/test_doc_building.py +++ b/readthedocs/rtd_tests/tests/test_doc_building.py @@ -6,7 +6,11 @@ * the Command wrappers encapsulate the bytes and expose unicode """ from __future__ import ( - absolute_import, division, print_function, unicode_literals) + absolute_import, + division, + print_function, + unicode_literals, +) import json import os @@ -27,8 +31,11 @@ from readthedocs.builds.models import Version from readthedocs.doc_builder.config import load_yaml_config from readthedocs.doc_builder.environments import ( - BuildCommand, DockerBuildCommand, DockerBuildEnvironment, - LocalBuildEnvironment) + BuildCommand, + DockerBuildCommand, + DockerBuildEnvironment, + LocalBuildEnvironment, +) from readthedocs.doc_builder.exceptions import BuildEnvironmentError from readthedocs.doc_builder.python_environments import Conda, Virtualenv from readthedocs.projects.models import Project @@ -36,6 +43,7 @@ from readthedocs.rtd_tests.mocks.paths import fake_paths_lookup from readthedocs.rtd_tests.tests.test_config_integration import create_load + DUMMY_BUILD_ID = 123 SAMPLE_UNICODE = u'HérÉ îß sömê ünïçó∂é' SAMPLE_UTF8_BYTES = SAMPLE_UNICODE.encode('utf-8') @@ -1127,9 +1135,10 @@ def test_command_oom_kill(self): cmd.build_env.get_client.return_value = self.mocks.docker_client type(cmd.build_env).container_id = PropertyMock(return_value='foo') cmd.run() - self.assertEqual( - str(cmd.output), - u'Command killed due to excessive memory consumption\n') + self.assertIn( + 'Command killed due to excessive memory consumption\n', + str(cmd.output) + ) class TestPythonEnvironment(TestCase):
Keep the command's output when it's killed When a command is killed because of memory consumption we replace the output with a message https://github.com/rtfd/readthedocs.org/blob/85d9a1fb236e2c2149358b1cf462159e523e80bb/readthedocs/doc_builder/environments.py#L274-L276 The output can be helpful to users for debugging the build. I guess we should only show the error about the memory at the top? Or maybe just concatenate the output with that message? Raised in https://github.com/rtfd/readthedocs.org/issues/4946#issuecomment-444232307 Related to https://github.com/rtfd/readthedocs.org/issues/4468
This would be very useful :) right now I need to blindly reduce the size of my example, make another pull request, rerun the build and hope :) I suppose the reason why we put this pre-defined output is that when the command is killed, we don't get any output in return from docker. Interesting, I'll try to reproduce that locally and see So, after some attempts, I was able to reproduce and catch locally, this is what I found ``` 296 killed_in_output = 'Killed' in '\n'.join(self.output.splitlines()[-15:]) 297 if self.exit_code == DOCKER_OOM_EXIT_CODE or (self.exit_code == 1 and killed_in_output): 298 from celery.contrib import rdb 299 rdb.set_trace() 300 -> self.output = _('Command killed due to excessive memory ' 301 'consumption\n') 302 except DockerAPIError: 303 self.exit_code = -1 304 if self.output is None or not self.output: 305 self.output = _('Command exited abnormally') (Pdb) p self.output 'Collecting Pygments==2.2.0\n Using cached https://files.pythonhosted.org/packages/02/ee/b6e02dc6529e82b75bb06823ff7d005b141037cb1416b10c6f00fc419dca/Pygments-2.2.0-py2.py3-none-any.whl\nRequirement already up-to-date: setuptools<40 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages\nCollecting docutils==0.13.1\n Using cached https://files.pythonhosted.org/packages/7c/30/8fb30d820c012a6f701a66618ce065b6d61d08ac0a77e47fc7808dbaee47/docutils-0.13.1-py3-none-any.whl\nCollecting mock==1.0.1\nRequirement already up-to-date: pillow==2.6.1 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages\nRequirement already up-to-date: alabaster!=0.7.5,<0.8,>=0.7 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages\nCollecting commonmark==0.5.4\nRequirement already up-to-date: recommonmark==0.4.0 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages\nCollecting sphinx<1.8\n Using cached https://files.pythonhosted.org/packages/90/f9/a0babe32c78480994e4f1b93315558f5ed756104054a7029c672a8d77b72/Sphinx-1.7.9-py2.py3-none-any.whl\nRequirement already up-to-date: sphinx-rtd-theme<0.5 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages\nRequirement already up-to-date: readthedocs-sphinx-ext<0.6 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages\nRequirement already up-to-date: imagesize in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from sphinx<1.8)\nRequirement already up-to-date: packaging in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from sphinx<1.8)\nRequirement already up-to-date: babel!=2.0,>=1.3 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from sphinx<1.8)\nRequirement already up-to-date: Jinja2>=2.3 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from sphinx<1.8)\nCollecting requests>=2.0.0 (from sphinx<1.8)\n Using cached https://files.pythonhosted.org/packages/7d/e3/20f3d364d6c8e5d2353c72a67778eb189176f08e873c9900e10c0287b84b/requests-2.21.0-py2.py3-none-any.whl\nRequirement already up-to-date: sphinxcontrib-websupport in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from sphinx<1.8)\nRequirement already up-to-date: snowballstemmer>=1.1 in 
/home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from sphinx<1.8)\nCollecting six>=1.5 (from sphinx<1.8)\n Using cached https://files.pythonhosted.org/packages/73/fb/00a976f728d0d1fecfe898238ce23f502a721c0ac0ecfedb80e0d88c64e9/six-1.12.0-py2.py3-none-any.whl\nRequirement already up-to-date: pyparsing>=2.0.2 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from packaging->sphinx<1.8)\nRequirement already up-to-date: pytz>=0a in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from babel!=2.0,>=1.3->sphinx<1.8)\nRequirement already up-to-date: MarkupSafe>=0.23 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from Jinja2>=2.3->sphinx<1.8)\nCollecting idna<2.9,>=2.5 (from requests>=2.0.0->sphinx<1.8)\n Using cached https://files.pythonhosted.org/packages/14/2c/cd551d81dbe15200be1cf41cd03869a46fe7226e7450af7a6545bfc474c9/idna-2.8-py2.py3-none-any.whl\nRequirement already up-to-date: certifi>=2017.4.17 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from requests>=2.0.0->sphinx<1.8)\nRequirement already up-to-date: urllib3<1.25,>=1.21.1 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from requests>=2.0.0->sphinx<1.8)\nRequirement already up-to-date: chardet<3.1.0,>=3.0.2 in /home/stsewd/rtd/readthedocs.org/user_builds/read-the-docs/envs/latest/lib/python3.6/site-packages (from requests>=2.0.0->sphinx<1.8)\nInstalling collected packages: Pygments, docutils, mock, commonmark, idna, requests, six, sphinx\n Found existing installation: Pygments 2.3.0\n Uninstalling Pygments-2.3.0:\n Successfully uninstalled Pygments-2.3.0\n Found existing installation: docutils 0.14\n Uninstalling docutils-0.14:\n Successfully uninstalled docutils-0.14\n Found existing installation: mock 2.0.0\n Uninstalling mock-2.0.0:\n Successfully uninstalled mock-2.0.0\n Found existing installation: CommonMark 0.5.5\n Uninstalling CommonMark-0.5.5:\n Successfully uninstalled CommonMark-0.5.5\n Found existing installation: idna 2.7\n Uninstalling idna-2.7:\n Successfully uninstalled idna-2.7\nKilled\n' (Pdb) ``` We have some output, it may be helpful for debugging, and there is a `Killed` at the end, so I think we can append the message or just rely on the top message to communicate the error. I'm going to keep investigating if there are some side effects.
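Pulled out of the patch above into a standalone sketch, the resulting heuristic keeps the output and appends the explanation rather than replacing it; the OOM exit code value here is an assumption (it lives in `doc_builder/constants.py`):

```python
DOCKER_OOM_EXIT_CODE = 137  # assumed value for this sketch; defined in constants.py


def annotate_oom(output, exit_code):
    # The kernel's "Killed" marker shows up near the end of the stream,
    # so only the last 15 lines are inspected.
    killed = 'Killed' in '\n'.join(output.splitlines()[-15:])
    if exit_code == DOCKER_OOM_EXIT_CODE or (exit_code == 1 and killed):
        # Append instead of overwrite, so the pip/sphinx output above the
        # marker is preserved for debugging.
        output += '\n\nCommand killed due to excessive memory consumption\n'
    return output
```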
2018-12-19T00:35:46
readthedocs/readthedocs.org
5,034
readthedocs__readthedocs.org-5034
[ "5032" ]
5386e844ac96a211f4aa58e36a49e1a1e7da2b06
diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py --- a/readthedocs/projects/forms.py +++ b/readthedocs/projects/forms.py @@ -222,7 +222,7 @@ def __init__(self, *args, **kwargs): default_choice = (None, '-' * 9) all_versions = self.instance.versions.values_list( - 'slug', 'verbose_name' + 'identifier', 'verbose_name' ) self.fields['default_branch'].widget = forms.Select( choices=[default_choice] + list(all_versions)
diff --git a/readthedocs/rtd_tests/tests/test_project_forms.py b/readthedocs/rtd_tests/tests/test_project_forms.py --- a/readthedocs/rtd_tests/tests/test_project_forms.py +++ b/readthedocs/rtd_tests/tests/test_project_forms.py @@ -174,6 +174,7 @@ def setUp(self): slug='public-1', active=True, privacy_level=PUBLIC, + identifier='public-1', ) get( Version, @@ -181,6 +182,7 @@ def setUp(self): slug='public-2', active=True, privacy_level=PUBLIC, + identifier='public-2', ) get( Version, @@ -188,6 +190,15 @@ def setUp(self): slug='public-3', active=False, privacy_level=PROTECTED, + identifier='public-3', + ) + get( + Version, + project=self.project, + slug='public-4', + active=False, + privacy_level=PUBLIC, + identifier='public/4' ) get( Version, @@ -195,6 +206,7 @@ def setUp(self): slug='private', active=True, privacy_level=PRIVATE, + identifier='private', ) get( Version, @@ -202,6 +214,7 @@ def setUp(self): slug='protected', active=True, privacy_level=PROTECTED, + identifier='protected', ) def test_list_only_active_versions_on_default_version(self): @@ -222,12 +235,12 @@ def test_list_all_versions_on_default_branch(self): self.assertTrue(self.project.versions.filter(slug=LATEST).exists()) self.assertEqual( set( - slug - for slug, _ in form.fields['default_branch'].widget.choices + identifier + for identifier, _ in form.fields['default_branch'].widget.choices ), { - None, 'latest', 'public-1', 'public-2', - 'public-3', 'protected', 'private' + None, 'master', 'public-1', 'public-2', + 'public-3', 'public/4', 'protected', 'private' }, )
Setting default branch uses Version.slug instead of identifier ## Details * Read the Docs project URL: https://readthedocs.org/projects/test-builds/ * Build URL (if applicable): https://readthedocs.org/projects/test-builds/builds/8319150/ * Read the Docs username (if applicable): humitos ## Expected Result Build the branch set as the default branch, the same way as when it's built as a [non-default one](https://readthedocs.org/projects/test-builds/builds/8307296/). > NOTE: the correct command here is `git checkout --force origin/branch/with/slashes` ## Actual Result The `git checkout --force branch-with-slashes` command fails because the branch does not exist. ---- `Project.default_branch` is saved improperly: ``` >>> Version.objects.get(slug='branch-with-slashes').__dict__ ... 'type': 'branch', 'identifier': 'origin/branch/with/slashes', 'verbose_name': 'branch/with/slashes', 'slug': 'branch-with-slashes', >>> Version.objects.get(slug='branch-with-slashes').project.default_branch 'branch-with-slashes' ```
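A minimal sketch of the fix's effect, using hypothetical queryset results that mirror the report above; the form must offer the git-checkoutable `identifier`, not the URL-safe `slug`:

```python
# Before: the form's choices came from the slug, which git cannot check out.
project.versions.values_list('slug', 'verbose_name')
# -> [('branch-with-slashes', 'branch/with/slashes'), ...]

# After: the choices come from the identifier, the actual remote ref.
project.versions.values_list('identifier', 'verbose_name')
# -> [('origin/branch/with/slashes', 'branch/with/slashes'), ...]
```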
2018-12-24T15:56:48
readthedocs/readthedocs.org
5,036
readthedocs__readthedocs.org-5036
[ "5031" ]
5386e844ac96a211f4aa58e36a49e1a1e7da2b06
diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -1013,6 +1013,7 @@ def add_features(sender, **kwargs): DONT_OVERWRITE_SPHINX_CONTEXT = 'dont_overwrite_sphinx_context' ALLOW_V2_CONFIG_FILE = 'allow_v2_config_file' MKDOCS_THEME_RTD = 'mkdocs_theme_rtd' + DONT_SHALLOW_CLONE = 'dont_shallow_clone' FEATURES = ( (USE_SPHINX_LATEST, _('Use latest version of Sphinx')), @@ -1021,10 +1022,12 @@ def add_features(sender, **kwargs): (PIP_ALWAYS_UPGRADE, _('Always run pip install --upgrade')), (SKIP_SUBMODULES, _('Skip git submodule checkout')), (DONT_OVERWRITE_SPHINX_CONTEXT, _( - 'Do not overwrite context vars in conf.py with Read the Docs context',)), + 'Do not overwrite context vars in conf.py with Read the Docs context')), (ALLOW_V2_CONFIG_FILE, _( 'Allow to use the v2 of the configuration file')), (MKDOCS_THEME_RTD, _('Use Read the Docs theme for MkDocs as default theme')), + (DONT_SHALLOW_CLONE, _( + 'Do not shallow clone when cloning git repos')), ) projects = models.ManyToManyField( diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -131,11 +131,26 @@ def validate_submodules(self, config): return False, [] return True, submodules.keys() + def use_shallow_clone(self): + """ + Test whether shallow clone should be performed. + + .. note:: + + Temporarily, we support skipping this option as builds that rely on + git history can fail if using shallow clones. This should + eventually be configurable via the web UI. + """ + from readthedocs.projects.models import Feature + return not self.project.has_feature(Feature.DONT_SHALLOW_CLONE) + def fetch(self): - code, stdout, stderr = self.run( - 'git', 'fetch', '--depth', str(self.repo_depth), - '--tags', '--prune', '--prune-tags', - ) + cmd = ['git', 'fetch', '--tags', '--prune', '--prune-tags'] + + if self.use_shallow_clone(): + cmd.extend(['--depth', str(self.repo_depth)]) + + code, stdout, stderr = self.run(*cmd) if code != 0: raise RepositoryError return code, stdout, stderr @@ -152,10 +167,14 @@ def checkout_revision(self, revision=None): def clone(self): """Clones the repository.""" - code, stdout, stderr = self.run( - 'git', 'clone', '--depth', str(self.repo_depth), - '--no-single-branch', self.repo_url, '.' - ) + cmd = ['git', 'clone', '--no-single-branch'] + + if self.use_shallow_clone(): + cmd.extend(['--depth', str(self.repo_depth)]) + + cmd.extend([self.repo_url, '.']) + + code, stdout, stderr = self.run(*cmd) if code != 0: raise RepositoryError return code, stdout, stderr
diff --git a/readthedocs/rtd_tests/tests/test_backend.py b/readthedocs/rtd_tests/tests/test_backend.py --- a/readthedocs/rtd_tests/tests/test_backend.py +++ b/readthedocs/rtd_tests/tests/test_backend.py @@ -149,7 +149,7 @@ def test_skip_submodule_checkout(self): repo.update() repo.checkout('submodule') self.assertTrue(repo.are_submodules_available(self.dummy_conf)) - feature = fixture.get( + fixture.get( Feature, projects=[self.project], feature_id=Feature.SKIP_SUBMODULES, @@ -157,6 +157,19 @@ def test_skip_submodule_checkout(self): self.assertTrue(self.project.has_feature(Feature.SKIP_SUBMODULES)) self.assertFalse(repo.are_submodules_available(self.dummy_conf)) + def test_use_shallow_clone(self): + repo = self.project.vcs_repo() + repo.update() + repo.checkout('submodule') + self.assertTrue(repo.use_shallow_clone()) + fixture.get( + Feature, + projects=[self.project], + feature_id=Feature.DONT_SHALLOW_CLONE, + ) + self.assertTrue(self.project.has_feature(Feature.DONT_SHALLOW_CLONE)) + self.assertFalse(repo.use_shallow_clone()) + def test_check_submodule_urls(self): repo = self.project.vcs_repo() repo.update()
Shallow clone breaks build ## Details * Read the Docs project URL: https://readthedocs.org/projects/patchwork/ * Build URL (if applicable): https://readthedocs.org/projects/patchwork/builds/8314815/ * Read the Docs username (if applicable): ## Expected Result The build, which utilizes the `reno.sphinxext` module, should work. ## Actual Result The build fails due to missing tags. These are presumably missing due to the shallow cloning of repos introduced in #4939. ## Additional Information The original issue that #4939 purported to resolve was #1888, **Optional** use of shallow clone for git repos (emphasis mine). While shallow cloning might be useful in general, it should be possible to disable it for projects that rely on git history to build their docs.
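For illustration, a hedged GitPython sketch of why history-dependent tools like reno break on a shallow checkout. The URL and depth below are placeholders; only tags reachable within the truncated history get fetched:

```python
import git

shallow = git.Repo.clone_from(
    'https://example.com/repo.git', '/tmp/shallow',
    depth=50, no_single_branch=True,  # mirrors the shallow clone from #4939
)
print(len(shallow.tags))  # misses older tags that reno needs

full = git.Repo.clone_from('https://example.com/repo.git', '/tmp/full')
print(len(full.tags))     # the complete tag list
```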
I'm guessing this is because the `reno` package needs the git history https://docs.openstack.org/reno/. > I'm guessing this is because the `reno` package needs the git history [docs.openstack.org/reno](https://docs.openstack.org/reno/). Indeed :disappointed: @stsewd I think we should add a Feature flag for these cases to omit shallowing. We have two options here: a feature flag (this can only be added by the core team), or an option in the web interface (this can be handled by the user). Yeah, I've been looking at this. I see both approaches were adopted for submodules, with bd364946aa providing the feature flag approach and the later bfa6bdb346 providing a config-file-based approach. I'd been adding a `git.shallow_clone` setting using the latter approach based on the idea that pretty much everyone using reno or similar tools would be affected. I'm not sure how the config file ties into the web interface though, and I don't see submodule configuration there. Is this a third option? We can't have this option in the config file because the config file is parsed after the clone step. And we don't have the submodules option on the web because we are moving all per-version settings to the config only; all per-project settings will be available on the web. Oh, of course 🤦 I guess web UI is the way to go then, though I'm not sure where to nestle that in.
2018-12-24T18:12:18
readthedocs/readthedocs.org
5,043
readthedocs__readthedocs.org-5043
[ "5041" ]
a8814b3599537a9fa3efb9559ea2f3b652351833
diff --git a/docs/doc_extensions.py b/docs/doc_extensions.py --- a/docs/doc_extensions.py +++ b/docs/doc_extensions.py @@ -12,6 +12,8 @@ from django.conf import settings +from readthedocs.projects.models import Feature + def django_setting_role(typ, rawtext, text, lineno, inliner, options=None, content=None): @@ -21,9 +23,27 @@ def django_setting_role(typ, rawtext, text, lineno, inliner, options=None, return [node], [] +def feature_flags_role(typ, rawtext, text, lineno, inliner, options=None, + content=None): + """Up to date feature flags from the application.""" + all_features = Feature.FEATURES + requested_feature = utils.unescape(text) + for feature in all_features: + if requested_feature.lower() == feature[0].lower(): + desc = nodes.Text(feature[1], feature[1]) + return [desc], [] + + def setup(_): from docutils.parsers.rst import roles - roles.register_local_role('djangosetting', django_setting_role) + roles.register_local_role( + 'djangosetting', + django_setting_role + ) + roles.register_local_role( + 'featureflags', + feature_flags_role + ) return { 'version': 'builtin',
Document the Feature flags users can request from us We have several Feature flags that can be applied to projects, but we don't have a document that explains that they exist and how to request that they be enabled. List of Feature flags available: https://github.com/rtfd/readthedocs.org/blob/dec39ba70315eca338359f8381af4610a3e56274/readthedocs/projects/models.py#L1018-L1029 We should write some docs around this, mentioning them and explaining how they are used and how to request them. Comes from: https://github.com/rtfd/readthedocs.org/pull/5036#issuecomment-450011872
@humitos If I'm not wrong, the user has to ask an admin to enable any of the Feature flags. So, here's something that I thought of. In the docs, we can have these grouped under a separate heading. And in the project admin in readthedocs.org, we can add links to these which will redirect to a new issue in RTD with **prefilled details** (with the project name/slug). And in the docs, we can do the same thing, only the required project details need to be manually filled in by the user. Edit: Or we can just add it in the docs. I think just a specific page under our docs is enough. Maybe another entry in the FAQ with a general question like: "I want a different configuration that's not available under Admin" or, well, something better than that ;) Okay. Working on it.
2018-12-28T10:54:10
readthedocs/readthedocs.org
5,056
readthedocs__readthedocs.org-5056
[ "5055" ]
3304193202a388720b6093915f65a19f17f6deeb
diff --git a/readthedocs/config/config.py b/readthedocs/config/config.py --- a/readthedocs/config/config.py +++ b/readthedocs/config/config.py @@ -6,6 +6,8 @@ import os from contextlib import contextmanager +from django.conf import settings + from readthedocs.config.utils import list_to_dict, to_dict from readthedocs.projects.constants import DOCUMENTATION_CHOICES @@ -61,28 +63,16 @@ INVALID_KEYS_COMBINATION = 'invalid-keys-combination' INVALID_KEY = 'invalid-key' -DOCKER_DEFAULT_IMAGE = 'readthedocs/build' -DOCKER_DEFAULT_VERSION = '2.0' +DOCKER_DEFAULT_IMAGE = getattr(settings, 'DOCKER_DEFAULT_IMAGE', 'readthedocs/build') +DOCKER_DEFAULT_VERSION = getattr(settings, 'DOCKER_DEFAULT_VERSION', '2.0') # These map to corresponding settings in the .org, # so they haven't been renamed. -DOCKER_IMAGE = '{}:{}'.format(DOCKER_DEFAULT_IMAGE, DOCKER_DEFAULT_VERSION) -DOCKER_IMAGE_SETTINGS = { - 'readthedocs/build:1.0': { - 'python': {'supported_versions': [2, 2.7, 3, 3.4]}, - }, - 'readthedocs/build:2.0': { - 'python': {'supported_versions': [2, 2.7, 3, 3.5]}, - }, - 'readthedocs/build:3.0': { - 'python': {'supported_versions': [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6]}, - }, - 'readthedocs/build:stable': { - 'python': {'supported_versions': [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6]}, - }, - 'readthedocs/build:latest': { - 'python': {'supported_versions': [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6]}, - }, -} +DOCKER_IMAGE = getattr( + settings, + 'DOCKER_IMAGE', + '{}:{}'.format(DOCKER_DEFAULT_IMAGE, DOCKER_DEFAULT_VERSION) +) +DOCKER_IMAGE_SETTINGS = getattr(settings, 'DOCKER_IMAGE_SETTINGS', {}) class ConfigError(Exception): diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -271,7 +271,26 @@ def USE_PROMOS(self): # noqa # Docker DOCKER_ENABLE = False - DOCKER_IMAGE = 'readthedocs/build:2.0' + DOCKER_DEFAULT_IMAGE = 'readthedocs/build' + DOCKER_DEFAULT_VERSION = '2.0' + DOCKER_IMAGE = '{}:{}'.format(DOCKER_DEFAULT_IMAGE, DOCKER_DEFAULT_VERSION) + DOCKER_IMAGE_SETTINGS = { + 'readthedocs/build:1.0': { + 'python': {'supported_versions': [2, 2.7, 3, 3.4]}, + }, + 'readthedocs/build:2.0': { + 'python': {'supported_versions': [2, 2.7, 3, 3.5]}, + }, + 'readthedocs/build:3.0': { + 'python': {'supported_versions': [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6]}, + }, + 'readthedocs/build:stable': { + 'python': {'supported_versions': [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6]}, + }, + 'readthedocs/build:latest': { + 'python': {'supported_versions': [2, 2.7, 3, 3.3, 3.4, 3.5, 3.6]}, + }, + } # All auth ACCOUNT_ADAPTER = 'readthedocs.core.adapters.AccountAdapter'
diff --git a/readthedocs/rtd_tests/tests/test_config_integration.py b/readthedocs/rtd_tests/tests/test_config_integration.py --- a/readthedocs/rtd_tests/tests/test_config_integration.py +++ b/readthedocs/rtd_tests/tests/test_config_integration.py @@ -24,6 +24,7 @@ from readthedocs.projects import tasks from readthedocs.projects.models import Feature, Project from readthedocs.rtd_tests.utils import create_git_submodule, make_git_repo +from doc_builder.constants import DOCKER_IMAGE_SETTINGS def create_load(config=None): @@ -84,30 +85,34 @@ def test_python_supported_versions_default_image_1_0(self, load_config): self.project.enable_pdf_build = True self.project.save() config = load_yaml_config(self.version) - self.assertEqual(load_config.call_count, 1) - load_config.assert_has_calls([ - mock.call( + + expected_env_config = { + 'allow_v2': mock.ANY, + 'build': {'image': 'readthedocs/build:1.0'}, + 'defaults': { + 'install_project': self.project.install_project, + 'formats': [ + 'htmlzip', + 'epub', + 'pdf' + ], + 'use_system_packages': self.project.use_system_packages, + 'requirements_file': self.project.requirements_file, + 'python_version': 2, + 'sphinx_configuration': mock.ANY, + 'build_image': 'readthedocs/build:1.0', + 'doctype': self.project.documentation_type, + }, + } + + img_settings = DOCKER_IMAGE_SETTINGS.get(self.project.container_image, None) + if img_settings: + expected_env_config.update(img_settings) + + load_config.assert_called_once_with( path=mock.ANY, - env_config={ - 'allow_v2': mock.ANY, - 'build': {'image': 'readthedocs/build:1.0'}, - 'defaults': { - 'install_project': self.project.install_project, - 'formats': [ - 'htmlzip', - 'epub', - 'pdf', - ], - 'use_system_packages': self.project.use_system_packages, - 'requirements_file': self.project.requirements_file, - 'python_version': 2, - 'sphinx_configuration': mock.ANY, - 'build_image': 'readthedocs/build:1.0', - 'doctype': self.project.documentation_type, - }, - }, - ), - ]) + env_config=expected_env_config, + ) self.assertEqual(config.python.version, 2) @mock.patch('readthedocs.doc_builder.config.load_config')
Use default settings for Config object Our current `Config` object is using hardcoded values to define some DOCKER settings: https://github.com/rtfd/readthedocs.org/blob/8466fa45a2fb1b813ef0272b1154e0c98fe62218/readthedocs/config/config.py#L60-L81 This should follow the pattern that we were using at https://github.com/rtfd/readthedocs.org/blob/8466fa45a2fb1b813ef0272b1154e0c98fe62218/readthedocs/doc_builder/constants.py#L37-L44 This way, we can modify them via settings and also allow them to be _overridden_ from outside (e.g. from `local_settings.py`). Related to #2140
The same happens at https://github.com/rtfd/readthedocs.org/blob/8466fa45a2fb1b813ef0272b1154e0c98fe62218/readthedocs/config/config.py#L278-L279 Also, modifying `PYTHON_SUPPORTED_VERSIONS` has no effect because it then uses https://github.com/rtfd/readthedocs.org/blob/8466fa45a2fb1b813ef0272b1154e0c98fe62218/readthedocs/config/config.py#L283-L289 and returns at line 286 where the class variable was not used. I found this flow kind of confusing, and it's hard to know what we need to touch to get a new value supported. It would be good if we could centralize this in a single setting/variable.
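A hedged sketch of the override this enables, assuming a `local_settings.py` as mentioned above; the values themselves are illustrative and mirror the defaults the patch moves into `settings/base.py`:

```python
# local_settings.py (illustrative values only)
DOCKER_DEFAULT_IMAGE = 'readthedocs/build'
DOCKER_DEFAULT_VERSION = 'latest'
DOCKER_IMAGE = '{}:{}'.format(DOCKER_DEFAULT_IMAGE, DOCKER_DEFAULT_VERSION)
DOCKER_IMAGE_SETTINGS = {
    DOCKER_IMAGE: {
        'python': {'supported_versions': [2, 2.7, 3, 3.5, 3.6]},
    },
}
```

With the `getattr(settings, ...)` lookups from the patch, the config module then picks these values up instead of its hardcoded defaults.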
2019-01-02T13:56:27
readthedocs/readthedocs.org
5,086
readthedocs__readthedocs.org-5086
[ "3161" ]
1f8443cb1cc31c6459b4f25d5926d1b7d1f95f30
diff --git a/readthedocs/core/urls/subdomain.py b/readthedocs/core/urls/subdomain.py --- a/readthedocs/core/urls/subdomain.py +++ b/readthedocs/core/urls/subdomain.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- + """URL configurations for subdomains.""" from __future__ import absolute_import @@ -10,7 +12,7 @@ from readthedocs.core.views.serve import ( redirect_page_with_filename, - redirect_project_slug, serve_docs + redirect_project_slug, serve_docs, robots_txt, ) from readthedocs.core.views import ( server_error_500, @@ -22,6 +24,8 @@ handler404 = server_error_404 subdomain_urls = [ + url(r'robots.txt$', robots_txt, name='robots_txt'), + url(r'^(?:|projects/(?P<subproject_slug>{project_slug})/)' r'page/(?P<filename>.*)$'.format(**pattern_opts), redirect_page_with_filename, diff --git a/readthedocs/core/views/serve.py b/readthedocs/core/views/serve.py --- a/readthedocs/core/views/serve.py +++ b/readthedocs/core/views/serve.py @@ -223,3 +223,49 @@ def _serve_symlink_docs(request, project, privacy_level, filename=''): raise Http404( 'File not found. Tried these files: %s' % ','.join(files_tried)) + + +@map_project_slug +def robots_txt(request, project): + """ + Serve custom user's defined ``/robots.txt``. + + If the user added a ``robots.txt`` in the "default version" of the project, + we serve it directly. + """ + # Use the ``robots.txt`` file from the default version configured + version_slug = project.get_default_version() + version = project.versions.get(slug=version_slug) + + no_serve_robots_txt = any([ + # If project is private or, + project.privacy_level == constants.PRIVATE, + # default version is private or, + version.privacy_level == constants.PRIVATE, + # default version is not active or, + not version.active, + # default version is not built + not version.built, + ]) + if no_serve_robots_txt: + # ... we do return a 404 + raise Http404() + + filename = resolve_path( + project, + version_slug=version_slug, + filename='robots.txt', + subdomain=True, # subdomain will make it a "full" path without a URL prefix + ) + + # This breaks path joining, by ignoring the root when given an "absolute" path + if filename[0] == '/': + filename = filename[1:] + + basepath = PublicSymlink(project).project_root + fullpath = os.path.join(basepath, filename) + + if os.path.exists(fullpath): + return HttpResponse(open(fullpath).read(), content_type='text/plain') + + return HttpResponse('User-agent: *\nAllow: /\n', content_type='text/plain')
diff --git a/readthedocs/rtd_tests/tests/test_doc_serving.py b/readthedocs/rtd_tests/tests/test_doc_serving.py --- a/readthedocs/rtd_tests/tests/test_doc_serving.py +++ b/readthedocs/rtd_tests/tests/test_doc_serving.py @@ -2,13 +2,17 @@ from __future__ import absolute_import, unicode_literals, division, print_function import mock +from mock import patch, mock_open import django_dynamic_fixture as fixture +import pytest +import six from django.contrib.auth.models import User from django.test import TestCase from django.test.utils import override_settings from django.http import Http404 from django.conf import settings +from django.urls import reverse from readthedocs.rtd_tests.base import RequestFactoryTestMixin from readthedocs.projects import constants @@ -77,6 +81,28 @@ def test_private_files_not_found(self): self.assertTrue('private_web_root' in str(exc.exception)) self.assertTrue('public_web_root' not in str(exc.exception)) + @override_settings( + PYTHON_MEDIA=False, + USE_SUBDOMAIN=True, + PUBLIC_DOMAIN='readthedocs.io', + ROOT_URLCONF=settings.SUBDOMAIN_URLCONF, + ) + def test_robots_txt(self): + self.public.versions.update(active=True, built=True) + response = self.client.get( + reverse('robots_txt'), + HTTP_HOST='private.readthedocs.io', + ) + self.assertEqual(response.status_code, 404) + + self.client.force_login(self.eric) + response = self.client.get( + reverse('robots_txt'), + HTTP_HOST='private.readthedocs.io', + ) + # Private projects/versions always return 404 for robots.txt + self.assertEqual(response.status_code, 404) + @override_settings(SERVE_DOCS=[constants.PRIVATE, constants.PUBLIC]) class TestPublicDocs(BaseDocServing): @@ -110,3 +136,41 @@ def test_both_files_not_found(self): _serve_symlink_docs(request, project=self.private, filename='/en/latest/usage.html', privacy_level='public') self.assertTrue('private_web_root' not in str(exc.exception)) self.assertTrue('public_web_root' in str(exc.exception)) + + @override_settings( + PYTHON_MEDIA=False, + USE_SUBDOMAIN=True, + PUBLIC_DOMAIN='readthedocs.io', + ROOT_URLCONF=settings.SUBDOMAIN_URLCONF, + ) + def test_default_robots_txt(self): + self.public.versions.update(active=True, built=True) + response = self.client.get( + reverse('robots_txt'), + HTTP_HOST='public.readthedocs.io', + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b'User-agent: *\nAllow: /\n') + + @override_settings( + PYTHON_MEDIA=False, + USE_SUBDOMAIN=True, + PUBLIC_DOMAIN='readthedocs.io', + ROOT_URLCONF=settings.SUBDOMAIN_URLCONF, + ) + @patch( + 'builtins.open', + new_callable=mock_open, + read_data='My own robots.txt', + ) + @patch('readthedocs.core.views.serve.os') + @pytest.mark.skipif(six.PY2, reason='In Python2 the mock is __builtins__.open') + def test_custom_robots_txt(self, os_mock, open_mock): + os_mock.path.exists.return_value = True + self.public.versions.update(active=True, built=True) + response = self.client.get( + reverse('robots_txt'), + HTTP_HOST='public.readthedocs.io', + ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, b'My own robots.txt')
Custom robots.txt support? We've talked about blowing away the protected designation, so not sure if it makes sense to put special case on the protected privacy level, but maybe a separate option for docs that shouldn't be crawled?
@agjohnson any momentum on this particular item? What is the current recommendation to NOINDEX/NOFOLLOW a site? At the very least, we could kill our global robots.txt redirect in nginx and allow projects to contribute their own robots.txt via a static page in Sphinx @agjohnson what's the status of this issue? I'm not sure I clearly understand what action is needed here. 1. if it's around the Protected privacy level, I think we can close it as won't fix since we are removing the privacy levels from the Community site. 1. if it's about giving our users a way to upload a `robots.txt` themselves, I think the solution that I proposed at https://github.com/rtfd/readthedocs.org/issues/2430#issuecomment-418471125 should work (there is an example of a repository in that conversation also) and we can close this issue. If none of those are what you have in mind, please elaborate a little more on what you are considering here. @humitos the solution provided in [#2430 (comment)](https://github.com/rtfd/readthedocs.org/issues/2430#issuecomment-418471125) is not optimal: > 1. Your site can have only one robots.txt file. > 2. The robots.txt file must be located at the root of the website host that it applies to. For instance, to control crawling on all URLs below http://www.example.com/, the robots.txt file must be located at http://www.example.com/robots.txt. It cannot be placed in a subdirectory (for example, at http://example.com/pages/robots.txt). If you're unsure about how to access your website root, or need permissions to do so, contact your web hosting service provider. If you can't access your website root, use an alternative blocking method such as meta tags. > > [Google support](https://support.google.com/webmasters/answer/6062596?hl=en) I think the only viable option is using the "meta tags" method [[1]](http://www.robotstxt.org/meta.html)[[2]](https://support.google.com/webmasters/answer/93710). I am working on a workaround for [Astropy's docs](https://astropy.readthedocs.io/en/stable/) (refer to issue [#7794](https://github.com/astropy/astropy/issues/7794) and pull request [#7874](https://github.com/astropy/astropy/pull/787)). I'll be done by the end of the day and will let you know. If it's a good workaround, I'd be happy to document the process. @dasdachs I see. You are right. > I'll be done by the end of the day and will let you know. If it's a good workaround, I'd be happy to document the process. If the workaround using meta tags is a good one, maybe it's a good solution to be implemented by a Sphinx extension. It's still a hack, but at least "an automatic one" :grimacing: After reading the docs you linked, I don't see a solution coming from Sphinx or without a hack, so I think we should implement this from Read the Docs itself by adding a `robotstxt_file:` option in our YAML (or similar) and copying it at the root of the subdomain. Not sure if that's possible, though. > I think we should implement this from Read the Docs itself by adding a `robotstxt_file:` option in our YAML This is not trivial. With that file, we will need to: 1. append our [own set of rules](https://github.com/rtfd/readthedocs.org/blob/b07c3a383cc89eb0c92f6070fe888c988da48089/media/robots.txt) to the custom `robots.txt` 1. sync the result to all our web servers * since this file will be _outside_ Sphinx output, we need to adapt that code 1.
modify the nginx rule to try serving the custom `robots.txt` from the project/version first, and serve ours as a fallback This raises another problem: we have one subdomain with multiple versions but only _one_ root place to serve the `robots.txt` file from. Which one should we serve? Being a "global setting" makes me wonder whether it wouldn't be better to add a text box in the admin where the user can paste the contents of that file, or something simpler like that. > I think we should implement this from Read the Docs itself by adding a robotstxt_file: option in our YAML I doubt this will be in the yaml, as this is a per-project configuration rather than per-version. The hack I found could be quite simple ([this](https://github.com/astropy/astropy/pull/7874#issuecomment-429138942)): add meta tags to files you don't want indexed. But because of the global `robots.txt`, it would have no effect (referring to this answer from [Google](https://support.google.com/webmasters/answer/93710)). Some solution using YAML or a text box seems like the way to go. Unfortunately, the idea of adding meta tags isn't really an ideal solution, because we can't add it to all the old versions we host. In the case of astropy, for example, we host a lot of old versions based on GitHub tags, e.g.: http://docs.astropy.org/en/v1.0/ We can't change all the tags in our GitHub repo for all the old versions, so any solution that involves changes to the repository is a no-go. The only real solution would be to be able to customize robots.txt from the RTD settings interface.
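As context for the patch above: the new view serves a `robots.txt` found at the root of the default version's HTML output, which a project could provide through Sphinx's `html_extra_path` option (a real Sphinx setting; the file layout here is an assumption):

```python
# conf.py -- files listed here are copied verbatim to the output root,
# so a robots.txt next to conf.py ends up at /robots.txt of the docs.
html_extra_path = ['robots.txt']
```

If no such file is found, the view above falls back to a permissive default (`User-agent: *` / `Allow: /`).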
2019-01-10T11:41:08
readthedocs/readthedocs.org
5,109
readthedocs__readthedocs.org-5109
[ "5106" ]
33ed2735dc6e5e6e3b9e552bbbb72df600648a4a
diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py --- a/readthedocs/doc_builder/environments.py +++ b/readthedocs/doc_builder/environments.py @@ -32,6 +32,7 @@ from readthedocs.builds.models import BuildCommandResultMixin from readthedocs.core.utils import slugify from readthedocs.projects.constants import LOG_TEMPLATE +from readthedocs.projects.models import Feature from readthedocs.restapi.client import api as api_v2 from .constants import ( @@ -726,10 +727,18 @@ def __init__(self, *args, **kwargs): project_name=self.project.slug, )[:DOCKER_HOSTNAME_MAX_LEN], ) + + # Decide what Docker image to use, based on priorities: + # Use the Docker image set by our feature flag: ``testing`` or, + if self.project.has_feature(Feature.USE_TESTING_BUILD_IMAGE): + self.container_image = 'readthedocs/build:testing' + # the image set by user or, if self.config and self.config.build.image: self.container_image = self.config.build.image + # the image overridden by the project (manually set by an admin). if self.project.container_image: self.container_image = self.project.container_image + if self.project.container_mem_limit: self.container_mem_limit = self.project.container_mem_limit if self.project.container_time_limit: diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -1009,6 +1009,7 @@ def add_features(sender, **kwargs): ALLOW_V2_CONFIG_FILE = 'allow_v2_config_file' MKDOCS_THEME_RTD = 'mkdocs_theme_rtd' DONT_SHALLOW_CLONE = 'dont_shallow_clone' + USE_TESTING_BUILD_IMAGE = 'use_testing_build_image' FEATURES = ( (USE_SPHINX_LATEST, _('Use latest version of Sphinx')), @@ -1023,6 +1024,8 @@ def add_features(sender, **kwargs): (MKDOCS_THEME_RTD, _('Use Read the Docs theme for MkDocs as default theme')), (DONT_SHALLOW_CLONE, _( 'Do not shallow clone when cloning git repos')), + (USE_TESTING_BUILD_IMAGE, _( + 'Use Docker image labelled as `testing` to build the docs')), ) projects = models.ManyToManyField(
Sample some projects for Docker image 5.0rc1 We just released `4.0` and `5.0rc1`. So, * `2.0` got deprecated * `3.0` became `stable` * `4.0` became `latest` Now, we want to start testing `5.0rc1` with some projects. This image has [some changes](https://github.com/rtfd/readthedocs-docker-images/compare/4.0.0..master). We want to create a Feature flag meaning "no matter what the user has selected for the docker image, we use `5.0rc` for `container_image`" when building the docs. Once we have this feature added, we can start with 100, 500, 1500 and 5000 projects over a month and a half and, after that, release the new image.
The current implementation of the config module allows us to override the user's image from the db field `container_image` Yes, but we don't want to do that, to avoid future misunderstandings. We just released these images and we didn't know why there were projects with `4.0rc1`: whether it was because we wanted to test the image or because they needed something specific from that image. Now, we want to avoid that confusion by creating a Feature flag. This flag will be useful to test all the `rc` releases, not just this one in particular.
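For illustration, a hypothetical Django shell session enabling the new flag for a sampled project; the slug and the use of `get_or_create` are assumptions, not a prescribed workflow:

```python
from readthedocs.projects.models import Feature, Project

feature, _ = Feature.objects.get_or_create(
    feature_id=Feature.USE_TESTING_BUILD_IMAGE,
)
# Sampled projects get the flag; per the patch above, their builds then
# use ``readthedocs/build:testing`` regardless of their own image choice.
feature.projects.add(Project.objects.get(slug='test-builds'))
```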
2019-01-15T09:29:00
readthedocs/readthedocs.org
5,118
readthedocs__readthedocs.org-5118
[ "4978" ]
39a0d5fb35f30691d14fa689d4c40891ca0c95b5
diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py --- a/readthedocs/projects/tasks.py +++ b/readthedocs/projects/tasks.py @@ -52,6 +52,7 @@ BuildEnvironmentError, BuildEnvironmentWarning, BuildTimeoutError, + MkDocsYAMLParseError, ProjectBuildsSkippedError, VersionLockedError, YAMLParseError, @@ -64,7 +65,7 @@ from readthedocs.worker import app from .constants import LOG_TEMPLATE -from .exceptions import RepositoryError +from .exceptions import ProjectConfigurationError, RepositoryError from .models import Domain, HTMLFile, ImportedFile, Project from .signals import ( after_build, @@ -264,6 +265,10 @@ def run(self, version_pk): # pylint: disable=arguments-differ return False +# Exceptions under ``throws`` argument are considered ERROR from a Build +# perspective (the build failed and can continue) but as a WARNING for the +# application itself (RTD code didn't failed). These exception are logged as +# ``INFO`` and they are not sent to Sentry. @app.task( bind=True, max_retries=5, @@ -273,7 +278,11 @@ def run(self, version_pk): # pylint: disable=arguments-differ ProjectBuildsSkippedError, YAMLParseError, BuildTimeoutError, + BuildEnvironmentWarning, + RepositoryError, + ProjectConfigurationError, ProjectBuildsSkippedError, + MkDocsYAMLParseError, ), ) def update_docs_task(self, project_id, *args, **kwargs): @@ -604,8 +613,6 @@ def setup_vcs(self): Update the checkout of the repo to make sure it's the latest. This also syncs versions in the DB. - - :param build_env: Build environment """ self.setup_env.update_build(state=BUILD_STATE_CLONING) @@ -619,14 +626,17 @@ def setup_vcs(self): try: self.sync_repo() except RepositoryError: - # Do not log as ERROR handled exceptions log.warning('There was an error with the repository', exc_info=True) + # Re raise the exception to stop the build at this point + raise except vcs_support_utils.LockTimeout: log.info( 'Lock still active: project=%s version=%s', self.project.slug, self.version.slug, ) + # Raise the proper exception (won't be sent to Sentry) + raise VersionLockedError except Exception: # Catch unhandled errors when syncing log.exception( @@ -640,6 +650,8 @@ def setup_vcs(self): }, }, ) + # Re raise the exception to stop the build at this point + raise commit = self.project.vcs_repo(self.version.slug).commit if commit: @@ -1044,7 +1056,7 @@ def symlink_project(project_pk): sym.run() [email protected](queue='web', throws=(BuildEnvironmentWarning,)) [email protected](queue='web') def symlink_domain(project_pk, domain, delete=False): """ Symlink domain. @@ -1093,7 +1105,7 @@ def broadcast_remove_orphan_symlinks(): broadcast(type='web', task=remove_orphan_symlinks, args=[]) [email protected](queue='web', throws=(BuildEnvironmentWarning,)) [email protected](queue='web') def symlink_subproject(project_pk): project = Project.objects.get(pk=project_pk) for symlink in [PublicSymlink, PrivateSymlink]:
BuildEnvironmentWarning logged as error After #4804 got merged, `BuildEnvironmentWarning` started being logged again as an error. This is not a big problem, but it's annoying when debugging since it uses our Sentry quota. * https://sentry.io/read-the-docs/readthedocscom/issues/793207804/ * https://sentry.io/read-the-docs/readthedocs-org/issues/783202503/ Relating to #4590 since that could help us to avoid logging all of these.
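For anyone unfamiliar with the mechanism the patch relies on: Celery's `throws` task option marks exceptions as expected, so they are logged at INFO severity with just the message (no traceback) and stay out of error reporting like Sentry. A minimal sketch with a made-up task:

```python
from celery import Celery

app = Celery('builds')


class BuildEnvironmentWarning(Exception):
    """An expected, user-facing build problem -- not an application bug."""


# Exceptions listed in ``throws`` are treated as expected failures:
# Celery logs them as INFO without a traceback instead of as ERROR.
@app.task(throws=(BuildEnvironmentWarning,))
def build_docs(project_slug):
    raise BuildEnvironmentWarning('build failed for %s' % project_slug)
```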
@humitos I would like to take this issue up. Any idea on how to start? Edit: I'm working on it according to this [comment](https://github.com/rtfd/readthedocs.org/issues/4590#issuecomment-434682068).
2019-01-16T09:50:28
readthedocs/readthedocs.org
5,140
readthedocs__readthedocs.org-5140
[ "3684" ]
ed8dd29a68d009fc08929fabf7155883482619c1
diff --git a/readthedocs/builds/admin.py b/readthedocs/builds/admin.py --- a/readthedocs/builds/admin.py +++ b/readthedocs/builds/admin.py @@ -7,6 +7,7 @@ from readthedocs.builds.models import Build, BuildCommandResult, Version from readthedocs.core.utils import trigger_build +from readthedocs.core.utils.general import wipe_version_via_slugs class BuildCommandResultInline(admin.TabularInline): @@ -57,7 +58,22 @@ class VersionAdmin(GuardedModelAdmin): list_filter = ('type', 'privacy_level', 'active', 'built') search_fields = ('slug', 'project__slug') raw_id_fields = ('project',) - actions = ['build_version'] + actions = ['wipe_selected_versions', 'build_version'] + + def wipe_selected_versions(self, request, queryset): + """Wipes the selected versions.""" + for version in queryset: + wipe_version_via_slugs( + version_slug=version.slug, + project_slug=version.project.slug + ) + self.message_user( + request, + 'Wiped {}.'.format(version.slug), + level=messages.SUCCESS + ) + + wipe_selected_versions.short_description = 'Wipe selected versions' def build_version(self, request, queryset): """Trigger a build for the project version.""" diff --git a/readthedocs/core/utils/general.py b/readthedocs/core/utils/general.py new file mode 100644 --- /dev/null +++ b/readthedocs/core/utils/general.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +import os + +from django.shortcuts import get_object_or_404 + +from readthedocs.core.utils import broadcast +from readthedocs.projects.tasks import remove_dirs +from readthedocs.builds.models import Version + + +def wipe_version_via_slugs(version_slug, project_slug): + """Wipes the given version of a given project.""" + version = get_object_or_404( + Version, + slug=version_slug, + project__slug=project_slug, + ) + del_dirs = [ + os.path.join(version.project.doc_path, 'checkouts', version.slug), + os.path.join(version.project.doc_path, 'envs', version.slug), + os.path.join(version.project.doc_path, 'conda', version.slug), + ] + for del_dir in del_dirs: + broadcast(type='build', task=remove_dirs, args=[(del_dir,)]) diff --git a/readthedocs/core/views/__init__.py b/readthedocs/core/views/__init__.py --- a/readthedocs/core/views/__init__.py +++ b/readthedocs/core/views/__init__.py @@ -6,8 +6,6 @@ documentation and header rendering, and server errors. """ -from __future__ import absolute_import -from __future__ import division import os import logging from urllib.parse import urlparse @@ -19,6 +17,7 @@ from readthedocs.builds.models import Version +from readthedocs.core.utils.general import wipe_version_via_slugs from readthedocs.core.resolver import resolve_path from readthedocs.core.symlink import PrivateSymlink, PublicSymlink from readthedocs.core.utils import broadcast @@ -89,13 +88,10 @@ def wipe_version(request, project_slug, version_slug): raise Http404('You must own this project to wipe it.') if request.method == 'POST': - del_dirs = [ - os.path.join(version.project.doc_path, 'checkouts', version.slug), - os.path.join(version.project.doc_path, 'envs', version.slug), - os.path.join(version.project.doc_path, 'conda', version.slug), - ] - for del_dir in del_dirs: - broadcast(type='build', task=remove_dirs, args=[(del_dir,)]) + wipe_version_via_slugs( + version_slug=version_slug, + project_slug=project_slug + ) return redirect('project_version_list', project_slug) return render( request,
diff --git a/readthedocs/rtd_tests/tests/test_core_utils.py b/readthedocs/rtd_tests/tests/test_core_utils.py --- a/readthedocs/rtd_tests/tests/test_core_utils.py +++ b/readthedocs/rtd_tests/tests/test_core_utils.py @@ -1,11 +1,17 @@ # -*- coding: utf-8 -*- """Test core util functions.""" +import os import mock + +from mock import call +from django.http import Http404 from django.test import TestCase from django_dynamic_fixture import get from readthedocs.builds.models import Version +from readthedocs.core.utils.general import wipe_version_via_slugs +from readthedocs.projects.tasks import remove_dirs from readthedocs.core.utils import slugify, trigger_build from readthedocs.projects.models import Project @@ -153,3 +159,58 @@ def test_slugify(self): slugify('A title_-_with separated parts', dns_safe=False), 'a-title_-_with-separated-parts', ) + + @mock.patch('readthedocs.core.utils.general.broadcast') + def test_wipe_version_via_slug(self, mock_broadcast): + wipe_version_via_slugs( + version_slug=self.version.slug, + project_slug=self.version.project.slug + ) + expected_del_dirs = [ + os.path.join(self.version.project.doc_path, 'checkouts', self.version.slug), + os.path.join(self.version.project.doc_path, 'envs', self.version.slug), + os.path.join(self.version.project.doc_path, 'conda', self.version.slug), + ] + + mock_broadcast.assert_has_calls( + [ + call(type='build', task=remove_dirs, args=[(expected_del_dirs[0],)]), + call(type='build', task=remove_dirs, args=[(expected_del_dirs[1],)]), + call(type='build', task=remove_dirs, args=[(expected_del_dirs[2],)]), + ], + any_order=False + ) + + @mock.patch('readthedocs.core.utils.general.broadcast') + def test_wipe_version_via_slug_wrong_param(self, mock_broadcast): + self.assertFalse(Version.objects.filter(slug='wrong-slug').exists()) + with self.assertRaises(Http404): + wipe_version_via_slugs( + version_slug='wrong-slug', + project_slug=self.version.project.slug + ) + mock_broadcast.assert_not_called() + + @mock.patch('readthedocs.core.utils.general.broadcast') + def test_wipe_version_via_slugs_same_version_slug_with_diff_proj(self, mock_broadcast): + project_2 = get(Project) + version_2 = get(Version, project=project_2, slug=self.version.slug) + wipe_version_via_slugs( + version_slug=version_2.slug, + project_slug=project_2.slug, + ) + + expected_del_dirs = [ + os.path.join(version_2.project.doc_path, 'checkouts', version_2.slug), + os.path.join(version_2.project.doc_path, 'envs', version_2.slug), + os.path.join(version_2.project.doc_path, 'conda', version_2.slug), + ] + + mock_broadcast.assert_has_calls( + [ + call(type='build', task=remove_dirs, args=[(expected_del_dirs[0],)]), + call(type='build', task=remove_dirs, args=[(expected_del_dirs[1],)]), + call(type='build', task=remove_dirs, args=[(expected_del_dirs[2],)]), + ], + any_order=False + ) diff --git a/readthedocs/rtd_tests/tests/versions/__init__.py b/readthedocs/rtd_tests/tests/versions/__init__.py new file mode 100644 diff --git a/readthedocs/rtd_tests/tests/versions/test_admin_actions.py b/readthedocs/rtd_tests/tests/versions/test_admin_actions.py new file mode 100644 --- /dev/null +++ b/readthedocs/rtd_tests/tests/versions/test_admin_actions.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- + +import os +import mock + +from mock import call +import django_dynamic_fixture as fixture +from django.test import TestCase +from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME +from django.contrib.auth.models import User +from django import urls + +from readthedocs.builds.models 
import Version +from readthedocs.core.models import UserProfile +from readthedocs.projects.models import Project +from readthedocs.projects.tasks import remove_dirs + + +class VersionAdminActionsTest(TestCase): + + @classmethod + def setUpTestData(cls): + cls.owner = fixture.get(User) + cls.profile = fixture.get(UserProfile, user=cls.owner, banned=False) + cls.admin = fixture.get(User, is_staff=True, is_superuser=True) + cls.project = fixture.get( + Project, + main_language_project=None, + users=[cls.owner], + ) + cls.version = fixture.get(Version, project=cls.project) + + def setUp(self): + self.client.force_login(self.admin) + + @mock.patch('readthedocs.core.utils.general.broadcast') + def test_wipe_selected_version(self, mock_broadcast): + action_data = { + ACTION_CHECKBOX_NAME: [self.version.pk], + 'action': 'wipe_selected_versions', + 'post': 'yes', + } + resp = self.client.post( + urls.reverse('admin:builds_version_changelist'), + action_data + ) + expected_del_dirs = [ + os.path.join(self.version.project.doc_path, 'checkouts', self.version.slug), + os.path.join(self.version.project.doc_path, 'envs', self.version.slug), + os.path.join(self.version.project.doc_path, 'conda', self.version.slug), + ] + + mock_broadcast.assert_has_calls( + [ + call(type='build', task=remove_dirs, args=[(expected_del_dirs[0],)]), + call(type='build', task=remove_dirs, args=[(expected_del_dirs[1],)]), + call(type='build', task=remove_dirs, args=[(expected_del_dirs[2],)]), + ], + any_order=False + )
Add admin functions for wiping a version from web instances This is exposed to project owners, but an admin function here would be really helpful for debugging purposes. The existing view for wiping versions will likely need to be broken out to a reusable view for the admin function.
@agjohnson, I would like to work on this issue. Could you please elaborate on it a little more? Hi @agjohnson, I'm new here! This enhancement is not completely clear to me, so I wanted to ask a few things: 1. Are you talking about a ModelAdmin action here by `admin function`, or something different from out-of-the-box Django admin actions? 2. If yes, is it about adding the action to `readthedocs.builds.admin.BuildAdmin` or to `readthedocs.projects.admin.ProjectAdmin` (more likely, I believe)? 3. If no, then I think it has to be done with the `readthedocs.projects.views.private.project_versions` view! I think point 3 is what you're talking about. Please let me know if this is the case or maybe I'm missing something here. @ajatprabha we are looking for a Django site admin feature, on VersionAdmin. This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.
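For reference, the general shape of a Django admin action on `VersionAdmin` would be something like the sketch below; `wipe_version()` is a stand-in for wherever the existing wipe logic lands once it's extracted into a reusable helper:

```python
from django.contrib import admin, messages

from readthedocs.builds.models import Version


def wipe_version(version):
    """Stand-in for the extracted wipe logic (checkouts/envs/conda dirs)."""


class VersionAdmin(admin.ModelAdmin):
    actions = ['wipe_selected_versions']

    def wipe_selected_versions(self, request, queryset):
        # Runs once for the whole selection in the version changelist.
        for version in queryset:
            wipe_version(version)
            self.message_user(
                request,
                'Wiped {}.'.format(version.slug),
                level=messages.SUCCESS,
            )

    wipe_selected_versions.short_description = 'Wipe selected versions'
```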
2019-01-21T18:37:19
readthedocs/readthedocs.org
5,143
readthedocs__readthedocs.org-5143
[ "1991" ]
86043cc92cdbad9a492664e54e11664e101a8ff7
diff --git a/readthedocs/core/middleware.py b/readthedocs/core/middleware.py --- a/readthedocs/core/middleware.py +++ b/readthedocs/core/middleware.py @@ -1,19 +1,14 @@ -# -*- coding: utf-8 -*- - -"""Middleware for core app.""" - import logging from django.conf import settings from django.contrib.sessions.middleware import SessionMiddleware -from django.core.cache import cache from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist from django.http import Http404, HttpResponseBadRequest from django.urls.base import set_urlconf from django.utils.deprecation import MiddlewareMixin from django.utils.translation import ugettext_lazy as _ +from django.shortcuts import render -from readthedocs.core.utils import cname_to_slug from readthedocs.projects.models import Domain, Project @@ -112,46 +107,21 @@ def process_request(self, request): ) # Try header first, then DNS elif not hasattr(request, 'domain_object'): - try: - slug = cache.get(host) - if not slug: - slug = cname_to_slug(host) - cache.set(host, slug, 60 * 60) - # Cache the slug -> host mapping permanently. - log.info( - LOG_TEMPLATE.format( - msg='CNAME cached: {}->{}'.format(slug, host), - **log_kwargs - ), - ) - request.slug = slug - request.urlconf = SUBDOMAIN_URLCONF - log.warning( - LOG_TEMPLATE.format( - msg='CNAME detected: %s' % request.slug, - **log_kwargs - ), - ) - except: # noqa - # Some crazy person is CNAMEing to us. 404. - log.warning( - LOG_TEMPLATE.format(msg='CNAME 404', **log_kwargs), - ) - raise Http404(_('Invalid hostname')) + # Some person is CNAMEing to us without configuring a domain - 404. + log.warning(LOG_TEMPLATE.format(msg='CNAME 404', **log_kwargs)) + return render(request, 'core/dns-404.html', context={'host': host}, status=404) # Google was finding crazy www.blah.readthedocs.org domains. # Block these explicitly after trying CNAME logic. if len(domain_parts) > 3 and not settings.DEBUG: # Stop www.fooo.readthedocs.org if domain_parts[0] == 'www': - log.debug( - LOG_TEMPLATE.format(msg='404ing long domain', **log_kwargs), - ) + log.debug(LOG_TEMPLATE.format( + msg='404ing long domain', **log_kwargs + )) return HttpResponseBadRequest(_('Invalid hostname')) - log.debug( - LOG_TEMPLATE - .format(msg='Allowing long domain name', **log_kwargs), - ) - # raise Http404(_('Invalid hostname')) + log.debug(LOG_TEMPLATE.format( + msg='Allowing long domain name', **log_kwargs + )) # Normal request. return None diff --git a/readthedocs/core/utils/__init__.py b/readthedocs/core/utils/__init__.py --- a/readthedocs/core/utils/__init__.py +++ b/readthedocs/core/utils/__init__.py @@ -59,15 +59,6 @@ def broadcast(type, task, args, kwargs=None, callback=None): # pylint: disable= return task_promise -def cname_to_slug(host): - # TODO: remove - from dns import resolver - answer = [ans for ans in resolver.query(host, 'CNAME')][0] - domain = answer.target.to_unicode() - slug = domain.split('.')[0] - return slug - - def prepare_build( project, version=None,
diff --git a/readthedocs/rtd_tests/tests/test_middleware.py b/readthedocs/rtd_tests/tests/test_middleware.py --- a/readthedocs/rtd_tests/tests/test_middleware.py +++ b/readthedocs/rtd_tests/tests/test_middleware.py @@ -1,15 +1,11 @@ -# -*- coding: utf-8 -*- - from corsheaders.middleware import CorsMiddleware from django.conf import settings -from django.core.cache import cache from django.http import Http404 from django.test import TestCase from django.test.client import RequestFactory from django.test.utils import override_settings from django.urls.base import get_urlconf, set_urlconf from django_dynamic_fixture import get -from mock import patch from readthedocs.core.middleware import SubdomainMiddleware from readthedocs.projects.models import Domain, Project, ProjectRelationship @@ -26,12 +22,18 @@ def setUp(self): self.middleware = SubdomainMiddleware() self.url = '/' self.owner = create_user(username='owner', password='test') - self.pip = get(Project, slug='pip', users=[self.owner], privacy_level='public') + self.pip = get( + Project, + slug='pip', + users=[self.owner], + privacy_level='public' + ) def test_failey_cname(self): + self.assertFalse(Domain.objects.filter(domain='my.host.com').exists()) request = self.factory.get(self.url, HTTP_HOST='my.host.com') - with self.assertRaises(Http404): - self.middleware.process_request(request) + r = self.middleware.process_request(request) + self.assertEqual(r.status_code, 404) self.assertEqual(request.cname, True) @override_settings(PRODUCTION_DOMAIN='readthedocs.org') @@ -74,7 +76,9 @@ def test_restore_urlconf_after_request(self): @override_settings(PRODUCTION_DOMAIN='prod.readthedocs.org') def test_subdomain_different_length(self): - request = self.factory.get(self.url, HTTP_HOST='pip.prod.readthedocs.org') + request = self.factory.get( + self.url, HTTP_HOST='pip.prod.readthedocs.org' + ) self.middleware.process_request(request) self.assertEqual(request.urlconf, self.urlconf_subdomain) self.assertEqual(request.subdomain, True) @@ -92,19 +96,13 @@ def test_domain_object(self): def test_domain_object_missing(self): self.domain = get(Domain, domain='docs.foobar2.com', project=self.pip) request = self.factory.get(self.url, HTTP_HOST='docs.foobar.com') - with self.assertRaises(Http404): - self.middleware.process_request(request) - - def test_proper_cname(self): - cache.get = lambda x: 'my_slug' - request = self.factory.get(self.url, HTTP_HOST='my.valid.homename') - self.middleware.process_request(request) - self.assertEqual(request.urlconf, self.urlconf_subdomain) - self.assertEqual(request.cname, True) - self.assertEqual(request.slug, 'my_slug') + r = self.middleware.process_request(request) + self.assertEqual(r.status_code, 404) def test_request_header(self): - request = self.factory.get(self.url, HTTP_HOST='some.random.com', HTTP_X_RTD_SLUG='pip') + request = self.factory.get( + self.url, HTTP_HOST='some.random.com', HTTP_X_RTD_SLUG='pip' + ) self.middleware.process_request(request) self.assertEqual(request.urlconf, self.urlconf_subdomain) self.assertEqual(request.cname, True) @@ -113,7 +111,7 @@ def test_request_header(self): @override_settings(PRODUCTION_DOMAIN='readthedocs.org') def test_proper_cname_uppercase(self): - cache.get = lambda x: x.split('.')[0] + get(Domain, project=self.pip, domain='pip.random.com') request = self.factory.get(self.url, HTTP_HOST='PIP.RANDOM.COM') self.middleware.process_request(request) self.assertEqual(request.urlconf, self.urlconf_subdomain) @@ -121,20 +119,35 @@ def test_proper_cname_uppercase(self): 
self.assertEqual(request.slug, 'pip') def test_request_header_uppercase(self): - request = self.factory.get(self.url, HTTP_HOST='some.random.com', HTTP_X_RTD_SLUG='PIP') + request = self.factory.get( + self.url, HTTP_HOST='some.random.com', HTTP_X_RTD_SLUG='PIP' + ) self.middleware.process_request(request) self.assertEqual(request.urlconf, self.urlconf_subdomain) self.assertEqual(request.cname, True) self.assertEqual(request.rtdheader, True) self.assertEqual(request.slug, 'pip') - @override_settings(USE_SUBDOMAIN=True) - # no need to do a real dns query so patch cname_to_slug - @patch('readthedocs.core.middleware.cname_to_slug', new=lambda x: 'doesnt') - def test_use_subdomain_on(self): - request = self.factory.get(self.url, HTTP_HOST='doesnt.really.matter') - ret_val = self.middleware.process_request(request) - self.assertIsNone(ret_val, None) + def test_use_subdomain(self): + domain = 'doesnt.exists.org' + get(Domain, project=self.pip, domain=domain) + request = self.factory.get(self.url, HTTP_HOST=domain) + res = self.middleware.process_request(request) + self.assertIsNone(res) + self.assertEqual(request.slug, 'pip') + self.assertTrue(request.domain_object) + + def test_long_bad_subdomain(self): + domain = 'www.pip.readthedocs.org' + request = self.factory.get(self.url, HTTP_HOST=domain) + res = self.middleware.process_request(request) + self.assertEqual(res.status_code, 400) + + def test_long_subdomain(self): + domain = 'some.long.readthedocs.org' + request = self.factory.get(self.url, HTTP_HOST=domain) + res = self.middleware.process_request(request) + self.assertIsNone(res) class TestCORSMiddleware(TestCase):
Domain resolution too clever One of the tactics that we use for domain magic can result in some unexpected problems. If the project does not have a domain associated with it yet, our middleware will attempt a DNS lookup. This block will attempt to get a slug from a CNAME: https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/core/middleware.py#L67-L76 We aren't doing anything to check that the domain is ours, though. In this case, the domain `virtualenv.pypa.io` resolves as such: ``` % dig +short virtualenv.pypa.io c.global-ssl.fastly.net. 199.27.79.175 ``` The above block will split the CNAME returned, assume the slug is the `c`, and apply the domain to this project if it exists. Instead, we should probably ensure the CNAME domain is something we control, and that this is indeed a slug, not a hostname.
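The flawed logic boils down to something like the sketch below (a simplification of the middleware helper, using dnspython): nothing validates that the CNAME target is a domain we control before its first label is taken as the slug.

```python
from dns import resolver  # dnspython


def cname_to_slug(host):
    # Take the first CNAME answer and assume its leftmost label is the
    # project slug -- this is the problematic assumption.
    answer = list(resolver.query(host, 'CNAME'))[0]
    domain = answer.target.to_unicode()
    return domain.split('.')[0]


# cname_to_slug('virtualenv.pypa.io') follows the CNAME chain to
# 'c.global-ssl.fastly.net.' and wrongly returns 'c'.
```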
This is also happening in API v2: http://readthedocs.org/api/v2/cname/?host=packaging.python.org ``` json {"host":"packaging.python.org","slug":"python"} ``` `dig` output for packaging.python.org: ``` console $ dig +short packaging.python.org python.map.fastly.net. prod.python.map.fastlylb.net. 151.101.60.223 ``` I've opened https://github.com/rtfd/readthedocs.org/pull/2317 to use the same helper both in middleware and API. So, I was reading this, and I don't fully understand the issue. In what case is this bug triggered? Why are some of these domains not resolving to *.readthedocs.io (even though they are hosted by RTD)? As a solution, we should check that the domain matches `PUBLIC_DOMAIN`, is that correct? But I still have the same doubts as above p: Also, do we really need the `/cname` endpoint? So, to trigger this, we have middleware that tries to guess the project based on DNS. I'm not convinced this should be a feature of RTD. The lookup logic is supposed to be: * See an incoming request for a domain that isn't our PUBLIC_DOMAIN and isn't already a Domain * Do a DNS lookup on the domain * Assume the domain is pointed to `<project_slug>.readthedocs.io` and parse it accordingly * Bring up the project for `<project_slug>` It fails when the domain we're seeing a request for either isn't actually pointing to us, or when there's some other record, like the DNS response above: ``` % dig packaging.python.org +short dualstack.python.map.fastly.net. 151.101.52.223 ``` Our code assumes the project slug is `dualstack`, i.e.: http://readthedocs.org/api/v2/cname/?host=packaging.python.org So! * We need to determine what might be removed if we just gutted this feature * We need to know if there are domains that would be affected by removing this feature. We might be logging this currently. Historically I think we made Domain objects that were `machine=True` when we relied on this feature * I'm not sure what else is using the /cname endpoint; that also needs some discovery Based on what you find, we are going to be weighing: * Removing the feature altogether and forcing users to establish domains with us (with Cloudflare, I believe this might be an absolute requirement) -- @davidfischer can confirm * Or do we repair the logic and fix the parsing? We are currently asking users to point to `readthedocs.io` in our docs, I believe, so this middleware will just be working less and less. Looks like things could be stacking up to just remove the feature :fire: So, this happens if users CNAME us without creating a domain in the admin panel, right? > Historically I think we made Domain objects that were machine=True when we relied on this feature There are ~1k of these. > Removing the feature altogether and forcing users to establish domains with us (with Cloudflare, I believe this might be an absolute requirement) -- @davidfischer can confirm It is required to generate a certificate, but `*.readthedocs.io` goes right to our load balancer and not to Cloudflare. Edit: at least until #4521. With that said, I'm +1 on removing this feature. If people want to use RTD with a custom domain, I don't think requiring them to put that domain in the dashboard is burdensome. > So, this happens if users CNAME us without creating a domain in the admin panel, right? Correct > I don't think requiring them to put that domain in the dashboard is burdensome I'd say it is probably expected even +1 to removing our old magic logic. We just need to make sure it won't break a bunch of users when we do it.
I believe we backported domains that were doing this into `Domain` objects when we first released the explicit `Domain`s, but we might need to do something similar when we remove the auto-slugify based on DNS, to make sure old domains don't break.
2019-01-22T01:09:26
readthedocs/readthedocs.org
5,157
readthedocs__readthedocs.org-5157
[ "5151" ]
aace7321d253d2b8c24793cb48aebb44341c5962
diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py --- a/readthedocs/projects/forms.py +++ b/readthedocs/projects/forms.py @@ -22,6 +22,7 @@ from readthedocs.oauth.models import RemoteRepository from readthedocs.projects import constants from readthedocs.projects.exceptions import ProjectSpamError +from readthedocs.projects.templatetags.projects_tags import sort_version_aware from readthedocs.projects.models import ( Domain, EmailHook, @@ -235,9 +236,7 @@ def __init__(self, *args, **kwargs): choices=[default_choice] + list(all_versions), ) - active_versions = self.instance.all_active_versions().values_list( - 'slug', 'verbose_name' - ) # yapf: disabled + active_versions = self.get_all_active_versions() self.fields['default_version'].widget = forms.Select( choices=active_versions, ) @@ -253,6 +252,21 @@ def clean_conf_py_file(self): ) # yapf: disable return filename + def get_all_active_versions(self): + """ + Returns all active versions. + + Returns a smartly sorted list of tuples. + First item of each tuple is the version's slug, + and the second item is version's verbose_name. + """ + version_qs = self.instance.all_active_versions() + if version_qs.exists(): + version_qs = sort_version_aware(version_qs) + all_versions = [(version.slug, version.verbose_name) for version in version_qs] + return all_versions + return [()] + class UpdateProjectForm( ProjectTriggerBuildMixin, @@ -429,12 +443,14 @@ def build_versions_form(project): versions_qs = project.versions.all() # Admin page, so show all versions active = versions_qs.filter(active=True) if active.exists(): + active = sort_version_aware(active) choices = [(version.slug, version.verbose_name) for version in active] attrs['default-version'] = forms.ChoiceField( label=_('Default Version'), choices=choices, initial=project.get_default_version(), ) + versions_qs = sort_version_aware(versions_qs) for version in versions_qs: field_name = 'version-{}'.format(version.slug) privacy_name = 'privacy-{}'.format(version.slug) diff --git a/readthedocs/projects/views/public.py b/readthedocs/projects/views/public.py --- a/readthedocs/projects/views/public.py +++ b/readthedocs/projects/views/public.py @@ -26,6 +26,7 @@ from readthedocs.builds.views import BuildTriggerMixin from readthedocs.projects.models import Project from readthedocs.search.indexes import PageIndex +from readthedocs.projects.templatetags.projects_tags import sort_version_aware from readthedocs.search.views import LOG_TEMPLATE from .base import ProjectOnboardMixin @@ -171,6 +172,7 @@ def project_downloads(request, project_slug): slug=project_slug, ) versions = Version.objects.public(user=request.user, project=project) + versions = sort_version_aware(versions) version_data = OrderedDict() for version in versions: data = version.get_downloads()
Sort versions smartly everywhere We currently have code that is smart about version sorting in Python: https://github.com/rtfd/readthedocs.org/blob/master/readthedocs/projects/templatetags/projects_tags.py#L13 We should make sure that we're using this everywhere that we display versions to a user. Refs https://github.com/rtfd/readthedocs.org/pull/5037.
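For reference, the fix is mostly a matter of running version querysets through that helper before rendering choices. A sketch of the pattern, reusing the template tag from the codebase:

```python
from readthedocs.projects.templatetags.projects_tags import sort_version_aware


def version_choices(project):
    # ``sort_version_aware`` orders versions the way humans expect
    # (latest/stable first, then version-number ordering), so any view
    # or form that renders a version dropdown can reuse it.
    active_versions = project.versions.filter(active=True)
    return [
        (version.slug, version.verbose_name)
        for version in sort_version_aware(active_versions)
    ]
```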
2019-01-23T08:56:19
readthedocs/readthedocs.org
5,180
readthedocs__readthedocs.org-5180
[ "5175" ]
5067f5c60ac92a78849f339775acf2220d6eb538
diff --git a/readthedocs/projects/migrations/0038_update-doctype-helptext.py b/readthedocs/projects/migrations/0038_update-doctype-helptext.py new file mode 100644 --- /dev/null +++ b/readthedocs/projects/migrations/0038_update-doctype-helptext.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.18 on 2019-02-02 19:45 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('projects', '0037_add_htmlfile'), + ] + + operations = [ + migrations.AlterField( + model_name='project', + name='documentation_type', + field=models.CharField(choices=[('sphinx', 'Sphinx Html'), ('mkdocs', 'Mkdocs (Markdown)'), ('sphinx_htmldir', 'Sphinx HtmlDir'), ('sphinx_singlehtml', 'Sphinx Single Page HTML')], default='sphinx', help_text='Type of documentation you are building. <a href="http://www.sphinx-doc.org/en/stable/builders.html#sphinx.builders.html.DirectoryHTMLBuilder">More info on sphinx builders</a>.', max_length=20, verbose_name='Documentation type'), + ), + ] diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -188,7 +188,7 @@ class Project(models.Model): help_text=_( 'Type of documentation you are building. <a href="' 'http://www.sphinx-doc.org/en/stable/builders.html#sphinx.builders.html.' - 'DirectoryHTMLBuilder">More info</a>.', + 'DirectoryHTMLBuilder">More info on sphinx builders</a>.', ), )
'More info' link in admin settings goes to unexpected location ## Details In Admin->Settings, section 'Documentation type', there is a link titled 'More info' next to a dropdown menu allowing selection of 'Sphinx Html', 'Mkdocs (Markdown)', 'Sphinx Htmldir' or 'Sphinx Single Page HTML'. ## Expected Result I expected this to link to a page containing information about the possible choices from the menu. ## Actual Result The link is to the `sphinx.builders.html.DirectoryHTMLBuilder` class (http://www.sphinx-doc.org/en/master/usage/builders/#sphinx.builders.html.DirectoryHTMLBuilder). This is unexpected if one is new to Read the Docs.
The link was added in 3fb8a68, which references a discussion of the sphinx builders.
2019-01-26T16:54:44
readthedocs/readthedocs.org
5,186
readthedocs__readthedocs.org-5186
[ "1410" ]
3f31cbb5d396fcd071811988e300f96eba50ab19
diff --git a/readthedocs/builds/version_slug.py b/readthedocs/builds/version_slug.py --- a/readthedocs/builds/version_slug.py +++ b/readthedocs/builds/version_slug.py @@ -26,6 +26,7 @@ from django.db import models from django.utils.encoding import force_text +from slugify import slugify as unicode_slugify def get_fields_with_model(cls): @@ -53,13 +54,15 @@ def get_fields_with_model(cls): class VersionSlugField(models.CharField): - """Inspired by ``django_extensions.db.fields.AutoSlugField``.""" + """ + Inspired by ``django_extensions.db.fields.AutoSlugField``. - invalid_chars_re = re.compile('[^-._a-z0-9]') - leading_punctuation_re = re.compile('^[-._]+') - placeholder = '-' - fallback_slug = 'unknown' + Uses ``unicode-slugify`` to generate the slug. + """ + + ok_chars = '-._' # dash, dot, underscore test_pattern = re.compile('^{pattern}$'.format(pattern=VERSION_SLUG_REGEX)) + fallback_slug = 'unknown' def __init__(self, *args, **kwargs): kwargs.setdefault('db_index', True) @@ -78,13 +81,42 @@ def get_queryset(self, model_cls, slug_field): return model._default_manager.all() return model_cls._default_manager.all() + def _normalize(self, content): + """ + Normalize some invalid characters (/, %, !, ?) to become a dash (``-``). + + .. note:: + + We replace these characters to a dash to keep compatibility with the + old behavior and also because it makes this more readable. + + For example, ``release/1.0`` will become ``release-1.0``. + """ + return re.sub('[/%!?]', '-', content) + def slugify(self, content): + """ + Make ``content`` a valid slug. + + It uses ``unicode-slugify`` behind the scenes which works properly with + Unicode characters. + """ if not content: return '' - slugified = content.lower() - slugified = self.invalid_chars_re.sub(self.placeholder, slugified) - slugified = self.leading_punctuation_re.sub('', slugified) + normalized = self._normalize(content) + slugified = unicode_slugify( + normalized, + only_ascii=True, + spaces=False, + lower=True, + ok=self.ok_chars, + space_replacement='-', + ) + + # Remove first character wile it's an invalid character for the + # beginning of the slug + slugified = slugified.lstrip(self.ok_chars) if not slugified: return self.fallback_slug
diff --git a/readthedocs/rtd_tests/tests/test_version_slug.py b/readthedocs/rtd_tests/tests/test_version_slug.py --- a/readthedocs/rtd_tests/tests/test_version_slug.py +++ b/readthedocs/rtd_tests/tests/test_version_slug.py @@ -105,3 +105,15 @@ def test_uniquifying_suffix(self): self.assertEqual(field.uniquifying_suffix(25), '_z') self.assertEqual(field.uniquifying_suffix(26), '_ba') self.assertEqual(field.uniquifying_suffix(52), '_ca') + + def test_unicode(self): + version = Version.objects.create( + verbose_name='camión', + project=self.pip, + ) + self.assertEqual(version.slug, 'camion') + version = Version.objects.create( + verbose_name='ŭñíč°də-branch', + project=self.pip, + ) + self.assertEqual(version.slug, 'unicd-branch')
UTF-8 characters on version slugging -- or slugging in general This ticket came up as part of #1407. We should make sure version slugging is handling UTF-8 characters in a sane way.
Is there any algorithm you can suggest? Currently, if an all-non-ASCII name is provided, the algorithm will return `unknown`. Partly non-ASCII words like `Straße` will look "chunked" like `stra-e`. This might be a good solution: https://pypi.python.org/pypi/unicode-slugify -- it converts UTF-8 chars to their "equivalent" in ASCII. We could also just generate UTF-8 slugs. I don't know a lot about this, but in general I think we support UTF-8; we might run into issues with nginx/etc downstream, but perhaps that is the best solution. I think using UTF-8 in the URL is no good, as there are just too many tools that do not properly handle UTF-8. However, Wikipedia is doing it. The problem I see is with subdomains that contain project slugs. They will definitely cause problems when they contain non-ASCII characters. We've been using the std library to handle this (converting all the unicode to its ASCII representation): ``` >>> import unicodedata >>> unicodedata.normalize('NFKD', u'camión').encode('ascii', 'ignore') 'camion' >>> ``` > think using UTF-8 in the URL is no good as there are just too many tools that do not properly handle UTF-8. However Wikipedia is doing it. I think it's not a problem to use unicode chars in the URL. Actually, we do support this for filenames: https://test-builds.readthedocs.io/en/unicode-filename/ but we do replace the unicode chars when they are in the version's name/identifier/slug: https://test-builds.readthedocs.io/en/d--branch/ > The problem I see is with subdomains that contain project slugs. They will definitely cause problems when they contain non-ASCII characters. We should probably keep the project's slug as ASCII. This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions. I want to reopen it and apply the solution proposed by Eric: use `unicode-slugify`. I also considered `django.utils.text.slugify` but it does not work as well as `unicode-slugify`: `django.utils.text.slugify`: ``` In [1]: from django.utils.text import slugify In [2]: slugify('北京 (capital of China)') Out[2]: 'capital-of-china' ``` `unicode-slugify`: ``` from slugify import slugify slugify(u'北京 (capital of China)', only_ascii=True) # u'bei-jing-capital-of-china' ``` (there are more examples in its docs) @humitos Just a doubt: I think that, to solve this, these lines https://github.com/rtfd/readthedocs.org/blob/05cc76b89a72cc963e091854b38adf28ab19ae3e/readthedocs/builds/version_slug.py#L85-L87 are to be replaced by using `unicode-slugify`. Something like: ``` from slugify import slugify slugified = slugify(content) ```
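As a sanity check, here is roughly how `unicode-slugify` behaves with the examples from this thread when called with options like the ones the version slug field would need (ASCII-only, lowered, dots/dashes/underscores allowed). The option set matches the library's keyword arguments, but the exact combination is illustrative; the last expected output matches the test added in the test_patch above:

```python
from slugify import slugify  # the unicode-slugify package

opts = dict(only_ascii=True, spaces=False, lower=True, ok='-._')

slugify('camión', **opts)           # 'camion'
slugify('Straße', **opts)           # transliterated, no more 'stra-e' chunking
slugify('ŭñíč°də-branch', **opts)   # 'unicd-branch'
```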
2019-01-28T14:29:45
readthedocs/readthedocs.org
5,222
readthedocs__readthedocs.org-5222
[ "4036" ]
5067f5c60ac92a78849f339775acf2220d6eb538
diff --git a/readthedocs/builds/models.py b/readthedocs/builds/models.py --- a/readthedocs/builds/models.py +++ b/readthedocs/builds/models.py @@ -2,6 +2,7 @@ """Models for the builds app.""" +import datetime import logging import os.path import re @@ -32,6 +33,7 @@ BRANCH, BUILD_STATE, BUILD_STATE_FINISHED, + BUILD_STATE_TRIGGERED, BUILD_TYPES, LATEST, NON_REPOSITORY_VERSIONS, @@ -630,6 +632,12 @@ def finished(self): """Return if build has a finished state.""" return self.state == BUILD_STATE_FINISHED + @property + def is_stale(self): + """Return if build state is triggered & date more than 5m ago.""" + mins_ago = timezone.now() - datetime.timedelta(minutes=5) + return self.state == BUILD_STATE_TRIGGERED and self.date < mins_ago + class BuildCommandResultMixin:
diff --git a/readthedocs/rtd_tests/tests/test_builds.py b/readthedocs/rtd_tests/tests/test_builds.py --- a/readthedocs/rtd_tests/tests/test_builds.py +++ b/readthedocs/rtd_tests/tests/test_builds.py @@ -1,9 +1,11 @@ # -*- coding: utf-8 -*- +import datetime import os import mock from django.test import TestCase from django_dynamic_fixture import fixture, get +from django.utils import timezone from readthedocs.builds.models import Build, Version from readthedocs.doc_builder.config import load_yaml_config @@ -531,3 +533,32 @@ def test_do_not_reference_empty_configs(self): build_two.save() self.assertEqual(build_two._config, {}) self.assertEqual(build_two.config, {}) + + def test_build_is_stale(self): + now = timezone.now() + + build_one = get( + Build, + project=self.project, + version=self.version, + date=now - datetime.timedelta(minutes=8), + state='finished' + ) + build_two = get( + Build, + project=self.project, + version=self.version, + date=now - datetime.timedelta(minutes=6), + state='triggered' + ) + build_three = get( + Build, + project=self.project, + version=self.version, + date=now - datetime.timedelta(minutes=2), + state='triggered' + ) + + self.assertFalse(build_one.is_stale) + self.assertTrue(build_two.is_stale) + self.assertFalse(build_three.is_stale)
Update build list to include an alert state As raised in #4018, a better UI for a queue backup would be to show the affected builds in an alert state: * Show an alert state on each hanging build in the build list if build.state == triggered and build.date > 5m ago. No need for Redis here * The alert state should be very minimal, an FA icon -- i.e. https://fontawesome.com/icons/exclamation-triangle?style=solid * Perhaps mousing over the icon shows a brief message "This build is still waiting to be built" * The link still goes to the detail page * There should be a warning box (using the standard notification pattern!) that says something along the lines of "This build has not started yet. This can be due to heavy build volume or to a problem with Read the Docs. If the problem persists, contact us"
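The check from the first bullet is small enough to express as a one-liner on the model. A sketch of the idea, assuming Django's timezone utilities (the merged patch implements it as a `Build.is_stale` property):

```python
import datetime

from django.utils import timezone

BUILD_STATE_TRIGGERED = 'triggered'


def is_stale(build):
    """True when the build was triggered over 5 minutes ago and never started."""
    cutoff = timezone.now() - datetime.timedelta(minutes=5)
    return build.state == BUILD_STATE_TRIGGERED and build.date < cutoff
```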
Where should the warning box be: on the list page or the detail page?
2019-02-03T19:54:40
readthedocs/readthedocs.org
5,308
readthedocs__readthedocs.org-5308
[ "5303" ]
4d91f61ba15945d8b20f9091ebd77a0082886d0a
diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py --- a/readthedocs/projects/forms.py +++ b/readthedocs/projects/forms.py @@ -131,6 +131,10 @@ def clean_name(self): return name + def clean_repo(self): + repo = self.cleaned_data.get('repo', '') + return repo.rstrip('/') + def clean_remote_repository(self): remote_repo = self.cleaned_data.get('remote_repository', None) if not remote_repo:
diff --git a/readthedocs/rtd_tests/tests/test_project_forms.py b/readthedocs/rtd_tests/tests/test_project_forms.py --- a/readthedocs/rtd_tests/tests/test_project_forms.py +++ b/readthedocs/rtd_tests/tests/test_project_forms.py @@ -1,12 +1,9 @@ -# -*- coding: utf-8 -*- - import mock from django.contrib.auth.models import User from django.test import TestCase from django.test.utils import override_settings from django_dynamic_fixture import get from textclassifier.validators import ClassifierValidator -from django.core.exceptions import ValidationError from readthedocs.builds.constants import LATEST from readthedocs.builds.models import Version @@ -180,6 +177,18 @@ def test_length_of_tags(self): error_msg = 'Length of each tag must be less than or equal to 100 characters.' self.assertDictEqual(form.errors, {'tags': [error_msg]}) + def test_strip_repo_url(self): + form = ProjectBasicsForm({ + 'name': 'foo', + 'repo_type': 'git', + 'repo': 'https://github.com/rtfd/readthedocs.org/' + }) + self.assertTrue(form.is_valid()) + self.assertEqual( + form.cleaned_data['repo'], + 'https://github.com/rtfd/readthedocs.org' + ) + class TestProjectAdvancedForm(TestCase): @@ -514,7 +523,7 @@ class TestNotificationForm(TestCase): def setUp(self): self.project = get(Project) - + def test_webhookform(self): self.assertEqual(self.project.webhook_notifications.all().count(), 0)
Extra slash in "Edit on GitHub" link ## Details * Read the Docs project URL: http://glom.readthedocs.io/ * GitHub URL: https://github.com/mahmoud/glom * Build URL (if applicable): N/A * Read the Docs username (if applicable): mahmoud ## Expected Result The "Edit on GitHub" link at the top of the page should link to the doc on GitHub. ## Actual Result The "Edit on GitHub" link at the top of the page links to a 404. There is an extra slash in the URL: `https://github.com/mahmoud/glom//master/docs/index.rst` (after `glom`, before `master`) Interestingly, the "Edit" link in the bottom left menu seems to work fine.
Hi @mahmoud, I don't think it happens because of the extra slash but because of the missing word `blob`. The URL should be `https://github.com/mahmoud/glom/blob/master/docs/index.rst`, isn't it? Can you try updating the rtd-theme? https://github.com/mahmoud/glom/blob/5a10e66c2f1204988f97e6545055bfc5b36e052f/requirements-rtd.txt#L36 The current one is `0.4.3` https://pypi.python.org/pypi/sphinx_rtd_theme @rexzing you're absolutely right! @stsewd trying this now, thanks! @stsewd Hmm, no luck I'm afraid. In fact, I see 3 slashes now: `https://github.com/mahmoud/glom///master/docs/index.rst`. Thanks, I'll try to replicate this locally and dig a little more. Whoops, I lied, did stuff on the wrong local branch. Updating the theme got me a working link, with `blob`, but it does have an extra slash fwiw: `https://github.com/mahmoud/glom//blob/master/docs/index.rst` It still gets me to GitHub though, so I'm happy enough! :) Thanks for your help! The extra slash is from the repo URL; we could strip it, though. I'll leave it up to you, but I think an rstrip and join of a single slash should yield a nicely normalized URL. :)
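That suggestion in code: stripping the trailing slash before joining makes the result identical however the repo URL was entered. A quick illustrative sketch, not the theme's actual code:

```python
def github_edit_url(repo_url, branch, docpath):
    # rstrip('/') normalizes 'https://github.com/mahmoud/glom/' and
    # '.../glom' to the same base before joining with single slashes.
    return '/'.join([repo_url.rstrip('/'), 'blob', branch, docpath])


github_edit_url('https://github.com/mahmoud/glom/', 'master', 'docs/index.rst')
# -> 'https://github.com/mahmoud/glom/blob/master/docs/index.rst'
```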
2019-02-19T01:07:51
readthedocs/readthedocs.org
5,311
readthedocs__readthedocs.org-5311
[ "4986" ]
6d960f14dc2433ae66dff675b4e66f30dbc606b7
diff --git a/readthedocs/restapi/views/integrations.py b/readthedocs/restapi/views/integrations.py --- a/readthedocs/restapi/views/integrations.py +++ b/readthedocs/restapi/views/integrations.py @@ -349,7 +349,7 @@ class IsAuthenticatedOrHasToken(permissions.IsAuthenticated): """ def has_permission(self, request, view): - has_perm = (super().has_permission(request, view)) + has_perm = super().has_permission(request, view) return has_perm or 'token' in request.data @@ -420,8 +420,18 @@ class WebhookView(APIView): ever get webhook requests for established webhooks on our side. The other views can receive webhooks for unknown webhooks, as all legacy webhooks will be. + + .. warning:: + We're turning off Authenication for this view. + This fixes a bug where we were double-authenticating these views, + because of the way we're passing the request along to the subviews. + + If at any time we add real logic to this view, + it will be completely unauthenticated. """ + authentication_classes = [] + VIEW_MAP = { Integration.GITHUB_WEBHOOK: GitHubWebhookView, Integration.GITLAB_WEBHOOK: GitLabWebhookView,
csrf required for triggering build via http auth authenticated webhook ## Details Hi, In OpenStack, we've been using authentication with the webhook endpoints to trigger builds. The way this has been working is that projects add a common ``openstackci`` user as an admin, and then in their project configuration inside our platform they add their webhook URL. When new releases are made, our CI system uses the ``openstackci`` account (with the password only known to it) to authenticate and trigger RTD builds for the releasing project with a POST. The exact code that does this is https://git.openstack.org/cgit/openstack-infra/zuul-jobs/tree/roles/trigger-readthedocs/tasks/main.yaml#n11 But in essence that boils down to (for, say jenkins-job-builder project) ``` curl --request POST -u openstackci:OURPASSWORD https://readthedocs.org/api/v2/webhook/jenkins-job-builder/47271/ ``` Our jobs starting failing around 2018-12-05 (http://zuul.openstack.org/builds?job_name=trigger-readthedocs-webhook). The jobs don't log the output of this POST so we don't leak the password into logs. However, testing manually, the responses indicate a content-length is now required, and a referer, which ends up with ``` $ curl --request POST -H "Content-Length:0" -u openstackci:PASSWORD -e https://readthedocs.org/api/v2/webhook/jenkins-jo2/webhook/jenkins-job-builder/47271/ https://readthedocs.org/api/v2/webhook/jenkins-jo2/webhook/jenkins-job-builder/47271/ {"detail":"CSRF Failed: CSRF cookie not set."} ``` So I'm guessing all this points to CSRF has been somehow enabled for webhook endpoints when using HTTP basic-auth? Is this possibly a new configuration? We don't use the token-based webhook method because this system means we only have one common secret to keep -- the openstackci user password -- and everything else is self-service for projects who simply add the common user as an admin and include in their configuration their endpoint URL. Any help with this will be greatly appreciated, thanks
On 2018-12-05 we did a deploy: https://docs.readthedocs.io/en/latest/changelog.html#version-2-8-3 but I don't find anything relevant there. I need to research a little more what has happened here. Also, I got a different error: $ curl -X POST -d "branches=datetime" -u humitos:$RTD_PASS https://readthedocs.org/api/v2/webhook/test-builds/56359/ {"detail":"CSRF Failed: Referer checking failed - no Referer."} @humitos thanks - yes I saw that at first to. If you add a "-e" with your URL to the request, it will set a referer. I sort of had 3 steps to get to the csrf error; first it wanted a content-length to be set, then the referer, then it failed with the csrf error @humitos did you manage to find anything? No yet. I will try to take a look at this soon. Marking this as bug, since it was working properly and also the docs says that it should work properly: https://docs.readthedocs.io/en/latest/webhooks.html#authentication @humitos any update on this? I see it's been tagged with a milestone that seems closed, but per http://zuul.openstack.org/builds?job_name=trigger-readthedocs-webhook all our publishing jobs are still failing. If it expected to be fixed, I can look closer and see what's going on (as mentioned before, the logging isn't very helpful as we don't want to leak the password, so I'll have to dig into it more manually). @ianw the PR is still under review. I'm pinging @agjohnson here since he had different opinions about this. Hello team. Airship project (lives under OpenStack) is affected by this bug. Is there anything I can help with? Thank you.
2019-02-19T18:37:16
readthedocs/readthedocs.org
5,338
readthedocs__readthedocs.org-5338
[ "5228" ]
ed8dd29a68d009fc08929fabf7155883482619c1
diff --git a/readthedocs/config/config.py b/readthedocs/config/config.py --- a/readthedocs/config/config.py +++ b/readthedocs/config/config.py @@ -489,15 +489,14 @@ def validate_conda(self): raw_conda = self.raw_config['conda'] with self.catch_validation_error('conda'): validate_dict(raw_conda) - conda_environment = None - if 'file' in raw_conda: - with self.catch_validation_error('conda.file'): - conda_environment = validate_file( - raw_conda['file'], - self.base_path, - ) - conda['environment'] = conda_environment - + with self.catch_validation_error('conda.file'): + if 'file' not in raw_conda: + raise ValidationError('file', VALUE_NOT_FOUND) + conda_environment = validate_file( + raw_conda['file'], + self.base_path, + ) + conda['environment'] = conda_environment return conda return None
diff --git a/readthedocs/config/tests/test_config.py b/readthedocs/config/tests/test_config.py --- a/readthedocs/config/tests/test_config.py +++ b/readthedocs/config/tests/test_config.py @@ -557,29 +557,33 @@ def test_it_priorities_image_from_env_config(self, tmpdir, image): assert build.build.image == image -def test_use_conda_default_false(): +def test_use_conda_default_none(): build = get_build_config({}) build.validate() assert build.conda is None -def test_use_conda_respects_config(): +def test_validates_conda_file(tmpdir): + apply_fs(tmpdir, {'environment.yml': ''}) build = get_build_config( - {'conda': {}}, + {'conda': {'file': 'environment.yml'}}, + source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() assert isinstance(build.conda, Conda) + assert build.conda.environment == str(tmpdir.join('environment.yml')) -def test_validates_conda_file(tmpdir): +def test_file_is_required_when_using_conda(tmpdir): apply_fs(tmpdir, {'environment.yml': ''}) build = get_build_config( - {'conda': {'file': 'environment.yml'}}, + {'conda': {'foo': 'environment.yml'}}, source_file=str(tmpdir.join('readthedocs.yml')), ) - build.validate() - assert isinstance(build.conda, Conda) - assert build.conda.environment == str(tmpdir.join('environment.yml')) + with raises(InvalidConfig) as excinfo: + build.validate() + assert excinfo.value.key == 'conda.file' + assert excinfo.value.code == VALUE_NOT_FOUND def test_requirements_file_empty():
TypeError: sequence item 7: expected str instance, NoneType found Sentry Issue: [READTHEDOCS-ORG-EMX](https://sentry.io/read-the-docs/readthedocs-org/issues/864447516/?referrer=github_integration) ``` TypeError: sequence item 7: expected str instance, NoneType found (34 additional frame(s) were not displayed) ... File "readthedocs/doc_builder/environments.py", line 550, in __exit__ ret = self.handle_exception(exc_type, exc_value, tb) File "readthedocs/doc_builder/environments.py", line 891, in __exit__ return super().__exit__(exc_type, exc_value, tb) File "readthedocs/projects/tasks.py", line 568, in run_build log.warning('No build ID, not syncing files') File "readthedocs/projects/tasks.py", line 404, in run self.run_build(docker=docker, record=record) File "readthedocs/projects/tasks.py", line 281, in update_docs_task return step.run(project_id, *args, **kwargs) (Build) [delira:tf_update_docs] sequence item 7: expected str instance, NoneType found ```
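The traceback bottoms out in a plain `str.join` over a sequence that contains `None`: validation treated the conda `file` key as optional, so `conda.environment` could end up `None` and later flow into a command join. The failure shape is easy to reproduce (the command list here is illustrative, not the exact command RTD builds):

```python
conda_file = None  # what a ``conda:`` block without ``file:`` produced

cmd = ['conda', 'env', 'create', '--quiet', '--name', 'latest',
       '--file', conda_file]
' '.join(cmd)
# TypeError: sequence item 7: expected str instance, NoneType found
```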
2019-02-22T01:15:58
readthedocs/readthedocs.org
5,344
readthedocs__readthedocs.org-5344
[ "5340" ]
86bfdffaee2a6a5143f6c7c75af2600817101ce1
diff --git a/readthedocs/doc_builder/backends/mkdocs.py b/readthedocs/doc_builder/backends/mkdocs.py --- a/readthedocs/doc_builder/backends/mkdocs.py +++ b/readthedocs/doc_builder/backends/mkdocs.py @@ -140,15 +140,25 @@ def append_conf(self, **__): ), ) - user_config.setdefault('extra_javascript', []).extend([ + extra_javascript_list = [ 'readthedocs-data.js', '%score/js/readthedocs-doc-embed.js' % static_url, '%sjavascript/readthedocs-analytics.js' % static_url, - ]) - user_config.setdefault('extra_css', []).extend([ + ] + extra_css_list = [ '%scss/badge_only.css' % static_url, '%scss/readthedocs-doc-embed.css' % static_url, - ]) + ] + + # Only add static file if the files are not already in the list + user_config.setdefault('extra_javascript', []).extend( + [js for js in extra_javascript_list if js not in user_config.get( + 'extra_javascript')] + ) + user_config.setdefault('extra_css', []).extend( + [css for css in extra_css_list if css not in user_config.get( + 'extra_css')] + ) # The docs path is relative to the location # of the mkdocs configuration file.
diff --git a/readthedocs/rtd_tests/tests/test_doc_builder.py b/readthedocs/rtd_tests/tests/test_doc_builder.py --- a/readthedocs/rtd_tests/tests/test_doc_builder.py +++ b/readthedocs/rtd_tests/tests/test_doc_builder.py @@ -279,7 +279,6 @@ def test_get_theme_name_with_feature_flag(self, checkout_path, run): mock.ANY, ) - @patch('readthedocs.doc_builder.base.BaseBuilder.run') @patch('readthedocs.projects.models.Project.checkout_path') def test_append_conf_create_yaml(self, checkout_path, run): @@ -420,7 +419,6 @@ def test_append_conf_existing_yaml_on_root_with_invalid_setting(self, checkout_p with self.assertRaises(MkDocsYAMLParseError): self.searchbuilder.append_conf() - @patch('readthedocs.doc_builder.base.BaseBuilder.run') @patch('readthedocs.projects.models.Project.checkout_path') def test_dont_override_theme(self, checkout_path, run): @@ -489,3 +487,54 @@ def test_write_js_data_docs_dir(self, checkout_path, run, generate_rtd_data): docs_dir='docs', mkdocs_config=mock.ANY, ) + + @patch('readthedocs.doc_builder.base.BaseBuilder.run') + @patch('readthedocs.projects.models.Project.checkout_path') + def test_append_conf_existing_yaml_with_extra(self, checkout_path, run): + tmpdir = tempfile.mkdtemp() + os.mkdir(os.path.join(tmpdir, 'docs')) + yaml_file = os.path.join(tmpdir, 'mkdocs.yml') + yaml.safe_dump( + { + 'site_name': 'mkdocs', + 'google_analytics': ['UA-1234-5', 'mkdocs.org'], + 'docs_dir': 'docs', + 'extra_css': [ + 'http://readthedocs.org/static/css/badge_only.css' + ], + 'extra_javascript': ['readthedocs-data.js'], + }, + open(yaml_file, 'w'), + ) + checkout_path.return_value = tmpdir + + python_env = Virtualenv( + version=self.version, + build_env=self.build_env, + config=None, + ) + self.searchbuilder = MkdocsHTML( + build_env=self.build_env, + python_env=python_env, + ) + self.searchbuilder.append_conf() + + run.assert_called_with('cat', 'mkdocs.yml', cwd=mock.ANY) + + config = yaml.safe_load(open(yaml_file)) + + self.assertEqual( + config['extra_css'], + [ + 'http://readthedocs.org/static/css/badge_only.css', + 'http://readthedocs.org/static/css/readthedocs-doc-embed.css', + ], + ) + self.assertEqual( + config['extra_javascript'], + [ + 'readthedocs-data.js', + 'http://readthedocs.org/static/core/js/readthedocs-doc-embed.js', + 'http://readthedocs.org/static/javascript/readthedocs-analytics.js', + ], + )
Be more defensive in how we insert our mkdocs media This logic can be executed multiple times on [some builds](https://readthedocs.org/projects/phpspreadsheet/builds/8340376/) (see the `mkdocs.yml`): https://github.com/rtfd/readthedocs.org/blob/ed8dd29a68d009fc08929fabf7155883482619c1/readthedocs/doc_builder/backends/mkdocs.py#L143-L152 If we have an issue resetting the branch/tag of the docs, we will end up inserting our media files twice, which causes them to be executed twice. ## Solution This code should check if one of the files we insert is already in the list: ``` if 'readthedocs-data.js' not in user_config.get('extra_javascript'): ```
Can I work on this issue? If I understood the problem correctly, we need to add conditions for `'extra_javascript'` and `'extra_css'` to check if the static files already exist in the `user_config`; if not, then add the files. Please correct me if I'm wrong.
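That's the right idea: the guard just needs to make the append idempotent for both keys. One possible shape (a sketch, not the exact code that landed):

```python
user_config = {'extra_javascript': ['readthedocs-data.js']}


def extend_unique(config, key, new_items):
    # Append only items not already present, so calling append_conf()
    # twice doesn't inject our JS/CSS twice.
    existing = config.setdefault(key, [])
    existing.extend(item for item in new_items if item not in existing)


extend_unique(user_config, 'extra_javascript',
              ['readthedocs-data.js', 'core/js/readthedocs-doc-embed.js'])
# user_config['extra_javascript'] ends up with each file exactly once.
```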
2019-02-23T12:50:11
readthedocs/readthedocs.org
5,345
readthedocs__readthedocs.org-5345
[ "5061" ]
86bfdffaee2a6a5143f6c7c75af2600817101ce1
diff --git a/readthedocs/projects/migrations/0040_increase_path_max_length.py b/readthedocs/projects/migrations/0040_increase_path_max_length.py new file mode 100644 --- /dev/null +++ b/readthedocs/projects/migrations/0040_increase_path_max_length.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.20 on 2019-02-23 15:05 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('projects', '0039_update-doctype-helptext'), + ] + + operations = [ + migrations.AlterField( + model_name='importedfile', + name='path', + field=models.CharField(max_length=4096, verbose_name='Path'), + ), + ] diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -1089,7 +1089,11 @@ class ImportedFile(models.Model): ) name = models.CharField(_('Name'), max_length=255) slug = models.SlugField(_('Slug')) - path = models.CharField(_('Path'), max_length=255) + + # max_length is set to 4096 because linux has a maximum path length + # of 4096 characters for most filesystems (including EXT4). + # https://github.com/rtfd/readthedocs.org/issues/5061 + path = models.CharField(_('Path'), max_length=4096) md5 = models.CharField(_('MD5 checksum'), max_length=255) commit = models.CharField(_('Commit'), max_length=255) modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
Increase name and path's max_length for ImportedFile model Some projects have some large filenames/nested directories. https://github.com/rtfd/readthedocs.org/blob/ae991c4f131e2dd22fd4ce61deec381ae33f3aa4/readthedocs/projects/models.py#L895-L897 Any reason not to do that? Any suggested number? (In Sentry, the length of the failed file was 256.) Related Sentry issue https://sentry.io/read-the-docs/readthedocs-org/issues/659636695/
We need to put a number here. So, in case we want to fully support this, we should use the maximum values for the filesystem that we use (ext4).

> Linux has a maximum filename length of 255 characters for most filesystems (including EXT4), and a maximum path of 4096 characters.

https://unix.stackexchange.com/questions/32795/what-is-the-maximum-allowed-filename-and-folder-size-with-ecryptfs

There are cases where it fails because of the `path` and not the `name` length: https://sentry.io/read-the-docs/readthedocs-org/issues/659636695/events/89c15864ee9c4a33bf05ad764e0c2869/

```
>>> len('glotaran.models.spectral_temporal.spectral_temporal_dataset_descriptor.SpectralTemporalDatasetDescriptor.validate_parameter.rst.txt')
131
>>> len('/home/docs/checkouts/readthedocs.org/user_builds/glotaran/rtd-builds/latest/_sources/api/models/spectral_temporal/spectral_temporal_dataset_descriptor/classes/SpectralTemporalDatasetDescriptor/methods/glotaran.models.spectral_temporal.spectral_temporal_dataset_descriptor.SpectralTemporalDatasetDescriptor.validate_parameter.rst.txt')
332
```

I think we should increase `path` to 4096 to avoid this problem and be safe.
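If you want to verify these limits on a given machine, `os.pathconf` can query them for a mount point. A quick sketch (Unix-only; the values in the comments are what ext4 typically reports):

```python
import os

# Ask the kernel for the filesystem limits at a given mount point.
# On ext4 this is typically 255 for a single file name and 4096 for
# a full path, which is where the new max_length=4096 comes from.
print(os.pathconf('/', 'PC_NAME_MAX'))  # e.g. 255
print(os.pathconf('/', 'PC_PATH_MAX'))  # e.g. 4096
```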
2019-02-23T15:14:45
readthedocs/readthedocs.org
5,346
readthedocs__readthedocs.org-5346
[ "5300" ]
86bfdffaee2a6a5143f6c7c75af2600817101ce1
diff --git a/conftest.py b/conftest.py --- a/conftest.py +++ b/conftest.py @@ -47,6 +47,12 @@ def pytest_configure(config): def settings_modification(settings): settings.CELERY_ALWAYS_EAGER = True + @pytest.fixture def api_client(): return APIClient() + + [email protected](scope="class") +def url_scheme(request): + request.cls.url_scheme = request.config.option.url_scheme
diff --git a/readthedocs/rtd_tests/tests/test_core_tags.py b/readthedocs/rtd_tests/tests/test_core_tags.py --- a/readthedocs/rtd_tests/tests/test_core_tags.py +++ b/readthedocs/rtd_tests/tests/test_core_tags.py @@ -10,13 +10,14 @@ from readthedocs.projects.models import Project [email protected]("url_scheme") @override_settings(USE_SUBDOMAIN=False, PRODUCTION_DOMAIN='readthedocs.org') class CoreTagsTests(TestCase): fixtures = ['eric', 'test_data'] def setUp(self): url_base = '{scheme}://{domain}/docs/pip{{version}}'.format( - scheme=pytest.config.option.url_scheme, + scheme=self.url_scheme, domain=settings.PRODUCTION_DOMAIN, )
Remove all warnings from pytest When running `tox` we see these warnings in the summary. We should use `request` fixture and access to `request.config` instead. Docs: https://docs.pytest.org/en/latest/fixture.html#request-context Change log: https://docs.pytest.org/en/latest/deprecations.html#pytest-config-global ``` ====================================================================================== warnings summary ====================================================================================== readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs_index readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs_index_no_directory_urls readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_mkdocs_no_directory_urls readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_and_page readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_and_page_htmldir readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_and_page_signlehtml readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_htmldir readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_and_version_singlehtml readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_only readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_only_htmldir readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_project_only_singlehtml readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_restructured_text readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_restructured_text_invalid readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_and_page readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_and_page_htmldir readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_and_page_singlehtml readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_htmldir readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_and_version_singlehtml readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_only readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_only_htmldir readthedocs/rtd_tests/tests/test_core_tags.py::CoreTagsTests::test_translation_project_only_singlehtml /home/humitos/rtfd/code/readthedocs-corporate/.tox/py36/readthedocs.org/readthedocs/rtd_tests/tests/test_core_tags.py:19: PytestDeprecationWarning: the `pytest.config` global is deprecated. Please use `request.config` or `pytest_configure` (if you're a pytest plugin) instead. scheme=pytest.config.option.url_scheme, -- Docs: https://docs.pytest.org/en/latest/warnings.html ```
How can I reproduce this warning summary?

Hi @rexzing. You can use `tox` to run all the tests and reproduce these warnings. Docs: https://docs.readthedocs.io/en/latest/tests.html

Also, these warnings can be seen on Travis, like in this case: https://travis-ci.org/rtfd/readthedocs.org/jobs/495002436

Got it. I am following these steps. Thanks.

Hi guys, I searched for a solution in the past few days, but couldn't find out how to use `request.config` inside `class CoreTagsTests(TestCase)`.
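The patch above solves this for `unittest`-style classes with a class-scoped fixture that copies the option onto `request.cls`, attached via `@pytest.mark.usefixtures`. A minimal sketch of the pattern (it assumes the `--url-scheme` option is registered elsewhere through `pytest_addoption`):

```python
import pytest
from django.test import TestCase


@pytest.fixture(scope='class')
def url_scheme(request):
    # ``request.config`` replaces the deprecated ``pytest.config`` global;
    # storing the option on ``request.cls`` exposes it as ``self.url_scheme``.
    request.cls.url_scheme = request.config.option.url_scheme


@pytest.mark.usefixtures('url_scheme')
class ExampleTests(TestCase):

    def test_scheme_is_available(self):
        assert self.url_scheme in ('http', 'https')
```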
2019-02-23T20:55:25
readthedocs/readthedocs.org
5,377
readthedocs__readthedocs.org-5377
[ "5364" ]
c7ec8578e981a80785c6db69768e67cdea123968
diff --git a/readthedocs/config/validation.py b/readthedocs/config/validation.py --- a/readthedocs/config/validation.py +++ b/readthedocs/config/validation.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """Validations for the RTD configuration file.""" import os @@ -81,28 +79,33 @@ def validate_bool(value): def validate_directory(value, base_path): """Check that ``value`` is a directory.""" - path = validate_path(value, base_path) + path = os.path.join( + base_path, + validate_path(value, base_path) + ) if not os.path.isdir(path): raise ValidationError(value, INVALID_DIRECTORY) - return path + return os.path.relpath(path, base_path) def validate_file(value, base_path): """Check that ``value`` is a file.""" - path = validate_path(value, base_path) + path = os.path.join( + base_path, + validate_path(value, base_path) + ) if not os.path.isfile(path): raise ValidationError(value, INVALID_FILE) - return path + return os.path.relpath(path, base_path) def validate_path(value, base_path): """Check that ``value`` is an existent file in ``base_path``.""" string_value = validate_string(value) pathed_value = os.path.join(base_path, string_value) - final_value = os.path.abspath(pathed_value) - if not os.path.exists(final_value): + if not os.path.exists(pathed_value): raise ValidationError(value, INVALID_PATH) - return final_value + return os.path.relpath(pathed_value, base_path) def validate_string(value): diff --git a/readthedocs/doc_builder/backends/sphinx.py b/readthedocs/doc_builder/backends/sphinx.py --- a/readthedocs/doc_builder/backends/sphinx.py +++ b/readthedocs/doc_builder/backends/sphinx.py @@ -42,6 +42,11 @@ def __init__(self, *args, **kwargs): try: if not self.config_file: self.config_file = self.project.conf_file(self.version.slug) + else: + self.config_file = os.path.join( + self.project.checkout_path(self.version.slug), + self.config_file, + ) self.old_artifact_path = os.path.join( os.path.dirname(self.config_file), self.sphinx_build_dir, diff --git a/readthedocs/doc_builder/python_environments.py b/readthedocs/doc_builder/python_environments.py --- a/readthedocs/doc_builder/python_environments.py +++ b/readthedocs/doc_builder/python_environments.py @@ -82,11 +82,10 @@ def install_package(self, install): :param install: A install object from the config module. :type install: readthedocs.config.models.PythonInstall """ - rel_path = os.path.relpath(install.path, self.checkout_path) if install.method == PIP: # Prefix ./ so pip installs from a local path rather than pypi local_path = ( - os.path.join('.', rel_path) if rel_path != '.' else rel_path + os.path.join('.', install.path) if install.path != '.' 
else install.path ) extra_req_param = '' if install.extra_requirements: @@ -111,7 +110,7 @@ def install_package(self, install): elif install.method == SETUPTOOLS: self.build_env.run( self.venv_bin(filename='python'), - os.path.join(rel_path, 'setup.py'), + os.path.join(install.path, 'setup.py'), 'install', '--force', cwd=self.checkout_path, @@ -364,7 +363,10 @@ def install_requirements_file(self, install): for path, req_file in itertools.product(paths, req_files): test_path = os.path.join(self.checkout_path, path, req_file) if os.path.exists(test_path): - requirements_file_path = test_path + requirements_file_path = os.path.relpath( + test_path, + self.checkout_path, + ) break if requirements_file_path: @@ -381,10 +383,7 @@ def install_requirements_file(self, install): '--cache-dir', self.project.pip_cache_path, '-r', - os.path.relpath( - requirements_file_path, - self.checkout_path - ), + requirements_file_path, ] self.build_env.run( *args,
diff --git a/readthedocs/config/tests/test_config.py b/readthedocs/config/tests/test_config.py --- a/readthedocs/config/tests/test_config.py +++ b/readthedocs/config/tests/test_config.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os import re import textwrap @@ -604,7 +603,7 @@ def test_validates_conda_file(tmpdir): ) build.validate() assert isinstance(build.conda, Conda) - assert build.conda.environment == str(tmpdir.join('environment.yml')) + assert build.conda.environment == 'environment.yml' def test_file_is_required_when_using_conda(tmpdir): @@ -640,7 +639,7 @@ def test_requirements_file_repects_default_value(tmpdir): build.validate() install = build.python.install assert len(install) == 1 - assert install[0].requirements == str(tmpdir.join('myrequirements.txt')) + assert install[0].requirements == 'myrequirements.txt' def test_requirements_file_respects_configuration(tmpdir): @@ -652,7 +651,7 @@ def test_requirements_file_respects_configuration(tmpdir): build.validate() install = build.python.install assert len(install) == 1 - assert install[0].requirements == str(tmpdir.join('requirements.txt')) + assert install[0].requirements == 'requirements.txt' def test_requirements_file_is_null(tmpdir): @@ -744,7 +743,7 @@ def test_as_dict(tmpdir): 'python': { 'version': 3.7, 'install': [{ - 'requirements': str(tmpdir.join('requirements.txt')), + 'requirements': 'requirements.txt', }], 'use_system_site_packages': False, }, @@ -859,7 +858,7 @@ def test_conda_check_valid(self, tmpdir): source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() - assert build.conda.environment == str(tmpdir.join('environment.yml')) + assert build.conda.environment == 'environment.yml' def test_conda_check_invalid(self, tmpdir): apply_fs(tmpdir, {'environment.yml': ''}) @@ -1054,7 +1053,7 @@ def test_python_install_check_default(self, tmpdir): install = build.python.install assert len(install) == 1 assert isinstance(install[0], PythonInstall) - assert install[0].path == str(tmpdir) + assert install[0].path == '.' assert install[0].method == PIP assert install[0].extra_requirements == [] @@ -1107,7 +1106,7 @@ def test_python_install_requirements_check_valid(self, tmpdir): install = build.python.install assert len(install) == 1 assert isinstance(install[0], PythonInstallRequirements) - assert install[0].requirements == str(tmpdir.join('requirements.txt')) + assert install[0].requirements == 'requirements.txt' def test_python_install_requirements_check_invalid(self, tmpdir): apply_fs(tmpdir, {'requirements.txt': ''}) @@ -1187,7 +1186,7 @@ def test_python_install_requirements_priority_over_default(self, tmpdir): build.validate() install = build.python.install assert len(install) == 1 - assert install[0].requirements == str(tmpdir.join('requirements.txt')) + assert install[0].requirements == 'requirements.txt' @pytest.mark.parametrize('value', [3, [], {}]) def test_python_install_requirements_check_invalid_types(self, value, tmpdir): @@ -1237,7 +1236,7 @@ def test_python_install_pip_check_valid(self, tmpdir): build.validate() install = build.python.install assert len(install) == 1 - assert install[0].path == str(tmpdir) + assert install[0].path == '.' assert install[0].method == PIP def test_python_install_pip_have_priority_over_default(self, tmpdir): @@ -1256,7 +1255,7 @@ def test_python_install_pip_have_priority_over_default(self, tmpdir): build.validate() install = build.python.install assert len(install) == 1 - assert install[0].path == str(tmpdir) + assert install[0].path == '.' 
assert install[0].method == PIP def test_python_install_setuptools_check_valid(self, tmpdir): @@ -1274,7 +1273,7 @@ def test_python_install_setuptools_check_valid(self, tmpdir): build.validate() install = build.python.install assert len(install) == 1 - assert install[0].path == str(tmpdir) + assert install[0].path == '.' assert install[0].method == SETUPTOOLS def test_python_install_setuptools_ignores_default(self): @@ -1301,7 +1300,7 @@ def test_python_install_setuptools_priority_over_default(self, tmpdir): build.validate() install = build.python.install assert len(install) == 1 - assert install[0].path == str(tmpdir) + assert install[0].path == '.' assert install[0].method == SETUPTOOLS def test_python_install_allow_empty_list(self): @@ -1419,14 +1418,14 @@ def test_python_install_several_respects_order(self, tmpdir): install = build.python.install assert len(install) == 3 - assert install[0].path == str(tmpdir.join('one')) + assert install[0].path == 'one' assert install[0].method == PIP assert install[0].extra_requirements == [] - assert install[1].path == str(tmpdir.join('two')) + assert install[1].path == 'two' assert install[1].method == SETUPTOOLS - assert install[2].requirements == str(tmpdir.join('three.txt')) + assert install[2].requirements == 'three.txt' def test_python_install_reports_correct_invalid_index(self, tmpdir): apply_fs(tmpdir, { @@ -1564,7 +1563,7 @@ def test_sphinx_configuration_check_valid(self, tmpdir): source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() - assert build.sphinx.configuration == str(tmpdir.join('conf.py')) + assert build.sphinx.configuration == 'conf.py' def test_sphinx_configuration_check_invalid(self, tmpdir): apply_fs(tmpdir, {'conf.py': ''}) @@ -1607,7 +1606,7 @@ def test_sphinx_configuration_respects_default(self, tmpdir): source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() - assert build.sphinx.configuration == str(tmpdir.join('conf.py')) + assert build.sphinx.configuration == 'conf.py' def test_sphinx_configuration_default_can_be_none(self, tmpdir): apply_fs(tmpdir, {'conf.py': ''}) @@ -1627,7 +1626,7 @@ def test_sphinx_configuration_priorities_over_default(self, tmpdir): source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() - assert build.sphinx.configuration == str(tmpdir.join('conf.py')) + assert build.sphinx.configuration == 'conf.py' @pytest.mark.parametrize('value', [[], True, 0, {}]) def test_sphinx_configuration_validate_type(self, value): @@ -1674,7 +1673,7 @@ def test_mkdocs_configuration_check_valid(self, tmpdir): source_file=str(tmpdir.join('readthedocs.yml')), ) build.validate() - assert build.mkdocs.configuration == str(tmpdir.join('mkdocs.yml')) + assert build.mkdocs.configuration == 'mkdocs.yml' assert build.doctype == 'mkdocs' assert build.sphinx is None @@ -2032,7 +2031,7 @@ def test_as_dict(self, tmpdir): 'python': { 'version': 3.6, 'install': [{ - 'requirements': str(tmpdir.join('requirements.txt')), + 'requirements': 'requirements.txt', }], 'use_system_site_packages': False, }, diff --git a/readthedocs/config/tests/test_validation.py b/readthedocs/config/tests/test_validation.py --- a/readthedocs/config/tests/test_validation.py +++ b/readthedocs/config/tests/test_validation.py @@ -1,6 +1,3 @@ -# -*- coding: utf-8 -*- -import os - from mock import patch from pytest import raises @@ -133,10 +130,10 @@ def test_it_accepts_absolute_path(self, tmpdir): path = str(tmpdir.mkdir('a directory')) validate_path(path, 'does not matter') - def test_it_returns_absolute_path(self, tmpdir): 
+ def test_it_returns_relative_path(self, tmpdir): tmpdir.mkdir('a directory') path = validate_path('a directory', str(tmpdir)) - assert path == os.path.abspath(path) + assert path == 'a directory' def test_it_only_accepts_strings(self): with raises(ValidationError) as excinfo: diff --git a/readthedocs/rtd_tests/tests/test_config_integration.py b/readthedocs/rtd_tests/tests/test_config_integration.py --- a/readthedocs/rtd_tests/tests/test_config_integration.py +++ b/readthedocs/rtd_tests/tests/test_config_integration.py @@ -277,7 +277,7 @@ def test_conda_with_cofig(self, checkout_path): ) config = load_yaml_config(self.version) self.assertTrue(config.conda is not None) - self.assertEqual(config.conda.environment, full_conda_file) + self.assertEqual(config.conda.environment, conda_file) @mock.patch('readthedocs.projects.models.Project.checkout_path') def test_conda_without_cofig(self, checkout_path): @@ -303,7 +303,7 @@ def test_requirements_file_from_project_setting(self, checkout_path): self.assertEqual(len(config.python.install), 1) self.assertEqual( config.python.install[0].requirements, - full_requirements_file + requirements_file ) @mock.patch('readthedocs.projects.models.Project.checkout_path') @@ -328,7 +328,7 @@ def test_requirements_file_from_yml(self, checkout_path): self.assertEqual(len(config.python.install), 1) self.assertEqual( config.python.install[0].requirements, - full_requirements_file + requirements_file ) @@ -459,7 +459,6 @@ def test_conda_environment(self, build_failed, checkout_path, tmpdir): update_docs = self.get_update_docs_task() update_docs.run_build(docker=False, record=False) - conda_file = path.join(str(base_path), conda_file) assert update_docs.config.conda.environment == conda_file assert isinstance(update_docs.python_env, Conda) @@ -530,11 +529,10 @@ def test_python_install_requirements(self, run, checkout_path, tmpdir): update_docs.python_env.install_requirements() args, kwargs = run.call_args - full_requirements_file = str(base_path.join(requirements_file)) install = config.python.install assert len(install) == 1 - assert install[0].requirements == full_requirements_file + assert install[0].requirements == requirements_file assert requirements_file in args @patch('readthedocs.doc_builder.environments.BuildEnvironment.run') @@ -954,7 +952,7 @@ def test_mkdocs_configuration( args, kwargs = run.call_args assert '--config-file' in args - assert path.join(str(tmpdir), 'docx/mkdocs.yml') in args + assert 'docx/mkdocs.yml' in args append_conf.assert_called_once() move.assert_called_once()
Save relative paths from the config module

We are currently saving the whole path, including the root of the servers; that info isn't useful for us or for the user later. We will not lose any kind of information, because we already have the base path to validate files.

This shouldn't be a big refactor, and we have a lot of tests.

Ref https://github.com/rtfd/readthedocs.org/pull/4863#discussion_r260421353
I saw that we are using absolute paths in other places, not just in this config module. For example, on the build output page -- which forces you to scroll horizontally to read the command that was executed. Do you think it's worth considering using relative paths in other places as well? What are your thoughts on this?

> Do you think it's worth considering using relative paths in other places as well? What are your thoughts on this?

Yeah, when I refactored the code, I tried to express most of the commands with relative paths (like requirements, conda env, etc). We still depend on absolute paths to set the pip cache, virtualenv root, and Python binary. We can remove some of those using env variables; after all, it isn't helpful at all for the user to know where we are saving the cache/virtualenv. This is another issue, not in the scope of this one.
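The core of the change is small: validators still join the value against the checkout base path to verify it exists, but they return an `os.path.relpath` instead of the absolute path. A rough sketch of the idea (a standalone `ValidationError` is used here for brevity):

```python
import os


class ValidationError(Exception):
    pass


def validate_path(value, base_path):
    """Check that ``value`` exists under ``base_path``; return it relative."""
    full_path = os.path.join(base_path, value)
    if not os.path.exists(full_path):
        raise ValidationError('{}: path does not exist'.format(value))
    # Keep only the part the user controls; the server-specific base
    # path adds no information and just clutters commands and output.
    return os.path.relpath(full_path, base_path)


# e.g. validate_path('docs/conf.py', '/tmp/checkout') -> 'docs/conf.py'
```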
2019-03-01T00:40:20
readthedocs/readthedocs.org
5,383
readthedocs__readthedocs.org-5383
[ "5327" ]
13351a582e616fd33a36b5c2e8268f6e691da0f8
diff --git a/readthedocs/core/migrations/0006_remove_userprofile_allow_email.py b/readthedocs/core/migrations/0006_remove_userprofile_allow_email.py new file mode 100644 --- /dev/null +++ b/readthedocs/core/migrations/0006_remove_userprofile_allow_email.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.20 on 2019-03-01 17:30 +from __future__ import unicode_literals + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0005_migrate-old-passwords'), + ] + + operations = [ + migrations.RemoveField( + model_name='userprofile', + name='allow_email', + ), + ] diff --git a/readthedocs/core/models.py b/readthedocs/core/models.py --- a/readthedocs/core/models.py +++ b/readthedocs/core/models.py @@ -32,11 +32,6 @@ class UserProfile(models.Model): help_text=_('If unchecked, you will still see community ads.'), default=True, ) - allow_email = models.BooleanField( - _('Allow email'), - help_text=_('Show your email on VCS contributions.'), - default=True, - ) def __str__(self): return ( @@ -49,19 +44,3 @@ def get_absolute_url(self): 'profiles_profile_detail', kwargs={'username': self.user.username}, ) - - def get_contribution_details(self): - """ - Get the line to put into commits to attribute the author. - - Returns a tuple (name, email) - """ - if self.user.first_name and self.user.last_name: - name = '{} {}'.format(self.user.first_name, self.user.last_name) - else: - name = self.user.username - if self.allow_email: - email = self.user.email - else: - email = STANDARD_EMAIL - return (name, email)
Remove unused fields and methods from core/models

We are not using these:

https://github.com/rtfd/readthedocs.org/blob/3218c808e87206983c4eb3ce4faceddbcc0f7aa3/readthedocs/core/models.py#L35-L39

https://github.com/rtfd/readthedocs.org/blob/3218c808e87206983c4eb3ce4faceddbcc0f7aa3/readthedocs/core/models.py#L53-L67

And I don't think we are going to use them in the future. If we agree on the removal, we can mark this as a good first issue.
Hello @stsewd, do we just delete these specific lines and send a PR? Yes, and any other code related to these lines. Also, you can check that all tests are passing after that. And I think you'll need to generate a migration file.
2019-03-01T17:31:31
readthedocs/readthedocs.org
5,393
readthedocs__readthedocs.org-5393
[ "5358" ]
bdb4aa5da5b5b5fb45244176c2bee58fb5442414
diff --git a/readthedocs/core/utils/__init__.py b/readthedocs/core/utils/__init__.py --- a/readthedocs/core/utils/__init__.py +++ b/readthedocs/core/utils/__init__.py @@ -16,7 +16,7 @@ from django.utils.text import slugify as slugify_base from celery import group, chord -from readthedocs.builds.constants import LATEST, BUILD_STATE_TRIGGERED +from readthedocs.builds.constants import BUILD_STATE_TRIGGERED from readthedocs.doc_builder.constants import DOCKER_LIMITS log = logging.getLogger(__name__) @@ -73,7 +73,7 @@ def prepare_build( project has ``skip=True``, the build is not triggered. :param project: project's documentation to be built - :param version: version of the project to be built. Default: ``latest`` + :param version: version of the project to be built. Default: ``project.get_default_version()`` :param record: whether or not record the build in a new Build object :param force: build the HTML documentation even if the files haven't changed :param immutable: whether or not create an immutable Celery signature @@ -95,7 +95,8 @@ def prepare_build( return (None, None) if not version: - version = project.versions.get(slug=LATEST) + default_version = project.get_default_version() + version = project.versions.get(slug=default_version) kwargs = { 'version_pk': version.pk,
diff --git a/readthedocs/rtd_tests/tests/test_core_utils.py b/readthedocs/rtd_tests/tests/test_core_utils.py --- a/readthedocs/rtd_tests/tests/test_core_utils.py +++ b/readthedocs/rtd_tests/tests/test_core_utils.py @@ -14,6 +14,7 @@ from readthedocs.projects.tasks import remove_dirs from readthedocs.core.utils import slugify, trigger_build from readthedocs.projects.models import Project +from readthedocs.builds.constants import LATEST class CoreUtilTests(TestCase): @@ -34,6 +35,61 @@ def test_trigger_skipped_project(self, update_docs_task): self.assertFalse(update_docs_task.signature.called) self.assertFalse(update_docs_task.signature().apply_async.called) + @mock.patch('readthedocs.projects.tasks.update_docs_task') + def test_trigger_build_when_version_not_provided_default_version_exist(self, update_docs_task): + self.assertFalse(Version.objects.filter(slug='test-default-version').exists()) + + project_1 = get(Project) + version_1 = get(Version, project=project_1, slug='test-default-version', active=True) + + project_1.default_version = 'test-default-version' + project_1.save() + + default_version = project_1.get_default_version() + self.assertEqual(default_version, 'test-default-version') + + trigger_build(project=project_1) + kwargs = { + 'version_pk': version_1.pk, + 'record': True, + 'force': False, + 'build_pk': mock.ANY, + } + + update_docs_task.signature.assert_has_calls([ + mock.call( + args=(project_1.pk,), + kwargs=kwargs, + options=mock.ANY, + immutable=True, + ), + ]) + + @mock.patch('readthedocs.projects.tasks.update_docs_task') + def test_trigger_build_when_version_not_provided_default_version_doesnt_exist(self, update_docs_task): + + trigger_build(project=self.project) + default_version = self.project.get_default_version() + version_ = self.project.versions.get(slug=default_version) + + self.assertEqual(version_.slug, LATEST) + + kwargs = { + 'version_pk': version_.pk, + 'record': True, + 'force': False, + 'build_pk': mock.ANY, + } + + update_docs_task.signature.assert_has_calls([ + mock.call( + args=(self.project.pk,), + kwargs=kwargs, + options=mock.ANY, + immutable=True, + ), + ]) + @mock.patch('readthedocs.projects.tasks.update_docs_task') def test_trigger_custom_queue(self, update_docs): """Use a custom queue when routing the task."""
Trigger build on default branch when saving a project

Currently when we save a project form (admin panel), it triggers a build of latest:

https://github.com/rtfd/readthedocs.org/blob/9874b866fea9696fa8495d7b3699f1bf1a3f923d/readthedocs/projects/forms.py#L69-L74

https://github.com/rtfd/readthedocs.org/blob/9874b866fea9696fa8495d7b3699f1bf1a3f923d/readthedocs/core/utils/__init__.py#L97-L98

Even if latest is deactivated, we should trigger a build of the default branch instead.
@stsewd I'm working on it.

@stsewd Does the build need to be triggered for both the `default_branch` and the `latest` branch?

@dojutsu-user I think it should trigger just the `default_branch`, without `latest`.
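The fix replaces the hard-coded `latest` lookup in `prepare_build` with `project.get_default_version()`. A self-contained toy illustrating why that is safe — as far as I can tell, the model method only returns the configured default when an active version with that slug exists, falling back to `latest` otherwise:

```python
LATEST = 'latest'


class Project:
    """Toy stand-in for the Django model, just to show the fallback."""

    def __init__(self, default_version, active_slugs):
        self.default_version = default_version
        self._active_slugs = active_slugs

    def get_default_version(self):
        # Use the configured default only if an active version with
        # that slug actually exists; otherwise fall back to LATEST.
        if self.default_version in self._active_slugs:
            return self.default_version
        return LATEST


assert Project('stable', {'latest', 'stable'}).get_default_version() == 'stable'
assert Project('gone', {'latest'}).get_default_version() == LATEST
```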
2019-03-04T14:55:48
readthedocs/readthedocs.org
5,404
readthedocs__readthedocs.org-5404
[ "5402" ]
5f78337874cb7dd0f73864d43283fe819d03594d
diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py --- a/readthedocs/projects/forms.py +++ b/readthedocs/projects/forms.py @@ -378,6 +378,17 @@ def clean_child(self): ) return child + def clean_alias(self): + alias = self.cleaned_data['alias'] + subproject = self.project.subprojects.filter( + alias=alias).exclude(id=self.instance.pk) + + if subproject.exists(): + raise forms.ValidationError( + _('A subproject with this alias already exists'), + ) + return alias + def get_subproject_queryset(self): """ Return scrubbed subproject choice queryset.
diff --git a/readthedocs/rtd_tests/tests/test_subprojects.py b/readthedocs/rtd_tests/tests/test_subprojects.py --- a/readthedocs/rtd_tests/tests/test_subprojects.py +++ b/readthedocs/rtd_tests/tests/test_subprojects.py @@ -65,6 +65,10 @@ def test_adding_subproject_fails_when_user_is_not_admin(self): form.errors['child'][0], r'Select a valid choice.', ) + self.assertEqual( + [proj_id for (proj_id, __) in form.fields['child'].choices], + [''], + ) def test_adding_subproject_passes_when_user_is_admin(self): user = fixture.get(User) @@ -161,6 +165,45 @@ def test_exclude_self_project_as_subproject(self): [proj_id for (proj_id, __) in form.fields['child'].choices], ) + def test_alias_already_exists_for_a_project(self): + user = fixture.get(User) + project = fixture.get(Project, users=[user]) + subproject = fixture.get(Project, users=[user]) + subproject_2 = fixture.get(Project, users=[user]) + relation = fixture.get( + ProjectRelationship, parent=project, child=subproject, + alias='subproject' + ) + form = ProjectRelationshipForm( + { + 'child': subproject_2.id, + 'alias': 'subproject' + }, + project=project, + user=user, + ) + self.assertFalse(form.is_valid()) + error_msg = 'A subproject with this alias already exists' + self.assertDictEqual(form.errors, {'alias': [error_msg]}) + + def test_edit_only_lists_instance_project_in_child_choices(self): + user = fixture.get(User) + project = fixture.get(Project, users=[user]) + subproject = fixture.get(Project, users=[user]) + relation = fixture.get( + ProjectRelationship, parent=project, child=subproject, + alias='subproject' + ) + form = ProjectRelationshipForm( + instance=relation, + project=project, + user=user, + ) + self.assertEqual( + [proj_id for (proj_id, __) in form.fields['child'].choices], + ['', relation.child.id], + ) + @override_settings(PUBLIC_DOMAIN='readthedocs.org') class ResolverBase(TestCase): @@ -172,12 +215,12 @@ def setUp(self): self.pip = fixture.get(Project, slug='pip', users=[self.owner], main_language_project=None) self.subproject = fixture.get( Project, slug='sub', language='ja', - users=[ self.owner], + users=[self.owner], main_language_project=None, ) self.translation = fixture.get( Project, slug='trans', language='ja', - users=[ self.owner], + users=[self.owner], main_language_project=None, ) self.pip.add_subproject(self.subproject)
Don't allow creating subprojects with the same alias

Currently we can create several subprojects with the same alias. When serving the docs from

https://github.com/rtfd/readthedocs.org/blob/bdb4aa5da5b5b5fb45244176c2bee58fb5442414/readthedocs/core/views/serve.py#L71-L74

it gives a 500 error.
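The patch enforces uniqueness in the form's `clean_alias`. For completeness, an alternative (not what the patch does) would be a database-level constraint on the relationship model, rejecting duplicates no matter which code path creates them. A sketch, with field definitions only approximated from the real model:

```python
from django.db import models


class ProjectRelationship(models.Model):
    parent = models.ForeignKey(
        'projects.Project', related_name='subprojects',
        on_delete=models.CASCADE,
    )
    child = models.ForeignKey(
        'projects.Project', related_name='superprojects',
        on_delete=models.CASCADE,
    )
    alias = models.CharField(max_length=255, null=True, blank=True)

    class Meta:
        # One alias per parent project, enforced by the database.
        # Caveat: most databases do not treat NULL aliases as duplicates,
        # so form-level validation is still useful for user-facing errors.
        unique_together = ('parent', 'alias')
```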
@stsewd I would like to work on this bug fix. Sure
2019-03-06T11:23:52
readthedocs/readthedocs.org
5,407
readthedocs__readthedocs.org-5407
[ "4363" ]
a5616b991b0632161b5b61722fcaaff15d747bd3
diff --git a/readthedocs/doc_builder/backends/sphinx.py b/readthedocs/doc_builder/backends/sphinx.py --- a/readthedocs/doc_builder/backends/sphinx.py +++ b/readthedocs/doc_builder/backends/sphinx.py @@ -213,11 +213,14 @@ def build(self): build_command.append('-E') if self.config.sphinx.fail_on_warning: build_command.append('-W') + doctree_path = f'_build/doctrees-{self.sphinx_builder}' + if self.project.has_feature(Feature.SHARE_SPHINX_DOCTREE): + doctree_path = '_build/doctrees' build_command.extend([ '-b', self.sphinx_builder, '-d', - '_build/doctrees-{format}'.format(format=self.sphinx_builder), + doctree_path, '-D', 'language={lang}'.format(lang=project.language), '.', diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -1313,31 +1313,40 @@ def add_features(sender, **kwargs): API_LARGE_DATA = 'api_large_data' DONT_SHALLOW_CLONE = 'dont_shallow_clone' USE_TESTING_BUILD_IMAGE = 'use_testing_build_image' + SHARE_SPHINX_DOCTREE = 'share_sphinx_doctree' FEATURES = ( (USE_SPHINX_LATEST, _('Use latest version of Sphinx')), (USE_SETUPTOOLS_LATEST, _('Use latest version of setuptools')), (ALLOW_DEPRECATED_WEBHOOKS, _('Allow deprecated webhook views')), (PIP_ALWAYS_UPGRADE, _('Always run pip install --upgrade')), - (SKIP_SUBMODULES, _('Skip git submodule checkout')), ( + (SKIP_SUBMODULES, _('Skip git submodule checkout')), + ( DONT_OVERWRITE_SPHINX_CONTEXT, _( 'Do not overwrite context vars in conf.py with Read the Docs context', ), - ), ( + ), + ( MKDOCS_THEME_RTD, - _('Use Read the Docs theme for MkDocs as default theme') - ), ( + _('Use Read the Docs theme for MkDocs as default theme'), + ), + ( DONT_SHALLOW_CLONE, - _( - 'Do not shallow clone when cloning git repos', - ), - ), ( + _('Do not shallow clone when cloning git repos'), + ), + ( USE_TESTING_BUILD_IMAGE, - _( - 'Use Docker image labelled as `testing` to build the docs', - ), - ), (API_LARGE_DATA, _('Try alternative method of posting large data')) + _('Use Docker image labelled as `testing` to build the docs'), + ), + ( + API_LARGE_DATA, + _('Try alternative method of posting large data'), + ), + ( + SHARE_SPHINX_DOCTREE, + _('Use shared directory for doctrees'), + ), ) projects = models.ManyToManyField(
Is it possible to control the "force" resp. "-E" option for Sphinx? (turns out it's actually the "-d" option) My builds take quite a bit of time and memory because I'm using a custom Sphinx source parser that executes Jupyter notebooks (http://nbsphinx.readthedocs.io/). I've seen that the `-E` option is used when calling Sphinx, which means that for every output format (HTML, JSON, PDF, ...) the source files are parsed (and in my case the Jupyter notebooks are executed) again and again. Parsing the source files multiple times doesn't (and shouldn't!) change anything, it just burns some of my (and your) precious build time and memory. Is it possible to disable this on a per-project basis? I guess there is some reason to do this in the first place, but if not, it should probably be deactivated globally? I've looked through the source code and found that this is controlled by the `force` argument to the builder class and just about everywhere the default is `force=False`. But it seems that the actual build process is somehow started with `force=True`.
mmm, I always thought that option was for reuse with the same builder only (not with different builders).

No, Sphinx parses all source files and stores the "document" (and the "environment") in a representation that's independent of the builder. Normally that doesn't make a big difference, since parsing the source files is quite fast, but in my case it's not. But creating the HTML output and the LaTeX output is very quick if the "environment" is unchanged. In my case that could easily save several minutes of build time.

I'd like to take a deeper look at this, but at first sight I think it's, at least, weird :). I already wanted to remove the `force` attribute some time ago but I left it because I wasn't sure how it was used. Today, I found that the only place where `force=True` is used is when the build is triggered by a webhook:

https://github.com/rtfd/readthedocs.org/blob/8a34164f73f886afea8c2f923e7d42ac7b9ca108/readthedocs/core/views/hooks.py#L45

So, I suppose that if you trigger a build manually, `force` will be `False`. It'd be a good experiment to see how much difference it makes in building time. Could you provide your RTD project's URL here? I've seen some projects having problems with memory limits in the past weeks and maybe this could be one of the reasons (#4403).

Even if we remove the `-E` globally/completely, all the "first time" builds will create the environment from scratch. "First time" builds will happen regularly, since RTD cleans its cache after a couple of hours if the project didn't trigger a build. Also, we currently have 4 build servers, so if the task is executed on 1 and then on 2, those two will be "first time" builds.

That said, I'm not sure that removing this option will change the building time/memory used too much; it may just make RTD builds fail randomly instead.

@humitos since the source files are preserved by the first builder (html), the later builders will reuse them (pdf, json, singlehtml, etc), which would save some time within the same build process each time.

@stsewd that's true, but does the same apply to `nbsphinx`? I mean, I understand that `nbsphinx` is a plugin that runs inside the HTML builder (which is the first one in our steps), so for this particular case it won't make any difference in memory usage, right?

TL;DR: I think I found the solution: all builders should use the same `-d` option!

@humitos

> Could you provide your RTD project's URL here?

Sure, I should have provided those in my original comment. It's https://splines.readthedocs.io/, and an example build is https://readthedocs.org/projects/splines/builds/7518379/.

I didn't notice this before, but it looks like the `-E` option is only used in the first call to Sphinx (using the `readthedocs` builder) but it isn't used in the second call (using the `json` builder). However, even though `-E` is not used in the second call, the source files are parsed again anyway.

After having a closer look, I now think I know what the problem is: each Sphinx call on RTD seems to use a separate `-d` option. That means each builder uses its own "environment" and therefore it can never be re-used between builders! This is a big waste of resources, isn't it?

Here is another example: https://readthedocs.org/projects/nbsphinx/builds/7532399/

Again, the `-E` flag is only used in the first invocation of Sphinx, but the source files are re-parsed anyway in the following invocations (most likely because separate `-d` options are used).
> That said, I'm not sure that removing this option will change the building time/memory used too much; it may just make RTD builds fail randomly instead.

If done correctly, this should cause a significant reduction in building time. I think it makes sense to re-build the "environment" for each new version of the project, because problems could arise with certain changes of the configuration. I guess the current usage of `-E` makes sense for that. But it should be safe to re-use the "environment" for all builders of one given version. While the current usage of `-E` can be kept as is, the usage of `-d` should be changed for that.

@stsewd

> since the source files are preserved by the first builder (html), the later builders will reuse them (pdf, json, singlehtml, etc), which would save some time within the same build process each time.

They *should* re-use the "environment" of the first builder, but apparently they don't. The `-E` flag seems to be used only for the first builder, which is OK. But since each builder uses its own `-d` option, the "environment" is never re-used.

@humitos

> I understand that `nbsphinx` is a plugin that runs inside the HTML builder (which is the first one in our steps), so for this particular case it won't make any difference in memory usage, right?

`nbsphinx` provides a source parser and most of its work is done during parsing, not while the builder is running. Ideally, all Sphinx calls after the first one should completely skip the source parsing stage.

oh, I see. I think RTD has separate build dirs to be able to copy each generated resource in a _clean_ way, but I'm not sure if that is really necessary; maybe we can reuse the same directory.

@stsewd Please note that the "environment" directory (given with the `-d` option, e.g. `_build/doctrees-json`) is different from the "output" directory (given as the second non-option argument, e.g. `_build/json`). I guess it makes sense to have separate "output" directories for the builders in order to easily copy files around, but I don't see why it would make sense to have separate "environment" directories.

At first sight, what you said about sharing the same `-d` PATH between different build steps (html, pdf, epub, etc) makes sense to me --note that `json` was removed and it's done inside the `html` step now.

```
-d PATH    path for the cached environment and doctree files (default: OUTPUTDIR/.doctrees)
```

I'd like to have more context on why this was done like this (independent environment for each step), and to think a little more about whether it's something that can really be shared or whether it could cause some kind of conflict.

It seems it was using a shared directory for doctrees some years ago and it was changed here: https://github.com/rtfd/readthedocs.org/commit/a5d9d45e4c3f168cbd89326c99df28557d83efd4

There is not much info there; maybe @ericholscher or @agjohnson can help us here. To me, it seems like a reasonable thing to revert and use a shared path. From the docs http://www.sphinx-doc.org/en/master/man/sphinx-build.html#cmdoption-sphinx-build-d

> the doctrees can be shared between all builders

I don't have any background on why this would have been changed. The change seems to imply we did have some issues sharing the doctree environment, though. If I had to guess, I'd say we're safe to reuse the doctree, though I think we don't want to reuse the existing doctree on the first build maybe?
I haven't dug too deep here, but it looks like we force a new doctree env on the first builder (unless it's the only builder?), and we don't force on subsequent build steps, though we are using one-off doctree envs. It seems like what we want is to always wipe the doctree env and share that between builds instead? If there isn't a good reason to separate the doctree envs, I think this makes sense.

Yea, I don't 100% remember why this was done. I'm guessing because of build caching of changed files. So:

* I build the HTML files, and it updates all changed files
* I build the PDF, but it doesn't see any files as changed since the "last" build, so it doesn't update anything

I'd be curious to see benchmarks here, and some real testing to see if this is an issue while running. I'm guessing it will lead to issues.

> I build the PDF, but it doesn't see any files as changed since the "last" build, so it doesn't update anything

I don't think this will happen, because the documentation says that it can be shared among all the builders. Although, it's definitely worth a test.

> I'd be curious to see benchmarks here, and some real testing to see if this is an issue while running. I'm guessing it will lead to issues.

This also needs some testing and could help us decide what to do here. I wouldn't take any action before having at least a couple of tests shared here for different projects. Otherwise, we will end up with strange behavior in production and we won't know why.

This is perhaps a good candidate for a feature flag. We can test a small sample with this change. It is probably a slight speed-up for builds, so it might be a good addition. I'm putting this a few versions out though; it probably isn't a pressing priority right now. If we notice build bugs, we can re-evaluate priority.

Just putting some tests here:

# Separated doctrees

```
sphinx-build -T -b html -d _build/doctrees . _build/html  9.01s user 0.30s system 72% cpu 12.873 total
sphinx-build -T -b singlehtml -d _build/doctrees-singlehtml .  6.21s user 0.28s system 67% cpu 9.614 total
sphinx-build -T -b html -d _build/doctrees . _build/html  13.76s user 0.44s system 82% cpu 17.216 total
sphinx-build -T -b singlehtml -d _build/doctrees-singlehtml .  9.62s user 0.42s system 65% cpu 15.374 total
sphinx-build -T -b html -d _build/doctrees . _build/html  9.90s user 0.32s system 73% cpu 13.818 total
sphinx-build -T -b singlehtml -d _build/doctrees-singlehtml .  6.16s user 0.28s system 65% cpu 9.828 total
sphinx-build -T -b html -d _build/doctrees . _build/html  13.36s user 0.35s system 79% cpu 17.183 total
sphinx-build -T -b singlehtml -d _build/doctrees-singlehtml .  8.08s user 0.40s system 59% cpu 14.348 total
sphinx-build -T -b html -d _build/doctrees . _build/html  9.48s user 0.33s system 78% cpu 12.575 total
sphinx-build -T -b singlehtml -d _build/doctrees-singlehtml .  6.28s user 0.26s system 71% cpu 9.192 total
```

# Shared doctrees

```
sphinx-build -T -b html -d _build/doctrees . _build/html  9.96s user 0.31s system 78% cpu 13.104 total
sphinx-build -T -b singlehtml -d _build/doctrees . _build/singlehtml  3.34s user 0.24s system 99% cpu 3.615 total
sphinx-build -T -b html -d _build/doctrees . _build/html  8.96s user 0.33s system 70% cpu 13.119 total
sphinx-build -T -b singlehtml -d _build/doctrees . _build/singlehtml  3.42s user 0.20s system 99% cpu 3.625 total
sphinx-build -T -b html -d _build/doctrees . _build/html  11.01s user 0.36s system 79% cpu 14.351 total
sphinx-build -T -b singlehtml -d _build/doctrees . _build/singlehtml  3.41s user 0.23s system 99% cpu 3.652 total
```

> This is perhaps a good candidate for a feature flag. We can test a small sample with this change. It is probably a slight speed-up for builds, so it might be a good addition.

:+1: we should go in this direction, since it is not harmful.

I'm curious if the projects in prod are still active. We can probably just ping the 4 of them and ask them to configure formats, and be done with it.

@ericholscher wrong issue I guess :p

Ha yea, I was wondering where that comment went :D
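The feature-flag approach is what the patch at the top of this record implements: projects opted in via `SHARE_SPHINX_DOCTREE` pass one shared `-d` directory to every builder. A self-contained sketch restating that selection logic:

```python
def doctree_dir(sphinx_builder, share_doctrees):
    """Pick the ``-d`` path for a sphinx-build invocation.

    ``share_doctrees`` mirrors the SHARE_SPHINX_DOCTREE feature flag:
    opted-in projects reuse one cached environment across builders,
    so the sources only get parsed once per build.
    """
    if share_doctrees:
        return '_build/doctrees'
    return '_build/doctrees-{}'.format(sphinx_builder)


# Legacy behavior: one environment per builder, sources re-parsed each time.
assert doctree_dir('html', False) == '_build/doctrees-html'
assert doctree_dir('latex', False) == '_build/doctrees-latex'

# Flagged behavior: html and latex share the cached environment.
assert doctree_dir('html', True) == doctree_dir('latex', True)
```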
2019-03-06T17:09:59