diff --git a/.gitignore b/.gitignore index f6226761..6b92becc 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,8 @@ ENV/ .cache *.egg-info .coverage +.pytest_cache # nix stuff result +result-* diff --git a/.travis.yml b/.travis.yml index 71071941..18fafdb8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,11 +4,11 @@ services: language: nix deploy: provider: script - script: make dockerize && make docker-push + script: make dockerize && make docker-push VERSION="$TRAVIS_BRANCH" on: repo: smarkets/marge-bot - tags: true - condition: "$TRAVIS_TAG = $(cat version)" + all_branches: true + condition: "$TRAVIS_BRANCH = $(cat version) || $TRAVIS_BRANCH = master" env: global: # smarkets ci docker username diff --git a/CHANGELOG.md b/CHANGELOG.md index 4cf3325f..620aedc7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,33 @@ + * 0.9.1: + - Feature: support passing a timezone with the embargo #228 + - Fix: fix not checking the target project for MRs from forked projects #218 + * 0.9.0: + - Feature: support rebasing through GitLab's API #160 + - Feature: allow restrict source branches #206 + - Fix: only fetch projects with min access level #166 + - Fix: bump all dependencies (getting rid of vulnerable packages) #179 + - Fix: support multiple assignees #186, #192 + - Fix: fetch pipelines by merge request instead of branch #212 + - Fix: fix unassign when author is Marge #211 + - Enhancement: ignore archived projects #177 + - Enhancement: add a timeout to all gitlab requests #200 + - Enhancement: smaller docker image size #199 + * 0.8.1 + - Feature: allow merging in order of last-update time #149 + * 0.8.0 + - Feature: allow reference repository in git clone #129 + - Feature: add new stable/master tags for docker images #142 + - Fix: fix TypeError when fetching source project #122 + - Fix: handle CI status 'skipped' #127 + - Fix: handle merging when source branch is master #127 + - Fix: handle error on pushing to protected branches #127 + - Enhancement: add appropriate 
error if unresolved discussions on merge request #136 + - Enhancement: ensure reviewer and commit author aren't the same #137 + * 0.7.0: + - Feature: add `--batch` to better support repos with many daily MRs and slow-ish CI (#84, #116) + - Fix: fix fuse() call when using experimental --use-merge-strategy to update source branch #102 + - Fix: Get latest CI status of a commit filtered by branch #96 (thanks to benjamb) + - Enhancement: Check MR is mergeable before accepting MR #117 * 0.6.1: - Fix when target SHA is retrieved #92. - Replace word "gitlab" with "GitLab" #93. diff --git a/Makefile b/Makefile index 090ce6e7..d95ea86e 100644 --- a/Makefile +++ b/Makefile @@ -1,34 +1,50 @@ -requirements_frozen.txt requirements.nix requirements_override.nix: requirements.txt - pypi2nix -V 3.6 -r $^ +VERSION?=$$(git rev-parse --abbrev-ref HEAD) .PHONY: all -all: requirements_frozen.txt requirements.nix requirements_override.nix default.nix - nix-build -K . +all: requirements_frozen.txt requirements.nix requirements_override.nix marge-bot dockerize + +.PHONY: marge-bot +marge-bot: + nix-build --keep-failed --attr marge-bot default.nix .PHONY: clean clean: - rm -rf .cache result requirements_frozen.txt + rm -rf .cache result result-* requirements_frozen.txt + +.PHONY: bump +bump: bump-requirements bump-sources + +.PHONY: bump-sources +bump-sources: + nix-shell --run niv update .PHONY: bump-requirements bump-requirements: clean requirements_frozen.txt -.PHONY: dockerize -dockerize: dockerize.nix - docker load --input $$(nix-build dockerize.nix) +requirements_frozen.txt requirements.nix requirements_override.nix: requirements.txt + pypi2nix -V 3.6 -r $^ +.PHONY: dockerize +dockerize: + docker load --input $$(nix-build --attr docker-image default.nix) .PHONY: docker-push docker-push: if [ -n "$$DOCKER_USERNAME" -a -n "$$DOCKER_PASSWORD" ]; then \ - docker login -u "$${DOCKER_USERNAME}" -p "$${DOCKER_PASSWORD}"; \ + docker login -u "$${DOCKER_USERNAME}" -p "$${DOCKER_PASSWORD}"; \ 
else \ - docker login; \ + docker login; \ + fi + docker tag smarkets/marge-bot:$$(cat version) smarkets/marge-bot:$(VERSION) + if [ "$(VERSION)" = "$$(cat version)" ]; then \ + docker tag smarkets/marge-bot:$$(cat version) smarkets/marge-bot:latest; \ + docker tag smarkets/marge-bot:$$(cat version) smarkets/marge-bot:stable; \ + docker push smarkets/marge-bot:stable; \ + docker push smarkets/marge-bot:latest; \ fi - docker tag smarkets/marge-bot:$$(cat version) smarkets/marge-bot:latest - docker push smarkets/marge-bot:$$(cat version) - docker push smarkets/marge-bot:latest + docker push smarkets/marge-bot:$(VERSION) # for backwards compatibility push to previous location - docker tag smarkets/marge-bot:latest smarketshq/marge-bot:latest - docker tag smarkets/marge-bot:latest smarketshq/marge-bot:$$(cat version) - docker push smarketshq/marge-bot:$$(cat version) + docker tag smarkets/marge-bot:$$(cat version) smarketshq/marge-bot:latest + docker tag smarkets/marge-bot:$$(cat version) smarketshq/marge-bot:$(VERSION) + docker push smarketshq/marge-bot:$(VERSION) docker push smarketshq/marge-bot:latest diff --git a/README.md b/README.md index d0ce4b45..20e4bdf2 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -[![build status](https://travis-ci.org/smarkets/marge-bot.png?branch=master)](https://travis-ci.org/smarkets/marge-bot) +[![build status](https://travis-ci.org/smarkets/marge-bot.svg?branch=master)](https://travis-ci.org/smarkets/marge-bot) # Marge-bot @@ -47,7 +47,13 @@ of commits (e.g. `Reviewed-by: ...`) or preventing merges during certain hours. ## Configuring -Args that start with '--' (eg. --auth-token) can also be set in a config file (specified via --config-file). The config file uses YAML syntax and must represent a YAML 'mapping' (for details, see http://learn.getgrav.org/advanced/yaml). If an arg is specified in more than one place, then commandline values override environment variables which override config file values which override defaults. 
+Args that start with '--' (eg. --auth-token) can also be set in a config file +(specified via --config-file). The config file uses YAML syntax and must +represent a YAML 'mapping' (for details, see +http://learn.getgrav.org/advanced/yaml). If an arg is specified in more than one +place, then commandline values override environment variables which override +config file values which override defaults. + ```bash optional arguments: -h, --help show this help message and exit @@ -69,13 +75,18 @@ optional arguments: --ssh-key-file FILE Path to the private ssh key for marge so it can clone/push. [env var: MARGE_SSH_KEY_FILE] (default: None) --embargo INTERVAL[,..] - Time(s) during which no merging is to take place, e.g. "Friday 1pm - Monday 9am". + Time(s) during which no merging is to take place, e.g. "Friday 1pm - Monday 9am" + or "Fri 12:30 Europe/London - Mon 08:00 Europe/London" [env var: MARGE_EMBARGO] (default: None) - --use-merge-strategy Use git merge instead of git rebase (EXPERIMENTAL) - Enable if you use a workflow based on merge-commits and not linear history. + --use-merge-strategy Use git merge instead of git rebase to update the *source* branch (EXPERIMENTAL) + If you need to use a strict no-rebase workflow (in most cases + you don't want this, even if you configured gitlab to use merge requests + to use merge commits on the *target* branch (the default).) [env var: MARGE_USE_MERGE_STRATEGY] (default: False) --add-tested Add "Tested: marge-bot <$MR_URL>" for the final commit on branch after it passed CI. [env var: MARGE_ADD_TESTED] (default: False) + --batch Enable processing MRs in batches + [env var: MARGE_BATCH] (default: False) --add-part-of Add "Part-of: <$MR_URL>" to each commit in MR. [env var: MARGE_ADD_PART_OF] (default: False) --add-reviewers Add "Reviewed-by: $approver" for each approver of MR to each commit in MR. @@ -83,11 +94,15 @@ optional arguments: --impersonate-approvers Marge-bot pushes effectively don't change approval status. 
[env var: MARGE_IMPERSONATE_APPROVERS] (default: False) + --merge-order The order you want marge to merge its requests. + As of earliest merge request creation time (created_at) or update time (updated_at) + [env var: MARGE_MERGE_ORDER] (default: created_at) --approval-reset-timeout APPROVAL_RESET_TIMEOUT How long to wait for approvals to reset after pushing. Only useful with the "new commits remove all approvals" option in a project's settings. This is to handle the potential race condition where approvals don't reset in GitLab after a force push due to slow processing of the event. + [env var: MARGE_APPROVAL_RESET_TIMEOUT] (default: 0s) --project-regexp PROJECT_REGEXP Only process projects that match; e.g. 'some_group/.*' or '(?!exclude/me)'. [env var: MARGE_PROJECT_REGEXP] (default: .*) @@ -100,13 +115,14 @@ optional arguments: --git-timeout GIT_TIMEOUT How long a single git operation can take. [env var: MARGE_GIT_TIMEOUT] (default: 120s) + --git-reference-repo GIT_REFERENCE_REPO + A reference repo to be used when git cloning. + [env var: MARGE_GIT_REFERENCE_REPO] (default: None) --branch-regexp BRANCH_REGEXP Only process MRs whose target branches match the given regular expression. [env var: MARGE_BRANCH_REGEXP] (default: .*) --debug Debug logging (includes all HTTP requests etc). [env var: MARGE_DEBUG] (default: False) - --batch Enable processing MRs in batches. - [env var: MARGE_BATCH] (default: False) ``` Here is a config file example ```yaml @@ -140,17 +156,30 @@ code strips trailing whitespace in the name, so it won't show up elsewhere). Then add `marge-bot` to your projects as `Developer` or `Master`, the latter being required if she will merge to protected branches. -For certain features, namely, `--impersonate-approvers`, and -`--add-reviewers`, you will need to grant `marge-bot` admin privileges as -well. In the latter, so that she can query the email of the reviewers to include -it in the commit. 
+For certain features, namely, `--impersonate-approvers`, and `--add-reviewers`, +you will need to grant `marge-bot` admin privileges as well. In the latter, so +that she can query the email of the reviewers to include it in the commit. Note +that if you're trying to run marge-bot against a GitLab instance you don't have +yourself admin access to (e.g. https://www.gitlab.com), you won't be able to use +features that require admin for marge-bot. + +Second, you need an authentication token for the `marge-bot` user. You will need +to select the `api` and `read_user` scopes in all cases. + +If marge-bot was made an admin to handle approver impersonation and/or adding a +reviewed-by field, then you will also need to add **`sudo`** scope under +`Impersonation Tokens` in the User Settings. Assuming your GitLab install is +`https://your-gitlab.example.com` the link will be at +`https://your-gitlab.example.com/admin/users/marge-bot/impersonation_tokens`. + +On older GitLab installs, to be able to use impersonation features if marge-bot +was made an admin, use the **PRIVATE TOKEN** found in marge-bot's `Profile +Settings`; otherwise just use a personal token (you will need to impersonate the +marge-bot user via the admin UI to get the private token, it should then be at +`http://my-gitlab.example.com/profile/personal_access_tokens` reachable via +`Profile Settings -> Access Tokens`). -Second, you need an authentication token for the `marge-bot` user. If she was -made an admin to handle approver impersonation and/or adding a reviewed-by -field, then you will need to use the **PRIVATE TOKEN** found in her `Profile -Settings`. Otherwise, you can just use a personal access token that can be -generated from `Profile Settings -> Access Tokens`. Make sure it has `api` and -`read_user` scopes. Put the token in a file, e.g. `marge-bot.token`. +Once you have the token, put it in a file, e.g. `marge-bot.token`. 
Finally, create a new ssh key-pair, e.g like so @@ -158,42 +187,66 @@ Finally, create a new ssh key-pair, e.g like so ssh-keygen -t ed25519 -C marge-bot@invalid -f marge-bot-ssh-key -P '' ``` -Add the public key (`marge-bot-ssh-key.pub`) to the user's `SSH Keys` in Gitlab +Add the public key (`marge-bot-ssh-key.pub`) to the user's `SSH Keys` in GitLab and keep the private one handy. -The bot can then be started from the command line as follows (using the minimal settings): -```bash -marge.app --auth-token-file marge-bot.token \ - --gitlab-url 'http://your.gitlab.instance.com' \ - --ssh-key-file marge-bot-ssh-key -``` +### Running marge-bot in docker (what we do) -Alternatively, you can also pass the auth token as the environment variable -`MARGE_AUTH_TOKEN` and the **CONTENTS** of the ssh-key-file as the environment -variable `MARGE_SSH_KEY`. This is very useful for running the official docker -image we provide: +Assuming you have already got docker installed, the quickest and most minimal +way to run marge is like so (*but see note about passing secrets on the +commandline below*): ```bash -docker run \ +docker run --restart=on-failure \ # restart if marge crashes because GitLab is flaky -e MARGE_AUTH_TOKEN="$(cat marge-bot.token)" \ -e MARGE_SSH_KEY="$(cat marge-bot-ssh-key)" \ smarkets/marge-bot \ --gitlab-url='http://your.gitlab.instance.com' ``` -For completeness sake, here's how we run marge-bot at Smarkets ourselves: +Note that other users on the machine can see the secrets in `ps`, because +although they are env vars *inside* docker, we used a commandline switch to set +them for docker run. + +To avoid that you have several options. 
You can just use a yaml file and mount +that into the container, for example this is how we actually run marge-bot at +Smarkets ourselves: + +```yaml +# marge-bot-config.yml +add-part-of: true +add-reviewers: true +add-tested: true +impersonate-approvers: true +gitlab-url: "https://git.corp.smarkets.com" +project-regexp: "smarkets/smarkets$" +auth-token: "WoNtTelly0u" +ssh-key: | + -----BEGIN OPENSSH PRIVATE KEY----- + [...] + -----END OPENSSH PRIVATE KEY----- +``` + ```bash -docker run \ - -e MARGE_AUTH_TOKEN="$(cat marge-bot.token)" \ - -e MARGE_SSH_KEY="$(cat marge-bot-ssh-key)" \ +docker run --restart=on-failure \ + -v "$(pwd)":/configuration \ smarkets/marge-bot \ - --add-tested \ - --add-reviewers \ - --impersonate-approvers \ - --gitlab-url='http://your.gitlab.instance.com' + --config-file=/configuration/marge-bot-config.yaml ``` -Kubernetes templating with ktmpl: +By default docker will use the `latest` tag, which corresponds to the latest +stable version. You can also use the `stable` tag to make this more explicit. +If you want a development version, you can use the `master` tag to obtain an +image built from the HEAD commit of the `master` branch. Note that this image +may contain bugs. + +You can also specify a particular version as a tag, e.g. +`smarkets/marge-bot:0.7.0`. + +### Running marge-bot in kubernetes +It's also possible to run marge in kubernetes, e.g. here's how you use a ktmpl +template: + ```bash ktmpl ./deploy.yml \ --parameter APP_NAME "marge-bot" \ @@ -208,6 +261,32 @@ ktmpl ./deploy.yml \ Once running, the bot will continuously monitor all projects that have its user as a member and will pick up any changes in membership at runtime. +### Running marge-bot as a plain python app + +#### Installing marge-bot with nix + +Alternatively, if you prefer not to use docker, you can also directly run marge. +If you use [nix](https://nixos.org/nix/) do `nix-env --install -f default.nix`. 
+ +The nix install should be fully reproducible on any version of linux (and also +work on OS X, although this is not something we properly test). If you don't +want to use docker we recommend you give nix a try. + +#### Installing marge-bot the old-fashioned way + +Finally, although this is our least preferred alternative, you can always do +`python3 setup.py install` (note that you will need python3.6). + +Afterwards, the minimal way to run marge is as follows. + +```bash +marge.app --auth-token-file marge-bot.token \ + --gitlab-url 'http://your.gitlab.instance.com' \ + --ssh-key-file marge-bot-ssh-key +``` + +However, we suggest you use a systemd unit file or some other mechanism to +automatically restart marge-bot in case of intermittent GitLab problems. ## Suggested workflow 1. Alice creates a new merge request and assigns Bob and Charlie as reviewers @@ -262,7 +341,7 @@ commits introduced by a single Merge Request when using a fast-forward/rebase based merge workflow. ## Impersonating approvers -If you want a full audit trail, you will configure Gitlab +If you want a full audit trail, you will configure GitLab [require approvals](https://docs.gitlab.com/ee/user/project/merge_requests/merge_request_approvals.html#approvals-required) for PRs and also turn on [reset approvals on push](https://docs.gitlab.com/ee/user/project/merge_requests/merge_request_approvals.html#reset-approvals-on-push). @@ -292,9 +371,9 @@ significantly speed up the rate at which marge-bot processes jobs - not just because merge requests can be tested together, but because marge-bot will ensure the whole set of merge requests is mergeable first. This includes, for example, checking if a merge request is marked as WIP, or does not have enough approvals. -Essentially, users get faster feedback if there is an issue. - -This is currently an experimental feature. +Essentially, users get faster feedback if there is an issue. 
Note that you +probably won't need this unless you have tens of merge requests a day (or +extremely slow CI). ### How it works @@ -315,11 +394,15 @@ request, before attempting a new batch job. ### Limitations -* Batch mode is incompatible with the tested-by trailer, as trailers are only - added to the original merge requests. This means the tested-by trailer would - be added to each merge request's last commit, as opposed to the last commit of - the last merge request in the batch, which is the only that should be - considered tested. +* Currently we still add the tested-by trailer for each merge request's final + commit in the batch, but it would probably be more correct to add the trailer + only to the last commit in the whole batch request (since that's the only one + we know passed for sure in that combination). We might change this in the + future or make it configurable, but note that there's still a much stronger + chance all intermittent final commits also passed then when just testing on + each source branch, because we know the final linearization of all commits + passes in that all MRs passed individually on their branches. + * As trailers are added to the original merge requests only, their branches would need to be pushed to in order to reflect this change. This would trigger CI in each of the branches again that would have to be passed before merging, @@ -330,9 +413,10 @@ request, before attempting a new batch job. passed CI. This does still mean the triggered CI jobs will be running even though the merge requests are merged. marge-bot will attempt to cancel these pipelines, although this doesn't work too effectively if external CI is used. + * There is what can be considered to be a flaw in this implementation that could potentially result in a non-green master; consider the following situation: - + 1. A batch merge request is created, and passes CI. 2. 
Several merge requests are then merged to master, but one could fail (perhaps due to someone pushing directly to master in between). @@ -370,6 +454,8 @@ marge-bot with `--embargo "Friday 1pm - Monday 9am" --branch-regexp master` and the other with `--branch-regexp (?!master)`. This would allow development to continue on other branches during the embargo on master. +It is possible to restrict the source branches with `--source-branch-regexp`. + ## Some handy git aliases Only `git bisect run` on commits that have passed CI (requires running marge-bot with `--add-tested`): diff --git a/default.nix b/default.nix index 1f4c3c05..6bfb447f 100644 --- a/default.nix +++ b/default.nix @@ -1,3 +1,6 @@ -let pkgs = import ./pinnedNixpkgs.nix; -in -pkgs.callPackage ./marge.nix {} +let sources = import ./nix/sources.nix; in +with import sources.nixpkgs {}; +{ + marge-bot = callPackage ./marge.nix {}; + docker-image = callPackage ./dockerize.nix {}; +} diff --git a/dockerize.nix b/dockerize.nix index b7466d4d..cafe9f1f 100644 --- a/dockerize.nix +++ b/dockerize.nix @@ -1,20 +1,39 @@ -{ pkgs ? 
import ./pinnedNixpkgs.nix }: -let callPackage = pkgs.lib.callPackageWith (pkgs); - marge = callPackage ./marge.nix {}; - version = marge.version; +{ pkgs }: +let + marge = pkgs.callPackage ./marge.nix {}; + version = marge.version; + basicShadow = + # minimal user setup, so ssh won't whine 'No user exists for uid 0' + pkgs.runCommand "basic-shadow-setup" {} + '' + mkdir -p $out + cd $out + mkdir -p root/.ssh + mkdir -p etc/pam.d + echo "root:x:0:0::/root:/bin/sh" >etc/passwd + echo "root:!x:::::::" >etc/shadow + echo "root:x:0:" >etc/group + echo "root:x::" >etc/gshadow + cat >etc/pam.d/other <<\EOF + account sufficient pam_unix.so + auth sufficient pam_rootok.so + password requisite pam_unix.so nullok sha512 + session required pam_unix.so + EOF + ''; in -pkgs.dockerTools.buildImage { - name = "smarkets/marge-bot"; - tag = "${version}"; - # minimal user setup, so ssh won't whine 'No user exists for uid 0' - runAsRoot = '' - #!${pkgs.stdenv.shell} - ${pkgs.dockerTools.shadowSetup} - mkdir -p /root/.ssh - ''; - contents = [marge pkgs.bash pkgs.coreutils pkgs.openssh pkgs.glibcLocales]; - config = { - Entrypoint = [ "/bin/marge.app" ]; - Env = ["LANG=en_US.UTF-8" ''LOCALE_ARCHIVE=/lib/locale/locale-archive'']; - }; -} + pkgs.dockerTools.buildImage { + name = "smarkets/marge-bot"; + tag = "${version}"; + contents = + with pkgs; [ + basicShadow + busybox + gitMinimal + openssh + ] ++ [ marge ]; + config = { + Entrypoint = [ "/bin/marge.app" ]; + Env = ["LANG=en_US.UTF-8" ''LOCALE_ARCHIVE=/lib/locale/locale-archive'']; + }; + } diff --git a/marge.nix b/marge.nix index c4eb3dc6..667b48dc 100644 --- a/marge.nix +++ b/marge.nix @@ -1,22 +1,40 @@ -{pkgs ? 
import ./pinnedNixpkgs.nix }: -let version = builtins.replaceStrings ["\n"] [""] (builtins.readFile ./version); - python = (import ./requirements.nix { inherit pkgs; }); - py = python.packages; +{ pkgs +, lib +}: +let + python = import ./requirements.nix { inherit pkgs; }; + version = lib.fileContents ./version; in -python.mkDerivation { - version = "${version}"; - name = "marge-${version}"; - src = ./.; - buildInputs = [py.pytest py.pytest-cov py.pytest-flake8 py.pytest-pylint py.pytest-runner]; - propagatedBuildInputs = [py.ConfigArgParse py.maya py.PyYAML py.requests pkgs.openssh pkgs.git]; - meta = { - homepage = "https://github.com/smarkets/marge-bot"; - description = "A build bot for GitLab"; - license = with pkgs.lib.licenses; [bsd3] ; - maintainers = [ - "Daniel Gorin " - "Alexander Schmolck " + python.mkDerivation { + version = "${version}"; + name = "marge-${version}"; + src = lib.sourceByRegex ./. [ + "marge(/.*\.py)?" + "tests(/.*\.py)?" + "marge\.app" + "pylintrc" + "setup\.cfg" + "setup\.py" + "version" ]; - platforms = pkgs.lib.platforms.linux ++ pkgs.lib.platforms.darwin; - }; - } + checkInputs = with python.packages; [ + pytest + pytest-cov + pytest-flake8 + pytest-pylint + pytest-runner + ]; + propagatedBuildInputs = with python.packages; [ + ConfigArgParse maya PyYAML requests + ]; + meta = { + homepage = "https://github.com/smarkets/marge-bot"; + description = "A build bot for GitLab"; + license = lib.licenses.bsd3; + maintainers = [ + "Alexander Schmolck " + "Jaime Lennox " + ]; + platforms = pkgs.lib.platforms.linux ++ pkgs.lib.platforms.darwin; + }; + } diff --git a/marge/app.py b/marge/app.py index 8e826887..71b98df9 100644 --- a/marge/app.py +++ b/marge/app.py @@ -104,19 +104,30 @@ def regexp(str_regex): '--use-merge-strategy', action='store_true', help=( - 'Use git merge instead of git rebase (EXPERIMENTAL)\n' - 'Enable if you use a workflow based on merge-commits and not linear history.\n' + 'Use git merge instead of git rebase to 
update the *source* branch (EXPERIMENTAL)\n' + 'If you need to use a strict no-rebase workflow (in most cases\n' + 'you don\'t want this, even if you configured gitlab to use merge requests\n' + 'to use merge commits on the *target* branch (the default).)\n' ), ) - experimental_group.add_argument( + parser.add_argument( + '--rebase-remotely', + action='store_true', + help=( + "Instead of rebasing in a local clone of the repository, use GitLab's\n" + "built-in rebase functionality, via their API. Note that Marge can't add\n" + "information in the commits in this case.\n" + ), + ) + parser.add_argument( '--add-tested', action='store_true', help='Add "Tested: marge-bot <$MR_URL>" for the final commit on branch after it passed CI.\n', ) - experimental_group.add_argument( + parser.add_argument( '--batch', action='store_true', - help='Enable processing MRs in batches (EXPERIMENTAL)\n', + help='Enable processing MRs in batches\n', ) parser.add_argument( '--add-part-of', @@ -133,6 +144,12 @@ def regexp(str_regex): action='store_true', help='Marge-bot pushes effectively don\'t change approval status.\n', ) + parser.add_argument( + '--merge-order', + default='created_at', + choices=('created_at', 'updated_at'), + help='Order marge merges assigned requests. 
created_at (default) or updated_at.\n', + ) parser.add_argument( '--approval-reset-timeout', type=time_interval, @@ -168,12 +185,24 @@ def regexp(str_regex): default='120s', help='How long a single git operation can take.\n' ) + parser.add_argument( + '--git-reference-repo', + type=str, + default=None, + help='A reference repo to be used when git cloning.\n' + ) parser.add_argument( '--branch-regexp', type=regexp, default='.*', help='Only process MRs whose target branches match the given regular expression.\n', ) + parser.add_argument( + '--source-branch-regexp', + type=regexp, + default='.*', + help='Only process MRs whose source branches match the given regular expression.\n', + ) parser.add_argument( '--job-regexp', type=regexp, @@ -192,6 +221,21 @@ def regexp(str_regex): ) config = parser.parse_args(args) + if config.use_merge_strategy and config.batch: + raise MargeBotCliArgError('--use-merge-strategy and --batch are currently mutually exclusive') + if config.use_merge_strategy and config.add_tested: + raise MargeBotCliArgError('--use-merge-strategy and --add-tested are currently mutually exclusive') + if config.rebase_remotely: + conflicting_flag = [ + '--use-merge-strategy', + '--add-tested', + '--add-reviewers', + '--add-part-of', + ] + for flag in conflicting_flag: + if getattr(config, flag[2:].replace("-", "_")): + raise MargeBotCliArgError('--rebase-remotely and %s are mutually exclusive' % flag) + cli_args = [] # pylint: disable=protected-access for _, (_, value) in parser._source_to_settings.get(configargparse._COMMAND_LINE_SOURCE_KEY, {}).items(): @@ -218,7 +262,7 @@ def _secret_auth_token_and_ssh_key(options): def main(args=None): - if not args: + if args is None: args = sys.argv[1:] logging.basicConfig() @@ -242,12 +286,28 @@ def main(args=None): if options.batch: logging.warning('Experimental batch mode enabled') + if options.use_merge_strategy: + fusion = bot.Fusion.merge + elif options.rebase_remotely: + version = api.version() + if 
version.release < (11, 6): + raise Exception( + "Need GitLab 11.6+ to use rebase through the API, " + "but your instance is {}".format(version) + ) + fusion = bot.Fusion.gitlab_rebase + else: + fusion = bot.Fusion.rebase + config = bot.BotConfig( user=user, ssh_key_file=ssh_key_file, project_regexp=options.project_regexp, git_timeout=options.git_timeout, + git_reference_repo=options.git_reference_repo, branch_regexp=options.branch_regexp, + source_branch_regexp=options.source_branch_regexp, + merge_order=options.merge_order, merge_opts=bot.MergeJobOptions.default( add_tested=options.add_tested, add_part_of=options.add_part_of, @@ -256,7 +316,7 @@ def main(args=None): approval_timeout=options.approval_reset_timeout, embargo=options.embargo, ci_timeout=options.ci_timeout, - use_merge_strategy=options.use_merge_strategy, + fusion=fusion, job_regexp=options.job_regexp, create_pipeline=options.create_pipeline, ), diff --git a/marge/bot.py b/marge/bot.py index 96c550a6..398dde46 100644 --- a/marge/bot.py +++ b/marge/bot.py @@ -14,7 +14,7 @@ MergeRequest = merge_request_module.MergeRequest -class Bot(object): +class Bot: def __init__(self, *, api, config): self._api = api self._config = config @@ -37,6 +37,7 @@ def start(self): root_dir=root_dir, ssh_key_file=self._config.ssh_key_file, timeout=self._config.git_timeout, + reference=self._config.git_reference_repo, ) self._run(repo_manager) @@ -90,7 +91,7 @@ def _process_projects( for project in projects: project_name = project.path_with_namespace - if project.access_level.value < AccessLevel.reporter.value: + if project.access_level < AccessLevel.reporter: log.warning("Don't have enough permissions to browse merge requests in %s!", project_name) continue merge_requests = self._get_merge_requests(project, project_name) @@ -102,7 +103,8 @@ def _get_merge_requests(self, project, project_name): my_merge_requests = MergeRequest.fetch_all_open_for_user( project_id=project.id, user_id=self.user.id, - api=self._api + 
api=self._api, + merge_order=self._config.merge_order, ) branch_regexp = self._config.branch_regexp filtered_mrs = [mr for mr in my_merge_requests @@ -117,7 +119,20 @@ def _get_merge_requests(self, project, project_name): 'MRs that do not match branch_regexp: %s', [mr.web_url for mr in filtered_out] ) - return filtered_mrs + source_branch_regexp = self._config.source_branch_regexp + source_filtered_mrs = [mr for mr in filtered_mrs + if source_branch_regexp.match(mr.source_branch)] + log.debug( + 'MRs that match source_branch_regexp: %s', + [mr.web_url for mr in source_filtered_mrs] + ) + source_filtered_out = set(filtered_mrs) - set(source_filtered_mrs) + if filtered_out: + log.debug( + 'MRs that do not match source_branch_regexp: %s', + [mr.web_url for mr in source_filtered_out] + ) + return source_filtered_mrs def _process_merge_requests(self, repo_manager, project, merge_requests): if not merge_requests: @@ -153,20 +168,28 @@ def _process_merge_requests(self, repo_manager, project, merge_requests): log.exception('BatchMergeJob failed: %s', err) log.info('Attempting to merge the oldest MR...') merge_request = merge_requests[0] - merge_job = single_merge_job.SingleMergeJob( + merge_job = self._get_single_job( + project=project, merge_request=merge_request, repo=repo, + options=self._config.merge_opts, + ) + merge_job.execute() + + def _get_single_job(self, project, merge_request, repo, options): + return single_merge_job.SingleMergeJob( api=self._api, user=self.user, project=project, merge_request=merge_request, repo=repo, - options=self._config.merge_opts, + options=options, ) - merge_job.execute() class BotConfig(namedtuple('BotConfig', - 'user ssh_key_file project_regexp merge_opts git_timeout branch_regexp batch')): + 'user ssh_key_file project_regexp merge_order merge_opts git_timeout ' + + 'git_reference_repo branch_regexp source_branch_regexp batch')): pass MergeJobOptions = job.MergeJobOptions +Fusion = job.Fusion diff --git a/marge/branch.py 
b/marge/branch.py new file mode 100644 index 00000000..f0ef33ea --- /dev/null +++ b/marge/branch.py @@ -0,0 +1,25 @@ +from . import gitlab + + +GET = gitlab.GET + + +class Branch(gitlab.Resource): + + @classmethod + def fetch_by_name(cls, project_id, branch, api): + info = api.call(GET( + '/projects/{project_id}/repository/branches/{branch}'.format( + project_id=project_id, + branch=branch, + ), + )) + return cls(api, info) + + @property + def name(self): + return self.info['name'] + + @property + def protected(self): + return self.info['protected'] diff --git a/marge/git.py b/marge/git.py index 63ffcd56..3b424b5e 100644 --- a/marge/git.py +++ b/marge/git.py @@ -30,9 +30,11 @@ def _filter_branch_script(trailer_name, trailer_values): return filter_script -class Repo(namedtuple('Repo', 'remote_url local_path ssh_key_file timeout')): +class Repo(namedtuple('Repo', 'remote_url local_path ssh_key_file timeout reference')): def clone(self): - self.git('clone', '--origin=origin', self.remote_url, self.local_path, from_repo=False) + reference_flag = '--reference=' + self.reference if self.reference else '' + self.git('clone', '--origin=origin', reference_flag, self.remote_url, + self.local_path, from_repo=False) def config_user_info(self, user_name, user_email): self.git('config', 'user.email', user_email) @@ -159,7 +161,7 @@ def git(self, *args, from_repo=True): if self.ssh_key_file: env = os.environ.copy() # ssh's handling of identity files is infuriatingly dumb, to get it - # to actualy really use the IdentityFile we pass in via -i we also + # to actually really use the IdentityFile we pass in via -i we also # need to tell it to ignore ssh-agent (IdentitiesOnly=true) and not # read in any identities from ~/.ssh/config etc (-F /dev/null), # because they append and it tries them in order, starting with config file diff --git a/marge/gitlab.py b/marge/gitlab.py index 55316561..dc10c741 100644 --- a/marge/gitlab.py +++ b/marge/gitlab.py @@ -5,7 +5,7 @@ import requests 
-class Api(object): +class Api: def __init__(self, gitlab_url, auth_token): self._auth_token = auth_token self._api_base_url = gitlab_url.rstrip('/') + '/api/v4' @@ -17,15 +17,25 @@ def call(self, command, sudo=None, response_json=None): if sudo: headers['SUDO'] = '%d' % sudo log.debug('REQUEST: %s %s %r %r', method.__name__.upper(), url, headers, command.call_args) - response = method(url, headers=headers, **command.call_args) + # Timeout to prevent indefinitely hanging requests. 60s is very conservative, + # but should be short enough to not cause any practical annoyances. We just + # crash rather than retry since marge-bot should be run in a restart loop anyway. + try: + response = method(url, headers=headers, timeout=60, **command.call_args) + except requests.exceptions.Timeout as err: + log.error('Request timeout: %s', err) + raise log.debug('RESPONSE CODE: %s', response.status_code) log.debug('RESPONSE BODY: %r', response.content) if response_json is not None: response_json.update(response.json()) + if response.status_code == 202: + return True # Accepted + if response.status_code == 204: - return True + return True # NoContent if response.status_code < 300: return command.extract(response.json()) if command.extract else response.json() @@ -191,7 +201,7 @@ class UnexpectedError(ApiError): pass -class Resource(object): +class Resource: def __init__(self, api, info): self._info = info self._api = api @@ -227,3 +237,6 @@ def parse(cls, string): @property def is_ee(self): return self.edition == 'ee' + + def __str__(self): + return '%s-%s' % ('.'.join(map(str, self.release)), self.edition) diff --git a/marge/interval.py b/marge/interval.py index 11f4807c..325007e1 100644 --- a/marge/interval.py +++ b/marge/interval.py @@ -30,7 +30,7 @@ def find_weekday(string_or_day): raise ValueError('Not a week day: %r' % string_or_day) -class WeeklyInterval(object): +class WeeklyInterval: def __init__(self, from_weekday, from_time, to_weekday, to_time): from_weekday = 
find_weekday(from_weekday) to_weekday = find_weekday(to_weekday) @@ -82,9 +82,12 @@ def from_human(cls, string): def parse_part(part): part = part.replace('@', ' ') - weekday, time = part.split() + parts = part.split() + weekday = parts[0] + time = parts[1] + timezone = parts[2] if len(parts) > 2 else 'UTC' weekday = find_weekday(weekday) - time = maya.parse(time).datetime().time() + time = maya.parse(time, timezone=timezone).datetime().time() return weekday, time from_weekday, from_time = parse_part(from_) @@ -111,7 +114,7 @@ def _interval_covers(self, date): return True -class IntervalUnion(object): +class IntervalUnion: def __init__(self, iterable): self._intervals = list(iterable) diff --git a/marge/job.py b/marge/job.py index 7b442f3b..2e623527 100644 --- a/marge/job.py +++ b/marge/job.py @@ -1,18 +1,21 @@ # pylint: disable=too-many-locals,too-many-branches,too-many-statements +import enum import logging as log import time import re from collections import namedtuple from datetime import datetime, timedelta -from . import git +from . 
import git, gitlab +from .branch import Branch from .interval import IntervalUnion +from .merge_request import MergeRequestRebaseFailed from .project import Project from .user import User from .pipeline import Pipeline -class MergeJob(object): +class MergeJob: def __init__(self, *, api, user, project, repo, options): self._api = api @@ -57,13 +60,12 @@ def ensure_mergeable_mr(self, merge_request): if state not in ('opened', 'reopened', 'locked'): if state in ('merged', 'closed'): raise SkipMerge('The merge request is already {}!'.format(state)) - else: - raise CannotMerge('The merge request is in an unknown state: {}'.format(state)) + raise CannotMerge('The merge request is in an unknown state: {}'.format(state)) if self.during_merge_embargo(): raise SkipMerge('Merge embargo!') - if self._user.id != merge_request.assignee_id: + if self._user.id not in merge_request.assignee_ids: raise SkipMerge('It is not assigned to me anymore!') def add_trailers(self, merge_request): @@ -71,11 +73,17 @@ def add_trailers(self, merge_request): log.info('Adding trailers for MR !%s', merge_request.iid) # add Reviewed-by + should_add_reviewers = ( + self._options.add_reviewers and + self._options.fusion is not Fusion.gitlab_rebase + ) reviewers = ( _get_reviewer_names_and_emails( + merge_request.fetch_commits(), merge_request.fetch_approvals(), self._api, - ) if self._options.add_reviewers else None + ) if should_add_reviewers + else None ) sha = None if reviewers is not None: @@ -87,12 +95,18 @@ def add_trailers(self, merge_request): ) # add Tested-by - should_add_tested = self._options.add_tested and self._project.only_allow_merge_if_pipeline_succeeds + should_add_tested = ( + self._options.add_tested and + self._project.only_allow_merge_if_pipeline_succeeds and + self._options.fusion is Fusion.rebase + ) + tested_by = ( - ['{0._user.name} <{1.web_url}>'.format(self, merge_request)] if should_add_tested + ['{0._user.name} <{1.web_url}>'.format(self, merge_request)] + if 
should_add_tested else None ) - if tested_by is not None and not self._options.use_merge_strategy: + if tested_by is not None: sha = self._repo.tag_with_trailer( trailer_name='Tested-by', trailer_values=tested_by, @@ -101,8 +115,13 @@ def add_trailers(self, merge_request): ) # add Part-of + should_add_parts_of = ( + self._options.add_part_of and + self._options.fusion is not Fusion.gitlab_rebase + ) part_of = ( - '<{0.web_url}>'.format(merge_request) if self._options.add_part_of + '<{0.web_url}>'.format(merge_request) + if should_add_parts_of else None ) if part_of is not None: @@ -117,17 +136,25 @@ def add_trailers(self, merge_request): def get_mr_ci_status(self, merge_request, commit_sha=None): if commit_sha is None: commit_sha = merge_request.sha - pipelines = Pipeline.pipelines_by_branch( - merge_request.source_project_id, - merge_request.source_branch, - self._api, - ) - current_pipeline = next(iter(pipelines), None) + + if self._api.version().release >= (10, 5, 0): + pipelines = Pipeline.pipelines_by_merge_request( + merge_request.target_project_id, + merge_request.iid, + self._api, + ) + else: + pipelines = Pipeline.pipelines_by_branch( + merge_request.source_project_id, + merge_request.source_branch, + self._api, + ) + current_pipeline = next(iter(pipeline for pipeline in pipelines if pipeline.sha == commit_sha), None) create_pipeline = self.opts.create_pipeline trigger = False - if current_pipeline and current_pipeline.sha == commit_sha: + if current_pipeline: ci_status = current_pipeline.status jobs = current_pipeline.get_jobs() if not any(self.opts.job_regexp.match(j['name']) for j in jobs): @@ -192,6 +219,10 @@ def wait_for_ci_to_pass(self, merge_request, commit_sha=None): log.info('CI for MR !%s passed', merge_request.iid) return + if ci_status == 'skipped': + log.info('CI for MR !%s skipped', merge_request.iid) + return + if ci_status == 'failed': raise CannotMerge('CI failed!') @@ -226,12 +257,14 @@ def maybe_reapprove(self, merge_request, 
approvals): def sufficient_approvals(): return merge_request.fetch_approvals().sufficient # Make sure we don't race by ensuring approvals have reset since the push - time_0 = datetime.utcnow() waiting_time_in_secs = 5 + approval_timeout_in_secs = self._options.approval_timeout.total_seconds() + iterations = round(approval_timeout_in_secs / waiting_time_in_secs) log.info('Checking if approvals have reset') - while sufficient_approvals() and datetime.utcnow() - time_0 < self._options.approval_timeout: + while sufficient_approvals() and iterations: log.debug('Approvals haven\'t reset yet, sleeping for %s secs', waiting_time_in_secs) time.sleep(waiting_time_in_secs) + iterations -= 1 if not sufficient_approvals(): approvals.reapprove() @@ -243,7 +276,7 @@ def fetch_source_project(self, merge_request): remote = 'source' remote_url = source_project.ssh_url_to_repo self._repo.fetch( - remote=remote, + remote_name=remote, remote_url=remote_url, ) return source_project, remote_url, remote @@ -257,9 +290,18 @@ def get_source_project(self, merge_request): ) return source_project + def get_target_project(self, merge_request): + return Project.fetch_by_id(merge_request.target_project_id, api=self._api) + def fuse(self, source, target, source_repo_url=None, local=False): # NOTE: this leaves git switched to branch_a - strategy = self._repo.merge if self._options.use_merge_strategy else self._repo.rebase + strategies = { + Fusion.rebase: self._repo.rebase, + Fusion.merge: self._repo.merge, + Fusion.gitlab_rebase: self._repo.rebase, # we rebase locally to know sha + } + + strategy = strategies[self._options.fusion] return strategy( source, target, @@ -287,37 +329,32 @@ def update_from_target_branch_and_push( if source_repo_url is None and source_branch == target_branch: raise CannotMerge('Source and target branch seem to coincide!') - branch_updated = branch_rewritten = changes_pushed = False + branch_update_done = commits_rewrite_done = False try: + initial_mr_sha = 
merge_request.sha updated_sha = self.fuse( source_branch, target_branch, source_repo_url=source_repo_url, ) - branch_updated = True + branch_update_done = True # The fuse above fetches origin again, so we are now safe to fetch # the sha from the remote target branch. target_sha = repo.get_commit_hash('origin/' + target_branch) if updated_sha == target_sha: raise CannotMerge('These changes already exist in branch `{}`.'.format(target_branch)) - rewritten_sha = self.add_trailers(merge_request) or updated_sha - branch_rewritten = True - repo.push(source_branch, source_repo_url=source_repo_url, force=True) - changes_pushed = True + final_sha = self.add_trailers(merge_request) or updated_sha + commits_rewrite_done = True + branch_was_modified = final_sha != initial_mr_sha + self.synchronize_mr_with_local_changes(merge_request, branch_was_modified, source_repo_url) except git.GitError: - if not branch_updated: + if not branch_update_done: raise CannotMerge('Got conflicts while rebasing, your problem now...') - if not branch_rewritten: + if not commits_rewrite_done: raise CannotMerge('Failed on filter-branch; check my logs!') - if not changes_pushed: - if self.opts.use_merge_strategy: - raise CannotMerge('Failed to push merged changes, check my logs!') - else: - raise CannotMerge('Failed to push rebased changes, check my logs!') - raise else: - return target_sha, updated_sha, rewritten_sha + return target_sha, updated_sha, final_sha finally: # A failure to clean up probably means something is fucked with the git repo # and likely explains any previous failure, so it will better to just @@ -325,15 +362,88 @@ def update_from_target_branch_and_push( if source_branch != 'master': repo.checkout_branch('master') repo.remove_branch(source_branch) - else: - assert source_repo_url is not None + def synchronize_mr_with_local_changes( + self, + merge_request, + branch_was_modified, + source_repo_url=None, + ): + if self._options.fusion is Fusion.gitlab_rebase: + 
self.synchronize_using_gitlab_rebase(merge_request) + else: + self.push_force_to_mr( + merge_request, + branch_was_modified, + source_repo_url=source_repo_url, + ) -def _get_reviewer_names_and_emails(approvals, api): - """Return a list ['A. Prover '.format(User.fetch_by_id(uid, api)) for uid in uids] + users = [User.fetch_by_id(uid, api) for uid in uids] + self_reviewed = {commit['author_email'] for commit in commits} & {user.email for user in users} + if self_reviewed and len(users) <= 1: + raise CannotMerge('Commits require at least one independent reviewer.') + return ['{0.name} <{0.email}>'.format(user) for user in users] + + +@enum.unique +class Fusion(enum.Enum): + merge = 0 + rebase = 1 + gitlab_rebase = 2 JOB_OPTIONS = [ @@ -344,7 +454,7 @@ def _get_reviewer_names_and_emails(approvals, api): 'approval_timeout', 'embargo', 'ci_timeout', - 'use_merge_strategy', + 'fusion', 'job_regexp', 'create_pipeline', ] @@ -361,7 +471,7 @@ def requests_commit_tagging(self): def default( cls, *, add_tested=False, add_part_of=False, add_reviewers=False, reapprove=False, - approval_timeout=None, embargo=None, ci_timeout=None, use_merge_strategy=False, + approval_timeout=None, embargo=None, ci_timeout=None, fusion=Fusion.rebase, job_regexp=re.compile('.*'), create_pipeline=False ): approval_timeout = approval_timeout or timedelta(seconds=0) @@ -375,7 +485,7 @@ def default( approval_timeout=approval_timeout, embargo=embargo, ci_timeout=ci_timeout, - use_merge_strategy=use_merge_strategy, + fusion=fusion, job_regexp=job_regexp, create_pipeline=create_pipeline, ) @@ -393,3 +503,11 @@ def reason(self): class SkipMerge(CannotMerge): pass + + +class GitLabRebaseResultMismatch(CannotMerge): + def __init__(self, gitlab_sha, expected_sha): + super(GitLabRebaseResultMismatch, self).__init__( + "GitLab rebase ended up with a different commit:" + "I expected %s but they got %s" % (expected_sha, gitlab_sha) + ) diff --git a/marge/merge_request.py b/marge/merge_request.py index 
f0f22069..eab13a39 100644 --- a/marge/merge_request.py +++ b/marge/merge_request.py @@ -1,3 +1,6 @@ +import logging as log +import time + from . import gitlab from .approvals import Approvals @@ -31,14 +34,15 @@ def fetch_by_iid(cls, project_id, merge_request_iid, api): return merge_request @classmethod - def fetch_all_open_for_user(cls, project_id, user_id, api): + def fetch_all_open_for_user(cls, project_id, user_id, api, merge_order): all_merge_request_infos = api.collect_all_pages(GET( '/projects/{project_id}/merge_requests'.format(project_id=project_id), - {'state': 'opened', 'order_by': 'created_at', 'sort': 'asc'}, + {'state': 'opened', 'order_by': merge_order, 'sort': 'asc'}, )) my_merge_request_infos = [ mri for mri in all_merge_request_infos - if (mri['assignee'] or {}).get('id') == user_id + if ((mri.get('assignee', {}) or {}).get('id') == user_id) or + (user_id in [assignee.get('id') for assignee in (mri.get('assignees', []) or [])]) ] return [cls(api, merge_request_info) for merge_request_info in my_merge_request_infos] @@ -60,9 +64,18 @@ def state(self): return self.info['state'] @property - def assignee_id(self): - assignee = self.info['assignee'] or {} - return assignee.get('id') + def rebase_in_progress(self): + return self.info.get('rebase_in_progress', False) + + @property + def merge_error(self): + return self.info.get('merge_error') + + @property + def assignee_ids(self): + if 'assignees' in self.info: + return [assignee.get('id') for assignee in (self.info['assignees'] or [])] + return [(self.info.get('assignee', {}) or {}).get('id')] @property def author_id(self): @@ -116,6 +129,31 @@ def comment(self, message): return self._api.call(POST(notes_url, {'body': message})) + def rebase(self): + self.refetch_info() + + if not self.rebase_in_progress: + self._api.call(PUT( + '/projects/{0.project_id}/merge_requests/{0.iid}/rebase'.format(self), + )) + else: + # We wanted to rebase and someone just happened to press the button for us! 
+ log.info('A rebase was already in progress on the merge request!') + + max_attempts = 30 + wait_between_attempts_in_secs = 1 + + for _ in range(max_attempts): + self.refetch_info() + if not self.rebase_in_progress: + if self.merge_error: + raise MergeRequestRebaseFailed(self.merge_error) + return + + time.sleep(wait_between_attempts_in_secs) + + raise TimeoutError('Waiting for merge request to be rebased by GitLab') + def accept(self, remove_branch=False, sha=None): return self._api.call(PUT( '/projects/{0.project_id}/merge_requests/{0.iid}/merge'.format(self), @@ -139,7 +177,7 @@ def assign_to(self, user_id): )) def unassign(self): - return self.assign_to(None) + return self.assign_to(0) def fetch_approvals(self): # 'id' needed for for GitLab 9.2.2 hack (see Approvals.refetch_info()) @@ -148,6 +186,9 @@ def fetch_approvals(self): approvals.refetch_info() return approvals + def fetch_commits(self): + return self._api.call(GET('/projects/{0.project_id}/merge_requests/{0.iid}/commits'.format(self))) + def triggered(self, user_id): if self._api.version().release >= (9, 2, 2): notes_url = '/projects/{0.project_id}/merge_requests/{0.iid}/notes'.format(self) @@ -159,3 +200,7 @@ def triggered(self, user_id): message = 'I created a new pipeline for [{sha:.8s}]'.format(sha=self.sha) my_comments = [c['body'] for c in comments if c['author']['id'] == user_id] return any(message in c for c in my_comments) + + +class MergeRequestRebaseFailed(Exception): + pass diff --git a/marge/pipeline.py b/marge/pipeline.py index 4ebc2d07..e49de858 100644 --- a/marge/pipeline.py +++ b/marge/pipeline.py @@ -31,6 +31,17 @@ def pipelines_by_branch( return [cls(api, pipeline_info, project_id) for pipeline_info in pipelines_info] + @classmethod + def pipelines_by_merge_request(cls, project_id, merge_request_iid, api): + """Fetch all pipelines for a merge request in descending order of pipeline ID.""" + pipelines_info = api.call(GET( + 
'/projects/{project_id}/merge_requests/{merge_request_iid}/pipelines'.format( + project_id=project_id, merge_request_iid=merge_request_iid, + ) + )) + pipelines_info.sort(key=lambda pipeline_info: pipeline_info['id'], reverse=True) + return [cls(api, pipeline_info, project_id) for pipeline_info in pipelines_info] + @classmethod def create(cls, project_id, ref, api): try: diff --git a/marge/project.py b/marge/project.py index 95e69d76..80860fbe 100644 --- a/marge/project.py +++ b/marge/project.py @@ -1,5 +1,5 @@ import logging as log -from enum import Enum, unique +from enum import IntEnum, unique from functools import partial from . import gitlab @@ -27,9 +27,21 @@ def filter_by_path_with_namespace(projects): @classmethod def fetch_all_mine(cls, api): + projects_kwargs = {'membership': True, + 'with_merge_requests_enabled': True, + 'archived': False, + } + + # GitLab has an issue where projects may not show appropriate permissions in nested groups. Using + # `min_access_level` is known to provide the correct projects, so we'll prefer this method + # if it's available. See #156 for more details. + use_min_access_level = api.version().release >= (11, 2) + if use_min_access_level: + projects_kwargs["min_access_level"] = int(AccessLevel.developer) + projects_info = api.collect_all_pages(GET( '/projects', - {'membership': True, 'with_merge_requests_enabled': True}, + projects_kwargs, )) def project_seems_ok(project_info): @@ -43,7 +55,19 @@ def project_seems_ok(project_info): return permissions_ok - return [cls(api, project_info) for project_info in projects_info if project_seems_ok(project_info)] + projects = [] + + for project_info in projects_info: + if use_min_access_level: + # We know we fetched projects with at least developer access, so we'll use that as + # a fallback if GitLab doesn't correctly report permissions as described above. 
+ project_info["permissions"]["marge"] = {"access_level": AccessLevel.developer} + elif not project_seems_ok(project_info): + continue + + projects.append(cls(api, project_info)) + + return projects @property def path_with_namespace(self): @@ -61,6 +85,10 @@ def merge_requests_enabled(self): def only_allow_merge_if_pipeline_succeeds(self): return self.info['only_allow_merge_if_pipeline_succeeds'] + @property + def only_allow_merge_if_all_discussions_are_resolved(self): # pylint: disable=invalid-name + return self.info['only_allow_merge_if_all_discussions_are_resolved'] + @property def approvals_required(self): return self.info['approvals_before_merge'] @@ -68,13 +96,17 @@ def approvals_required(self): @property def access_level(self): permissions = self.info['permissions'] - effective_access = permissions['project_access'] or permissions['group_access'] + effective_access = ( + permissions['project_access'] + or permissions['group_access'] + or permissions.get("marge") + ) assert effective_access is not None, "GitLab failed to provide user permissions on project" return AccessLevel(effective_access['access_level']) @unique -class AccessLevel(Enum): +class AccessLevel(IntEnum): # See https://docs.gitlab.com/ce/api/access_requests.html guest = 10 reporter = 20 diff --git a/marge/pylintrc b/marge/pylintrc new file mode 120000 index 00000000..05334af9 --- /dev/null +++ b/marge/pylintrc @@ -0,0 +1 @@ +../pylintrc \ No newline at end of file diff --git a/marge/single_merge_job.py b/marge/single_merge_job.py index 2d2df7e8..bfc6316f 100644 --- a/marge/single_merge_job.py +++ b/marge/single_merge_job.py @@ -5,7 +5,7 @@ from .
import git, gitlab from .commit import Commit -from .job import CannotMerge, MergeJob, SkipMerge +from .job import CannotMerge, GitLabRebaseResultMismatch, MergeJob, SkipMerge class SingleMergeJob(MergeJob): @@ -48,11 +48,19 @@ def update_merge_request_and_accept(self, approvals): while not updated_into_up_to_date_target_branch: self.ensure_mergeable_mr(merge_request) source_project, source_repo_url, _ = self.fetch_source_project(merge_request) - # NB. this will be a no-op if there is nothing to update/rewrite - target_sha, _updated_sha, actual_sha = self.update_from_target_branch_and_push( - merge_request, - source_repo_url=source_repo_url, - ) + target_project = self.get_target_project(merge_request) + try: + # NB. this will be a no-op if there is nothing to update/rewrite + + target_sha, _updated_sha, actual_sha = self.update_from_target_branch_and_push( + merge_request, + source_repo_url=source_repo_url, + ) + except GitLabRebaseResultMismatch: + log.info("Gitlab rebase didn't give expected result") + merge_request.comment("Someone skipped the queue! Will have to try again...") + continue + log.info('Commit id to merge %r (into: %r)', actual_sha, target_sha) time.sleep(5) @@ -65,9 +73,12 @@ def update_merge_request_and_accept(self, approvals): self.maybe_reapprove(merge_request, approvals) - if source_project.only_allow_merge_if_pipeline_succeeds: + if target_project.only_allow_merge_if_pipeline_succeeds: self.wait_for_ci_to_pass(merge_request, actual_sha) time.sleep(2) + + self.ensure_mergeable_mr(merge_request) + try: merge_request.accept(remove_branch=True, sha=actual_sha) except gitlab.NotAcceptable as err: @@ -105,20 +116,25 @@ def update_merge_request_and_accept(self, approvals): raise CannotMerge( 'The request was marked as WIP as I was processing it (maybe a WIP commit?)' ) - elif merge_request.state == 'reopened': + if merge_request.state == 'reopened': raise CannotMerge( 'GitLab refused to merge this branch. 
I suspect that a Push Rule or a git-hook ' 'is rejecting my commits; maybe my email needs to be white-listed?' ) - elif merge_request.state == 'closed': + if merge_request.state == 'closed': raise CannotMerge('Someone closed the merge request while I was attempting to merge it.') - elif merge_request.state == 'merged': + if merge_request.state == 'merged': # We are not covering any observed behaviour here, but if at this # point the request is merged, our job is done, so no need to complain log.info('Merge request is already merged, someone was faster!') updated_into_up_to_date_target_branch = True else: - raise CannotMerge("GitLab refused to merge this request and I don't know why!") + raise CannotMerge( + "GitLab refused to merge this request and I don't know why!" + ( + " Maybe you have unresolved discussions?" + if self._project.only_allow_merge_if_all_discussions_are_resolved else "" + ) + ) except gitlab.ApiError: log.exception('Unanticipated ApiError from GitLab on merge attempt') raise CannotMerge('Had some issue with GitLab, check my logs...') diff --git a/marge/store.py b/marge/store.py index 24982d6f..5f819673 100644 --- a/marge/store.py +++ b/marge/store.py @@ -3,14 +3,15 @@ from . 
import git -class RepoManager(object): +class RepoManager: - def __init__(self, user, root_dir, ssh_key_file=None, timeout=None): + def __init__(self, user, root_dir, ssh_key_file=None, timeout=None, reference=None): self._root_dir = root_dir self._user = user self._ssh_key_file = ssh_key_file self._repos = {} self._timeout = timeout + self._reference = reference def repo_for_project(self, project): repo = self._repos.get(project.id) @@ -18,7 +19,8 @@ def repo_for_project(self, project): repo_url = project.ssh_url_to_repo local_repo_dir = tempfile.mkdtemp(dir=self._root_dir) - repo = git.Repo(repo_url, local_repo_dir, ssh_key_file=self._ssh_key_file, timeout=self._timeout) + repo = git.Repo(repo_url, local_repo_dir, ssh_key_file=self._ssh_key_file, + timeout=self._timeout, reference=self._reference) repo.clone() repo.config_user_info( user_email=self._user.email, diff --git a/nix/sources.json b/nix/sources.json new file mode 100644 index 00000000..c51528f0 --- /dev/null +++ b/nix/sources.json @@ -0,0 +1,25 @@ +{ + "nixpkgs": { + "url": "https://github.com/NixOS/nixpkgs-channels/archive/915ce0f1e1a75adec7079ddb6cd3ffba5036b3fc.tar.gz", + "owner": "NixOS", + "branch": "nixos-19.03", + "url_template": "https://github.com///archive/.tar.gz", + "repo": "nixpkgs-channels", + "type": "tarball", + "sha256": "1kmx29i3xy4701z4lgmv5xxslb1djahrjxmrf83ig1whb4vgk4wm", + "description": "Nixpkgs/NixOS branches that track the Nixpkgs/NixOS channels", + "rev": "915ce0f1e1a75adec7079ddb6cd3ffba5036b3fc" + }, + "niv": { + "homepage": "https://github.com/nmattia/niv", + "url": "https://github.com/nmattia/niv/archive/e5e441998ede88dfce5b8b9a7ea99e1e0f1102fa.tar.gz", + "owner": "nmattia", + "branch": "master", + "url_template": "https://github.com///archive/.tar.gz", + "repo": "niv", + "type": "tarball", + "sha256": "0s3pwakbp9qmwzznl8xd3smmymz1s2vrvyip8yizqdllaps4pf18", + "description": "Easy dependency management for Nix projects", + "rev": 
"e5e441998ede88dfce5b8b9a7ea99e1e0f1102fa" + } +} diff --git a/nix/sources.nix b/nix/sources.nix new file mode 100644 index 00000000..6383bd44 --- /dev/null +++ b/nix/sources.nix @@ -0,0 +1,11 @@ +# Read in the json spec for packages we want (so it can be auto-updated). +# niv: no_update + +# make travis happy, reasonably new nix doesn't need this +let mapAttrs = builtins.mapAttrs or + (f: set: + builtins.listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (builtins.attrNames set))); +in with builtins; + mapAttrs + (_: spec: spec // { outPath = fetchTarball { inherit (spec) url sha256; }; }) + (fromJSON (readFile ./sources.json)) diff --git a/pinnedNixpkgs.nix b/pinnedNixpkgs.nix deleted file mode 100644 index 5ce5fce5..00000000 --- a/pinnedNixpkgs.nix +++ /dev/null @@ -1,9 +0,0 @@ -let - fetchFromGitHub = (import {}).fetchFromGitHub; - pkgs = import (fetchFromGitHub { - owner = "NixOS"; - repo = "nixpkgs"; - rev = "90afb0c10fe6f437fca498298747b2bcb6a77d39"; - sha256 = "0mvzdw5aygi1vjnvm0bc8bp7iwb9rypiqg749m6a6km84m7srm0w"; - }) {}; -in pkgs diff --git a/pylintrc b/pylintrc index ea7f1d65..95a15272 100644 --- a/pylintrc +++ b/pylintrc @@ -14,6 +14,7 @@ disable=bad-continuation, fixme, missing-docstring, no-self-use, + unsubscriptable-object [SIMILARITIES] min-similarity-lines=10 diff --git a/requirements.nix b/requirements.nix index e29f466e..247b9c20 100644 --- a/requirements.nix +++ b/requirements.nix @@ -23,10 +23,7 @@ let self: super: { bootstrapped-pip = super.bootstrapped-pip.overrideDerivation (old: { patchPhase = old.patchPhase + '' - sed -i \ - -e "s|paths_to_remove.remove(auto_confirm)|#paths_to_remove.remove(auto_confirm)|" \ - -e "s|self.uninstalled = paths_to_remove|#self.uninstalled = paths_to_remove|" \ - $out/${pkgs.python35.sitePackages}/pip/req/req_install.py + sed -i -e "s|paths_to_remove.remove(auto_confirm)|#paths_to_remove.remove(auto_confirm)|" -e "s|self.uninstalled = paths_to_remove|#self.uninstalled = paths_to_remove|" 
$out/${pkgs.python35.sitePackages}/pip/req/req_install.py ''; }); }; @@ -43,13 +40,11 @@ let buildInputs = [ makeWrapper ] ++ (builtins.attrValues pkgs); buildCommand = '' mkdir -p $out/bin - ln -s ${pythonPackages.python.interpreter} \ - $out/bin/${pythonPackages.python.executable} - for dep in ${builtins.concatStringsSep " " - (builtins.attrValues pkgs)}; do + ln -s ${pythonPackages.python.interpreter} $out/bin/${pythonPackages.python.executable} + for dep in ${builtins.concatStringsSep " " (builtins.attrValues pkgs)}; do if [ -d "$dep/bin" ]; then for prog in "$dep/bin/"*; do - if [ -x "$prog" ] && [ -f "$prog" ]; then + if [ -f $prog ]; then ln -s $prog $out/bin/`basename $prog` fi done @@ -60,8 +55,7 @@ let done pushd $out/bin ln -s ${pythonPackages.python.executable} python - ln -s ${pythonPackages.python.executable} \ - python3 + ln -s ${pythonPackages.python.executable} python3 popd ''; passthru.interpreter = pythonPackages.python; @@ -72,9 +66,7 @@ let mkDerivation = pythonPackages.buildPythonPackage; packages = pkgs; overrideDerivation = drv: f: - pythonPackages.buildPythonPackage ( - drv.drvAttrs // f drv.drvAttrs // { meta = drv.meta; } - ); + pythonPackages.buildPythonPackage (drv.drvAttrs // f drv.drvAttrs // { meta = drv.meta; }); withPackages = pkgs'': withPackages (pkgs // pkgs''); }; @@ -82,9 +74,10 @@ let python = withPackages {}; generated = self: { + "ConfigArgParse" = python.mkDerivation { - name = "ConfigArgParse-0.13.0"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/77/61/ae928ce6ab85d4479ea198488cf5ffa371bd4ece2030c0ee85ff668deac5/ConfigArgParse-0.13.0.tar.gz"; sha256 = "e6441aa58e23d3d122055808e5e2220fd742dff6e1e51082d2a4e4ed145dd788"; }; + name = "ConfigArgParse-0.14.0"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/55/ea/f0ade52790bcd687127a302b26c1663bf2e0f23210d5281dbfcd1dfcda28/ConfigArgParse-0.14.0.tar.gz"; sha256 = "2e2efe2be3f90577aca9415e32cb629aa2ecd92078adbe27b53a03e53ff12e91"; }; 
doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ @@ -97,65 +90,95 @@ let }; }; + + "PyYAML" = python.mkDerivation { - name = "PyYAML-3.12"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/4a/85/db5a2df477072b2902b0eb892feb37d88ac635d36245a72a6a69b23b383a/PyYAML-3.12.tar.gz"; sha256 = "592766c6303207a20efc445587778322d7f73b161bd994f227adaa341ba212ab"; }; + name = "PyYAML-5.1"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/9f/2c/9417b5c774792634834e730932745bc09a7d36754ca00acf1ccd1ac2594d/PyYAML-5.1.tar.gz"; sha256 = "436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; meta = with pkgs.stdenv.lib; { - homepage = "http://pyyaml.org/wiki/PyYAML"; + homepage = "https://github.com/yaml/pyyaml"; license = licenses.mit; description = "YAML parser and emitter for Python"; }; }; + + "astroid" = python.mkDerivation { - name = "astroid-1.6.1"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/48/63/1e366849160817cc06e273d38e93ac0826ba83da84e8b7879786b5c67d04/astroid-1.6.1.tar.gz"; sha256 = "f0a0e386dbca9f93ea9f3ea6f32b37a24720502b7baa9cb17c3976a680d43a06"; }; + name = "astroid-2.2.5"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/85/e3/4ec967f7db4644b1fe849e4724191346d3e3f8172631ad7266f7f17a6018/astroid-2.2.5.tar.gz"; sha256 = "6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ self."lazy-object-proxy" self."six" + self."typed-ast" self."wrapt" ]; meta = with pkgs.stdenv.lib; { homepage = "https://github.com/PyCQA/astroid"; license = licenses.lgpl3; - description = "A abstract syntax tree for Python with inference support."; + description = "An abstract syntax tree for Python with inference support."; }; }; + + + "atomicwrites" = python.mkDerivation { + 
name = "atomicwrites-1.3.0"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz"; sha256 = "75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"; }; + doCheck = commonDoCheck; + buildInputs = commonBuildInputs; + propagatedBuildInputs = [ ]; + meta = with pkgs.stdenv.lib; { + homepage = "https://github.com/untitaker/python-atomicwrites"; + license = licenses.mit; + description = "Atomic file writes."; + }; + }; + + + "attrs" = python.mkDerivation { - name = "attrs-17.4.0"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/8b/0b/a06cfcb69d0cb004fde8bc6f0fd192d96d565d1b8aa2829f0f20adb796e5/attrs-17.4.0.tar.gz"; sha256 = "1c7960ccfd6a005cd9f7ba884e6316b5e430a3f1a6c37c5f87d8b43f83b54ec9"; }; + name = "attrs-19.1.0"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/cc/d9/931a24cc5394f19383fbbe3e1147a0291276afa43a0dc3ed0d6cd9fda813/attrs-19.1.0.tar.gz"; sha256 = "f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; - propagatedBuildInputs = []; + propagatedBuildInputs = [ + self."coverage" + self."pytest" + self."six" + ]; meta = with pkgs.stdenv.lib; { - homepage = "http://www.attrs.org/"; + homepage = "https://www.attrs.org/"; license = licenses.mit; description = "Classes Without Boilerplate"; }; }; + + "certifi" = python.mkDerivation { - name = "certifi-2018.1.18"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/15/d4/2f888fc463d516ff7bf2379a4e9a552fef7f22a94147655d9b1097108248/certifi-2018.1.18.tar.gz"; sha256 = "edbc3f203427eef571f79a7692bb160a2b0f7ccaa31953e99bd17e307cf63f7d"; }; + name = "certifi-2019.3.9"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/06/b8/d1ea38513c22e8c906275d135818fee16ad8495985956a9b7e2bb21942a1/certifi-2019.3.9.tar.gz"; sha256 = 
"b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; meta = with pkgs.stdenv.lib; { - homepage = "http://certifi.io/"; + homepage = "https://certifi.io/"; license = licenses.mpl20; description = "Python package for providing Mozilla's CA Bundle."; }; }; + + "chardet" = python.mkDerivation { name = "chardet-3.0.4"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"; sha256 = "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"; }; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"; sha256 = "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -166,29 +189,32 @@ let }; }; + + "coverage" = python.mkDerivation { - name = "coverage-4.5"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/11/24/6c0503ffe54c639d9b56f037daf723f7f09853d8efa668a836ee54ae0b2a/coverage-4.5.tar.gz"; sha256 = "b7a06a523dfeaf417da630d46ad4f4e11ca1bae6202c9312c4cb987dde5792fc"; }; + name = "coverage-4.5.3"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/82/70/2280b5b29a0352519bb95ab0ef1ea942d40466ca71c53a2085bdeff7b0eb/coverage-4.5.3.tar.gz"; sha256 = "9de60893fb447d1e797f6bf08fdf0dbcda0c1e34c1b06c92bd3a363c0ea8c609"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; meta = with pkgs.stdenv.lib; { - homepage = "https://bitbucket.org/ned/coveragepy"; + homepage = "https://github.com/nedbat/coveragepy"; license = licenses.asl20; description = "Code coverage measurement for Python"; }; }; + + "dateparser" = python.mkDerivation { - name = "dateparser-0.6.0"; - src = pkgs.fetchurl { url = 
"https://pypi.python.org/packages/89/03/e8890489fe1c458f155e88f92cfc4d399894ff38721629fda925c3793b66/dateparser-0.6.0.tar.gz"; sha256 = "f8c24317120b06f71691d28076764ec084a132be2a250a78fdf54f6b427cac95"; }; + name = "dateparser-0.7.1"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/51/6f/3bf59d1cfd7845a8614bae2c2ccd540074695015210285127aab9088ea14/dateparser-0.7.1.tar.gz"; sha256 = "42d51be54e74a8e80a4d76d1fa6e4edd997098fce24ad2d94a2eab5ef247193e"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ self."python-dateutil" self."pytz" self."regex" - self."ruamel.yaml" self."tzlocal" ]; meta = with pkgs.stdenv.lib; { @@ -198,12 +224,30 @@ let }; }; + + + "entrypoints" = python.mkDerivation { + name = "entrypoints-0.3"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/b4/ef/063484f1f9ba3081e920ec9972c96664e2edb9fdc3d8669b0e3b8fc0ad7c/entrypoints-0.3.tar.gz"; sha256 = "c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"; }; + doCheck = commonDoCheck; + buildInputs = commonBuildInputs; + propagatedBuildInputs = [ ]; + meta = with pkgs.stdenv.lib; { + homepage = "https://github.com/takluyver/entrypoints"; + license = ""; + description = "Discover and load entry points from installed packages."; + }; + }; + + + "flake8" = python.mkDerivation { - name = "flake8-3.5.0"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/1e/ab/7730f6d6cdf73a3b7f98a2fe3b2cdf68e9e760a4a133e083607497d4c3a6/flake8-3.5.0.tar.gz"; sha256 = "7253265f7abd8b313e3892944044a365e3f4ac3fcdcfb4298f55ee9ddf188ba0"; }; + name = "flake8-3.7.7"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/23/e7/80626da76ff2b4c94ac9bcd92898a1011d1c891e0ba1343f24109923462d/flake8-3.7.7.tar.gz"; sha256 = "859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ + self."entrypoints" self."mccabe" 
self."pycodestyle" self."pyflakes" @@ -215,9 +259,11 @@ let }; }; + + "humanize" = python.mkDerivation { name = "humanize-0.5.1"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/8c/e0/e512e4ac6d091fc990bbe13f9e0378f34cf6eecd1c6c268c9e598dcf5bb9/humanize-0.5.1.tar.gz"; sha256 = "a43f57115831ac7c70de098e6ac46ac13be00d69abbf60bdcac251344785bb19"; }; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/8c/e0/e512e4ac6d091fc990bbe13f9e0378f34cf6eecd1c6c268c9e598dcf5bb9/humanize-0.5.1.tar.gz"; sha256 = "a43f57115831ac7c70de098e6ac46ac13be00d69abbf60bdcac251344785bb19"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -228,9 +274,11 @@ let }; }; + + "idna" = python.mkDerivation { - name = "idna-2.6"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/f4/bd/0467d62790828c23c47fc1dfa1b1f052b24efdf5290f071c7a91d0d82fd3/idna-2.6.tar.gz"; sha256 = "2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f"; }; + name = "idna-2.8"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"; sha256 = "c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -241,9 +289,11 @@ let }; }; + + "isort" = python.mkDerivation { - name = "isort-4.3.3"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/a2/13/f9da1234eba3d254093e70de31be9bebade75806b07602c83654246a7286/isort-4.3.3.tar.gz"; sha256 = "34929af733faadf884da29d83e7df1884363b3cc647a48e000b3c5cc13d17549"; }; + name = "isort-4.3.18"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/b5/3e/22308cdac59f5ef0e8157a33a01eb611e7a3a93e9711ed88ffc9a5b73ba0/isort-4.3.18.tar.gz"; sha256 = "f09911f6eb114e5592abe635aded8bf3d2c3144ebcfcaf81ee32e7af7b7d1870"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; 
propagatedBuildInputs = [ ]; @@ -254,9 +304,11 @@ let }; }; + + "lazy-object-proxy" = python.mkDerivation { name = "lazy-object-proxy-1.3.1"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/55/08/23c0753599bdec1aec273e322f277c4e875150325f565017f6280549f554/lazy-object-proxy-1.3.1.tar.gz"; sha256 = "eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a"; }; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/55/08/23c0753599bdec1aec273e322f277c4e875150325f565017f6280549f554/lazy-object-proxy-1.3.1.tar.gz"; sha256 = "eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -267,9 +319,11 @@ let }; }; + + "maya" = python.mkDerivation { - name = "maya-0.3.3"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/b0/6c/f88f5e7634f235c64f172dcb45a5c16feb820f5bc92fe4ee4e17e5592e32/maya-0.3.3.tar.gz"; sha256 = "bad39d8f9c6e2c8f446a2187eafbc2128aa20397787be1e4697bb29b239908f5"; }; + name = "maya-0.6.1"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/4e/90/e0e298b495164475331cc3fda906c640c9098a49fc933172fe5826393185/maya-0.6.1.tar.gz"; sha256 = "7f53e06d5a123613dce7c270cbc647643a6942590dba7a19ec36194d0338c3f4"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ @@ -277,7 +331,7 @@ let self."humanize" self."pendulum" self."pytz" - self."ruamel.yaml" + self."snaptime" self."tzlocal" ]; meta = with pkgs.stdenv.lib; { @@ -287,9 +341,11 @@ let }; }; + + "mccabe" = python.mkDerivation { name = "mccabe-0.6.1"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/06/18/fa675aa501e11d6d6ca0ae73a101b2f3571a565e0f7d38e062eec18a91ee/mccabe-0.6.1.tar.gz"; sha256 = "dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"; }; + src = pkgs.fetchurl { url = 
"https://files.pythonhosted.org/packages/06/18/fa675aa501e11d6d6ca0ae73a101b2f3571a565e0f7d38e062eec18a91ee/mccabe-0.6.1.tar.gz"; sha256 = "dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -300,26 +356,44 @@ let }; }; + + + "more-itertools" = python.mkDerivation { + name = "more-itertools-7.0.0"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/29/ed/3a85eb4afdce6dc33e78dad885e17c678db8055bf65353e0de4944c72a40/more-itertools-7.0.0.tar.gz"; sha256 = "c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a"; }; + doCheck = commonDoCheck; + buildInputs = commonBuildInputs; + propagatedBuildInputs = [ ]; + meta = with pkgs.stdenv.lib; { + homepage = "https://github.com/erikrose/more-itertools"; + license = licenses.mit; + description = "More routines for operating on iterables, beyond itertools"; + }; + }; + + + "pendulum" = python.mkDerivation { - name = "pendulum-1.4.1"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/d1/a7/71e2574d886b3f9bb227dbcdf5a89bbb20441d99381bc5db8659cd3e0536/pendulum-1.4.1.tar.gz"; sha256 = "3f16fb759e6126dd89d49886f8100caa72e5ab36563bc148b4f7eddfa0099c0f"; }; + name = "pendulum-2.0.4"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/5b/57/71fc910edcd937b72aa0ef51c8f5734fbd8c011fa1480fce881433847ec8/pendulum-2.0.4.tar.gz"; sha256 = "cf535d36c063575d4752af36df928882b2e0e31541b4482c97d63752785f9fcb"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ self."python-dateutil" self."pytzdata" - self."tzlocal" ]; meta = with pkgs.stdenv.lib; { - homepage = "https://github.com/sdispater/pendulum"; - license = licenses.mit; - description = "Python datetimes made easy."; + homepage = "https://pendulum.eustace.io"; + license = ""; + description = "Python datetimes made easy"; }; }; + + "pluggy" = python.mkDerivation { - name = 
"pluggy-0.6.0"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz"; sha256 = "7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff"; }; + name = "pluggy-0.9.0"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/a7/8c/55c629849c64e665258d8976322dfdad171fa2f57117590662d8a67618a4/pluggy-0.9.0.tar.gz"; sha256 = "19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -330,9 +404,11 @@ let }; }; + + "py" = python.mkDerivation { - name = "py-1.5.2"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/90/e3/e075127d39d35f09a500ebb4a90afd10f9ef0a1d28a6d09abeec0e444fdd/py-1.5.2.tar.gz"; sha256 = "ca18943e28235417756316bfada6cd96b23ce60dd532642690dcfdaba988a76d"; }; + name = "py-1.8.0"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz"; sha256 = "dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -343,9 +419,11 @@ let }; }; + + "pycodestyle" = python.mkDerivation { - name = "pycodestyle-2.3.1"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/e1/88/0e2cbf412bd849ea6f1af1f97882add46a374f4ba1d2aea39353609150ad/pycodestyle-2.3.1.tar.gz"; sha256 = "682256a5b318149ca0d2a9185d365d8864a768a28db66a84a2ea946bcc426766"; }; + name = "pycodestyle-2.5.0"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/1c/d1/41294da5915f4cae7f4b388cea6c2cd0d6cd53039788635f6875dfe8c72f/pycodestyle-2.5.0.tar.gz"; sha256 = "e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -356,9 +434,11 @@ let }; }; + + 
"pyflakes" = python.mkDerivation { - name = "pyflakes-1.6.0"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/26/85/f6a315cd3c1aa597fb3a04cc7d7dbea5b3cc66ea6bd13dfa0478bf4876e6/pyflakes-1.6.0.tar.gz"; sha256 = "8d616a382f243dbf19b54743f280b80198be0bca3a5396f1d2e1fca6223e8805"; }; + name = "pyflakes-2.1.1"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/52/64/87303747635c2988fcaef18af54bfdec925b6ea3b80bcd28aaca5ba41c9e/pyflakes-2.1.1.tar.gz"; sha256 = "d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -369,16 +449,17 @@ let }; }; + + "pylint" = python.mkDerivation { - name = "pylint-1.8.2"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/3d/aa/eeb750d1ca97878e14e0c0b2947c1098737945c165953ae01a962f04eacb/pylint-1.8.2.tar.gz"; sha256 = "4fe3b99da7e789545327b75548cee6b511e4faa98afe268130fea1af4b5ec022"; }; + name = "pylint-2.3.1"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/01/8b/538911c0ebc2529f15004f4cb07e3ca562bb9aacea5df89cc25b62e01891/pylint-2.3.1.tar.gz"; sha256 = "723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ self."astroid" self."isort" self."mccabe" - self."six" ]; meta = with pkgs.stdenv.lib; { homepage = "https://github.com/PyCQA/pylint"; @@ -387,32 +468,40 @@ let }; }; + + "pytest" = python.mkDerivation { - name = "pytest-3.4.0"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/a8/84/25b93a15997be680fc8ce1a7b9315b03650ced546908e5008c0912b1c2e1/pytest-3.4.0.tar.gz"; sha256 = "6074ea3b9c999bd6d0df5fa9d12dd95ccd23550df2a582f5f5b848331d2e82ca"; }; + name = "pytest-4.4.1"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/2b/b1/c9a84f79fc3bad226a9085289da11ecdd9bd2779a2c654195962b37d4110/pytest-4.4.1.tar.gz"; sha256 = 
"b7802283b70ca24d7119b32915efa7c409982f59913c1a6c0640aacf118b95f5"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ + self."atomicwrites" self."attrs" + self."more-itertools" self."pluggy" self."py" + self."requests" self."six" ]; meta = with pkgs.stdenv.lib; { - homepage = "http://pytest.org"; + homepage = "https://docs.pytest.org/en/latest/"; license = licenses.mit; description = "pytest: simple powerful testing with Python"; }; }; + + "pytest-cov" = python.mkDerivation { - name = "pytest-cov-2.5.1"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz"; sha256 = "03aa752cf11db41d281ea1d807d954c4eda35cfa1b21d6971966cc041bbf6e2d"; }; + name = "pytest-cov-2.7.1"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz"; sha256 = "e00ea4fdde970725482f1f35630d12f074e121a23801aabf2ae154ec6bdd343a"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ self."coverage" self."pytest" + self."six" ]; meta = with pkgs.stdenv.lib; { homepage = "https://github.com/pytest-dev/pytest-cov"; @@ -421,9 +510,11 @@ let }; }; + + "pytest-flake8" = python.mkDerivation { - name = "pytest-flake8-0.9.1"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/e5/87/345c1423d3dd7c27247b61c71192c9b94a5f980647389e622ac41ff92a3d/pytest-flake8-0.9.1.tar.gz"; sha256 = "e716072d07a557defdd5c4141984569731e292961370a5663c1697283aa16200"; }; + name = "pytest-flake8-1.0.4"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/f0/b5/e1360bfe5b1218fe4f7a7fd6038de8d990e980c6f5d55c922e216de7131b/pytest-flake8-1.0.4.tar.gz"; sha256 = "4d225c13e787471502ff94409dcf6f7927049b2ec251c63b764a4b17447b60c0"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ @@ -437,9 
+528,11 @@ let }; }; + + "pytest-pylint" = python.mkDerivation { - name = "pytest-pylint-0.8.0"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/13/69/d4966ee61290e51a9e42b26d7b02c5aeec0e9e9a661323db6be3f817c1aa/pytest-pylint-0.8.0.tar.gz"; sha256 = "41d6f223ae5e0a0fbb0056e826ecdb8046be2a828eba55c0d4f66cbfd7d27168"; }; + name = "pytest-pylint-0.14.0"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/52/58/cc27a07b8a7715411415c0f42d9e7c24bd2c646748b7406d2e7507da085f/pytest-pylint-0.14.0.tar.gz"; sha256 = "7bfbb66fc6dc160193a9e813a7c55e5ae32028f18660deeb90e1cb7e980cbbac"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ @@ -454,13 +547,16 @@ let }; }; + + "pytest-runner" = python.mkDerivation { - name = "pytest-runner-3.0"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/65/b4/ae89338cd2d81e2cc54bd6db2e962bfe948f612303610d68ab24539ac2d1/pytest-runner-3.0.tar.gz"; sha256 = "0f7c3a3cf5aead13f54baaa01ceb49e5ae92aba5d3ff1928e81e189c40bc6703"; }; + name = "pytest-runner-4.4"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/15/0a/1e73c3a3d3f4f5faf5eacac4e55675c1627b15d84265b80b8fef3f8a3fb5/pytest-runner-4.4.tar.gz"; sha256 = "00ad6cd754ce55b01b868a6d00b77161e4d2006b3918bde882376a0a884d0df4"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ self."pytest" + self."pytest-flake8" ]; meta = with pkgs.stdenv.lib; { homepage = "https://github.com/pytest-dev/pytest-runner"; @@ -469,9 +565,11 @@ let }; }; + + "python-dateutil" = python.mkDerivation { - name = "python-dateutil-2.6.1"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/54/bb/f1db86504f7a49e1d9b9301531181b00a1c7325dc85a29160ee3eaa73a54/python-dateutil-2.6.1.tar.gz"; sha256 = "891c38b2a02f5bb1be3e4793866c8df49c7d19baabf9c1bad62547e0b4866aca"; }; + name = "python-dateutil-2.8.0"; + src = pkgs.fetchurl { url = 
"https://files.pythonhosted.org/packages/ad/99/5b2e99737edeb28c71bcbec5b5dda19d0d9ef3ca3e92e3e925e7c0bb364c/python-dateutil-2.8.0.tar.gz"; sha256 = "c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ @@ -484,9 +582,11 @@ let }; }; + + "pytz" = python.mkDerivation { - name = "pytz-2017.3"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/60/88/d3152c234da4b2a1f7a989f89609ea488225eaea015bc16fbde2b3fdfefa/pytz-2017.3.zip"; sha256 = "fae4cffc040921b8a2d60c6cf0b5d662c1190fe54d718271db4eb17d44a185b7"; }; + name = "pytz-2019.1"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/df/d5/3e3ff673e8f3096921b3f1b79ce04b832e0100b4741573154b72b756a681/pytz-2019.1.tar.gz"; sha256 = "d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -497,22 +597,26 @@ let }; }; + + "pytzdata" = python.mkDerivation { - name = "pytzdata-2018.3"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/bc/6f/c885e9f84ef39cb731af615094040d5f12aa03eba3d3793fe4cef0ffa95b/pytzdata-2018.3.tar.gz"; sha256 = "4e2cceb54335cd6c28caea46b15cd592e2aec5e8b05b0241cbccfb1b23c02ae7"; }; + name = "pytzdata-2019.1"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/f3/40/a33d54b253f4fb47df3ff9d1b724e70780140f6213bd16a4de32db232b1d/pytzdata-2019.1.tar.gz"; sha256 = "f0469062f799c66480fcc7eae69a8270dc83f0e6522c0e70db882d6bd708d378"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; meta = with pkgs.stdenv.lib; { homepage = "https://github.com/sdispater/pytzdata"; - license = licenses.mit; - description = "Official timezone database for Python."; + license = ""; + description = "The Olson timezone database for Python."; }; }; + + "regex" = python.mkDerivation { - name = "regex-2018.2.3"; - src = 
pkgs.fetchurl { url = "https://pypi.python.org/packages/d3/39/bc41305fc954d878134b9da98048e77457a0e03daf2b2ab46c6a96036f95/regex-2018.02.03.tar.gz"; sha256 = "c5162e1ca552e50aac5101bbc97fc37562c69f939f21a13745d146ca7e748aa1"; }; + name = "regex-2019.4.14"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/11/d9/e37129676d508adf833fb3e3c3fbcb4e5a10183cf45b6c7edbaa57b4a1f2/regex-2019.04.14.tar.gz"; sha256 = "d56ce4c7b1a189094b9bee3b81c4aeb3f1ba3e375e91627ec8561b6ab483d0a8"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ -523,9 +627,11 @@ let }; }; + + "requests" = python.mkDerivation { - name = "requests-2.18.4"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/b0/e1/eab4fc3752e3d240468a8c0b284607899d2fbfb236a56b7377a329aa8d09/requests-2.18.4.tar.gz"; sha256 = "9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"; }; + name = "requests-2.21.0"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/52/2c/514e4ac25da2b08ca5a464c50463682126385c4272c18193876e91f4bc38/requests-2.21.0.tar.gz"; sha256 = "502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ @@ -541,35 +647,59 @@ let }; }; - "ruamel.yaml" = python.mkDerivation { - name = "ruamel.yaml-0.15.35"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/8f/39/77c555d68d317457a10a30f4a92ae4a315a4ee0e05e9af7c0ac5c301df10/ruamel.yaml-0.15.35.tar.gz"; sha256 = "8dc74821e4bb6b21fb1ab35964e159391d99ee44981d07d57bf96e2395f3ef75"; }; + + + "six" = python.mkDerivation { + name = "six-1.12.0"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/dd/bf/4138e7bfb757de47d1f4b6994648ec67a51efe58fa907c1e11e350cddfca/six-1.12.0.tar.gz"; sha256 = "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = 
[ ]; meta = with pkgs.stdenv.lib; { - homepage = "https://bitbucket.org/ruamel/yaml"; + homepage = "https://github.com/benjaminp/six"; license = licenses.mit; - description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"; + description = "Python 2 and 3 compatibility utilities"; }; }; - "six" = python.mkDerivation { - name = "six-1.11.0"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz"; sha256 = "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"; }; + + + "snaptime" = python.mkDerivation { + name = "snaptime-0.2.4"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/f3/f4/cb818c9bfdac4605f13296f7fcfe068aee7d1c3aa89f8cc22a064c1fab20/snaptime-0.2.4.tar.gz"; sha256 = "e3f1eb89043d58d30721ab98cb65023f1a4c2740e3b197704298b163c92d508b"; }; + doCheck = commonDoCheck; + buildInputs = commonBuildInputs; + propagatedBuildInputs = [ + self."python-dateutil" + self."pytz" + ]; + meta = with pkgs.stdenv.lib; { + homepage = "https://github.com/zartstrom/snaptime"; + license = ""; + description = "Transform timestamps with a simple DSL"; + }; + }; + + + + "typed-ast" = python.mkDerivation { + name = "typed-ast-1.3.5"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/d3/b1/959c3ed4a9cc100feba7ad1a7d6336d8888937ee89f4a577f7698e09decd/typed-ast-1.3.5.tar.gz"; sha256 = "5315f4509c1476718a4825f45a203b82d7fdf2a6f5f0c8f166435975b1c9f7d4"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; meta = with pkgs.stdenv.lib; { - homepage = "http://pypi.python.org/pypi/six/"; - license = licenses.mit; - description = "Python 2 and 3 compatibility utilities"; + homepage = "https://github.com/python/typed_ast"; + license = licenses.asl20; + description = "a fork of Python 2 and 3 ast modules with type comment 
support"; }; }; + + "tzlocal" = python.mkDerivation { name = "tzlocal-1.5.1"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz"; sha256 = "4ebeb848845ac898da6519b9b31879cf13b6626f7184c496037b818e238f2c4e"; }; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz"; sha256 = "4ebeb848845ac898da6519b9b31879cf13b6626f7184c496037b818e238f2c4e"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ @@ -582,9 +712,11 @@ let }; }; + + "urllib3" = python.mkDerivation { - name = "urllib3-1.22"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/ee/11/7c59620aceedcc1ef65e156cc5ce5a24ef87be4107c2b74458464e437a5d/urllib3-1.22.tar.gz"; sha256 = "cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"; }; + name = "urllib3-1.24.3"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/8a/3c/1bb7ef6c435dea026f06ed9f3ba16aa93f9f4f5d3857a51a35dfa00882f1/urllib3-1.24.3.tar.gz"; sha256 = "2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ @@ -598,9 +730,11 @@ let }; }; + + "wrapt" = python.mkDerivation { - name = "wrapt-1.10.11"; - src = pkgs.fetchurl { url = "https://pypi.python.org/packages/a0/47/66897906448185fcb77fc3c2b1bc20ed0ecca81a0f2f88eda3fc5a34fc3d/wrapt-1.10.11.tar.gz"; sha256 = "d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6"; }; + name = "wrapt-1.11.1"; + src = pkgs.fetchurl { url = "https://files.pythonhosted.org/packages/67/b2/0f71ca90b0ade7fad27e3d20327c996c6252a2ffe88f50a95bba7434eda9/wrapt-1.11.1.tar.gz"; sha256 = "4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"; }; doCheck = commonDoCheck; buildInputs = commonBuildInputs; propagatedBuildInputs = [ ]; @@ 
-610,11 +744,12 @@ let description = "Module for decorators, wrappers and monkey patching."; }; }; + }; localOverridesFile = ./requirements_override.nix; overrides = import localOverridesFile { inherit pkgs python; }; commonOverrides = [ - + ]; allOverrides = (if (builtins.pathExists localOverridesFile) @@ -626,4 +761,4 @@ in python.withPackages generated allOverrides ) - ) + ) \ No newline at end of file diff --git a/requirements_frozen.txt b/requirements_frozen.txt index ca8f1011..47f609ed 100644 --- a/requirements_frozen.txt +++ b/requirements_frozen.txt @@ -1,39 +1,43 @@ appdirs==1.4.3 -astroid==1.6.1 -attrs==17.4.0 -certifi==2018.1.18 +astroid==2.2.5 +atomicwrites==1.3.0 +attrs==19.1.0 +certifi==2019.3.9 chardet==3.0.4 -ConfigArgParse==0.13.0 -coverage==4.5 -dateparser==0.6.0 -flake8==3.5.0 +ConfigArgParse==0.14.0 +coverage==4.5.3 +dateparser==0.7.1 +entrypoints==0.3 +flake8==3.7.7 humanize==0.5.1 -idna==2.6 -isort==4.3.3 +idna==2.8 +isort==4.3.18 lazy-object-proxy==1.3.1 -maya==0.3.3 +maya==0.6.1 mccabe==0.6.1 +more-itertools==7.0.0 packaging==16.8 -pendulum==1.4.1 -pluggy==0.6.0 -py==1.5.2 -pycodestyle==2.3.1 -pyflakes==1.6.0 -pylint==1.8.2 +pendulum==2.0.4 +pluggy==0.9.0 +py==1.8.0 +pycodestyle==2.5.0 +pyflakes==2.1.1 +pylint==2.3.1 pyparsing==2.2.0 -pytest==3.4.0 -pytest-cov==2.5.1 -pytest-flake8==0.9.1 -pytest-pylint==0.8.0 -pytest-runner==3.0 -python-dateutil==2.6.1 -pytz==2017.3 -pytzdata==2018.3 -PyYAML==3.12 -regex==2018.2.3 -requests==2.18.4 -ruamel.yaml==0.15.35 -six==1.11.0 +pytest==4.4.1 +pytest-cov==2.7.1 +pytest-flake8==1.0.4 +pytest-pylint==0.14.0 +pytest-runner==4.4 +python-dateutil==2.8.0 +pytz==2019.1 +pytzdata==2019.1 +PyYAML==5.1 +regex==2019.4.14 +requests==2.21.0 +six==1.12.0 +snaptime==0.2.4 +typed-ast==1.3.5 tzlocal==1.5.1 -urllib3==1.22 -wrapt==1.10.11 +urllib3==1.24.3 +wrapt==1.11.1 diff --git a/requirements_override.nix b/requirements_override.nix index 3b704ef9..cd367f1c 100644 --- a/requirements_override.nix +++ 
b/requirements_override.nix @@ -1,5 +1,10 @@ { pkgs, python }: self: super: { - -} \ No newline at end of file + # Break circular dependency: pytest depends on attrs and attrs depends on + # pytest to test itself. It certainly hasn't got it as a runtime dep though, + # so remove it. + "attrs" = python.overrideDerivation super."attrs" (old: { + propagatedBuildInputs = [ self."six" ]; + }); +} diff --git a/shell.nix b/shell.nix new file mode 100644 index 00000000..bc5e756a --- /dev/null +++ b/shell.nix @@ -0,0 +1,14 @@ +let + addBuildTools = pkg: tools: pkg.overrideAttrs + (oldAttrs: { nativeBuildInputs = oldAttrs.nativeBuildInputs ++ tools; }); + sources = import ./nix/sources.nix; + ## Tool to bump versions of sources written as json entries to git repos etc. + ## We use it bump nixpkgs itself ATM (just `niv update`). + niv = (import sources.niv {}).niv; + pkgs = (import sources.nixpkgs {}); + pypi2nix = pkgs.pypi2nix; + make = pkgs.make; + marge-bot = (import ./.).marge-bot; +in + ## create a version of the marge-bot env that has niv + addBuildTools marge-bot [ niv pypi2nix ] diff --git a/tests/git_repo_mock.py b/tests/git_repo_mock.py new file mode 100644 index 00000000..6aeca3f0 --- /dev/null +++ b/tests/git_repo_mock.py @@ -0,0 +1,242 @@ +import logging as log +from collections import defaultdict +from datetime import timedelta +import functools +import shlex + +import marge.git as git + + +class RepoMock(git.Repo): + + @classmethod + def init_for_merge_request(cls, merge_request, initial_target_sha, project, forked_project=None): + assert bool(forked_project) == ( + merge_request.source_project_id != merge_request.target_project_id + ) + + target_url = project.ssh_url_to_repo + source_url = forked_project.ssh_url_to_repo if forked_project else target_url + + remote_repos = defaultdict(GitRepoModel) + remote_repos[source_url].set_ref(merge_request.source_branch, merge_request.sha) + remote_repos[target_url].set_ref(merge_request.target_branch, 
initial_target_sha) + + result = cls( + remote_url=target_url, + local_path='/tmp/blah', + ssh_key_file='/home/homer/.ssh/id_rsa', + timeout=timedelta(seconds=1000000), + reference='the_reference', + ) + + # pylint: disable=attribute-defined-outside-init + result.mock_impl = GitModel(origin=target_url, remote_repos=remote_repos) + return result + + def git(self, *args, from_repo=True): + command = args[0] + command_args = args[1:] + + log.info('Run: git %r %s', command, ' '.join(map(repr, command_args))) + assert from_repo == (command != 'clone') + + command_impl_name = command.replace('-', '_') + command_impl = getattr(self.mock_impl, command_impl_name, None) + assert command_impl, ('git: Unexpected command %s' % command) + try: + result = command_impl(*command_args) + except Exception: + log.warning('Failed to simulate: git %r %s', command, command_args) + raise + else: + return self._pretend_result_comes_from_popen(result) + + @staticmethod + def _pretend_result_comes_from_popen(result): + result_bytes = ('' if result is None else str(result)).encode('ascii') + return stub(stdout=result_bytes) + + +class stub: # pylint: disable=invalid-name,too-few-public-methods + def __init__(self, **kwargs): + self.__dict__ = kwargs + + +class GitRepoModel: + def __init__(self, copy_of=None): + # pylint: disable=protected-access + self._refs = dict(copy_of._refs) if copy_of else {} + + def set_ref(self, ref, commit): + self._refs[ref] = commit + + def get_ref(self, ref): + return self._refs[ref] + + def has_ref(self, ref): + return ref in self._refs + + def del_ref(self, ref): + self._refs.pop(ref, None) + + def __repr__(self): + return "<%s: %s>" % (type(self), self._refs) + + +class GitModel: + def __init__(self, origin, remote_repos): + assert origin in remote_repos + + self.remote_repos = remote_repos + self._local_repo = GitRepoModel() + self._remotes = dict(origin=origin) + self._remote_refs = {} + self._branch = None + self.on_push_callbacks = [] + + @property + def 
_head(self): + return self._local_repo.get_ref(self._branch) + + def remote(self, *args): + action = args[0] + if action == 'rm': + _, remote = args + try: + self._remotes.pop(remote) + except KeyError: + raise git.GitError('No such remote: %s' % remote) + + elif action == 'add': + _, remote, url = args + self._remotes[remote] = url + else: + assert False, args + + def fetch(self, *args): + _, remote_name = args + assert args == ('--prune', remote_name) + remote_url = self._remotes[remote_name] + remote_repo = self.remote_repos[remote_url] + self._remote_refs[remote_name] = GitRepoModel(copy_of=remote_repo) + + def checkout(self, *args): + if args[0] == '-B': # -B == create if it doesn't exist + _, branch, start_point, _ = args + assert args == ('-B', branch, start_point, '--') + assert start_point == '' or '/' in start_point # '' when "local" + + # create if it doesn't exist + if not self._local_repo.has_ref(branch): + if start_point: + remote_name, remote_branch = start_point.split('/') + assert remote_branch == branch + + remote_url = self._remotes[remote_name] + remote_repo = self.remote_repos[remote_url] + commit = remote_repo.get_ref(branch) + self._local_repo.set_ref(branch, commit) + else: + self._local_repo.set_ref(branch, self._head) + else: + branch, _ = args + assert args == (branch, '--') + assert self._local_repo.has_ref(branch) + + # checkout + self._branch = branch + + def branch(self, *args): + if args[0] == "-D": + _, branch = args + assert self._branch != branch + self._local_repo.del_ref(branch) + else: + assert False + + def rev_parse(self, arg): + if arg == 'HEAD': + return self._head + + remote, branch = arg.split('/') + return self._remote_refs[remote].get_ref(branch) + + def rebase(self, arg): + remote, branch = arg.split('/') + new_base = self._remote_refs[remote].get_ref(branch) + if new_base != self._head: + new_sha = 'rebase(%s onto %s)' % (self._head, new_base) + self._local_repo.set_ref(self._branch, new_sha) + + def merge(self, arg): 
+ remote, branch = arg.split('/') + + other_ref = self._remote_refs[remote].get_ref(branch) + if other_ref != self._head: + new_sha = 'merge(%s with %s)' % (self._head, other_ref) + self._local_repo.set_ref(self._branch, new_sha) + + def push(self, *args): + force_flag, remote_name, refspec = args + + assert force_flag in ('', '--force') + + branch, remote_branch = refspec.split(':') + remote_url = self._remotes[remote_name] + remote_repo = self.remote_repos[remote_url] + + old_sha = remote_repo.get_ref(remote_branch) + new_sha = self._local_repo.get_ref(branch) + + if force_flag: + remote_repo.set_ref(remote_branch, new_sha) + else: + expected_remote_sha = self._remote_refs[remote_name].get_ref(remote_branch) + if old_sha != expected_remote_sha: + raise git.GitError("conflict: can't push") + remote_repo.set_ref(remote_branch, new_sha) + + for callback in self.on_push_callbacks: + callback( + remote_url=remote_url, + remote_branch=remote_branch, + old_sha=old_sha, + new_sha=new_sha, + ) + + def config(self, *args): + assert len(args) == 2 and args[0] == '--get' + _, remote, _ = elems = args[1].split('.') + assert elems == ['remote', remote, 'url'], elems + return self._remotes[remote] + + def diff_index(self, *args): + assert args == ('--quiet', 'HEAD') + # we don't model dirty index + + def ls_files(self, *args): + assert args == ('--others',) + # we don't model untracked files + + def filter_branch(self, *args): + _, _, filter_cmd, commit_range = args + assert args == ('--force', '--msg-filter', filter_cmd, commit_range) + + trailers_var, python, script_path = shlex.split(filter_cmd) + _, trailers_str = trailers_var.split('=') + + assert trailers_var == "TRAILERS=%s" % trailers_str + assert python == "python3" + assert script_path.endswith("marge/trailerfilter.py") + + trailers = list(sorted(set(line.split(':')[0] for line in trailers_str.split('\n')))) + assert trailers + + new_sha = functools.reduce( + lambda x, f: "add-%s(%s)" % (f, x), + [trailer.lower() for 
trailer in trailers], + self._head + ) + self._local_repo.set_ref(self._branch, new_sha) + return new_sha diff --git a/tests/gitlab_api_mock.py b/tests/gitlab_api_mock.py index c8776c21..0f3259aa 100644 --- a/tests/gitlab_api_mock.py +++ b/tests/gitlab_api_mock.py @@ -23,8 +23,8 @@ def commit(commit_id, status): } -class MockLab(object): # pylint: disable=too-few-public-methods - def __init__(self, gitlab_url=None): +class MockLab: # pylint: disable=too-few-public-methods + def __init__(self, initial_master_sha='505e', gitlab_url=None, fork=False, merge_request_options=None): self.gitlab_url = gitlab_url = gitlab_url or 'http://git.example.com' self.api = api = Api(gitlab_url=gitlab_url, auth_token='no-token', initial_state='initial') @@ -47,7 +47,7 @@ def __init__(self, gitlab_url=None): 'title': 'a title', 'project_id': 1234, 'author': {'id': self.author_id}, - 'assignee': {'id': self.user_id}, + 'assignees': [{'id': self.user_id}], 'approved_by': [], 'state': 'opened', 'sha': self.commit_info['id'], @@ -58,9 +58,23 @@ def __init__(self, gitlab_url=None): 'work_in_progress': False, 'web_url': 'http://git.example.com/group/project/merge_request/666', } + if merge_request_options is not None: + self.merge_request_info.update(merge_request_options) + + if fork: + self.forked_project_info = dict( + self.project_info, + id=4321, + ssh_url_to_repo='ssh://some.other.project/stuff', + ) + api.add_project(self.forked_project_info) + self.merge_request_info.update({'iid': 55, 'source_project_id': '4321'}) + else: + self.forked_project_info = None + api.add_merge_request(self.merge_request_info) - self.initial_master_sha = '505e' + self.initial_master_sha = initial_master_sha self.approvals_info = dict( test_approvals.INFO, id=self.merge_request_info['id'], @@ -70,7 +84,11 @@ def __init__(self, gitlab_url=None): ) api.add_approvals(self.approvals_info) api.add_transition( - GET('/projects/1234/repository/branches/master'), + GET( + 
'/projects/1234/repository/branches/{target}'.format( + target=self.merge_request_info['target_branch'], + ), + ), Ok({'commit': {'id': self.initial_master_sha}}), ) @@ -91,7 +109,7 @@ def call(self, command, sudo=None, response_json=None): self.state, ) try: - response, next_state = self._find(command, sudo) + response, next_state, side_effect = self._find(command, sudo) except KeyError: page = command.args.get('page') if page == 0: @@ -114,13 +132,15 @@ def call(self, command, sudo=None, response_json=None): if next_state: self.state = next_state + if side_effect: + side_effect() return response() def _find(self, command, sudo): more_specific = self._transitions.get(_key(command, sudo, self.state)) return more_specific or self._transitions[_key(command, sudo, None)] - def add_transition(self, command, response, sudo=None, from_state=None, to_state=None): + def add_transition(self, command, response, sudo=None, from_state=None, to_state=None, side_effect=None): from_states = from_state if isinstance(from_state, list) else [from_state] for _from_state in from_states: @@ -132,7 +152,7 @@ def add_transition(self, command, response, sudo=None, from_state=None, to_state show_from, show_from if to_state is None else repr(to_state), ) - self._transitions[_key(command, sudo, _from_state)] = (response, to_state) + self._transitions[_key(command, sudo, _from_state)] = (response, to_state, side_effect) def add_resource(self, path, info, sudo=None, from_state=None, to_state=None, result=None): if result is None: diff --git a/tests/test_app.py b/tests/test_app.py index 10611158..822a42c9 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -65,6 +65,7 @@ def api_mock(gitlab_url, auth_token): api = gitlab_mock.Api(gitlab_url=gitlab_url, auth_token=auth_token, initial_state='initial') user_info_for_token = dict(user_info, is_admin=auth_token == 'ADMIN-TOKEN') api.add_user(user_info_for_token, is_current=True) + api.add_transition(gitlab_mock.GET('/version'), 
gitlab_mock.Ok({'version': '11.6.0-ce'})) return api class DoNothingBot(bot_module.Bot): @@ -92,6 +93,7 @@ def test_default_values(): assert bot.config.project_regexp == re.compile('.*') assert bot.config.git_timeout == datetime.timedelta(seconds=120) assert bot.config.merge_opts == job.MergeJobOptions.default() + assert bot.config.merge_order == 'created_at' def test_embargo(): @@ -102,11 +104,18 @@ def test_embargo(): ) +def test_rebase_remotely(): + with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): + with main('--rebase-remotely') as bot: + assert bot.config.merge_opts != job.MergeJobOptions.default() + assert bot.config.merge_opts == job.MergeJobOptions.default(fusion=job.Fusion.gitlab_rebase) + + def test_use_merge_strategy(): with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): with main('--use-merge-strategy') as bot: assert bot.config.merge_opts != job.MergeJobOptions.default() - assert bot.config.merge_opts == job.MergeJobOptions.default(use_merge_strategy=True) + assert bot.config.merge_opts == job.MergeJobOptions.default(fusion=job.Fusion.merge) def test_add_tested(): @@ -118,7 +127,7 @@ def test_add_tested(): def test_use_merge_strategy_and_add_tested_are_mutualy_exclusive(): with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): - with pytest.raises(SystemExit): + with pytest.raises(app.MargeBotCliArgError): with main('--use-merge-strategy --add-tested'): pass @@ -142,10 +151,18 @@ def test_add_reviewers(): assert bot.config.merge_opts == job.MergeJobOptions.default(add_reviewers=True) +def test_rebase_remotely_option_conflicts(): + for conflicting_flag in ['--use-merge-strategy', '--add-tested', '--add-part-of', '--add-reviewers']: + with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): + with pytest.raises(app.MargeBotCliArgError): + with 
main('--rebase-remotely %s' % conflicting_flag): + pass + + def test_impersonate_approvers(): with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): with pytest.raises(AssertionError): - with main('--impersonate-approvers') as bot: + with main('--impersonate-approvers'): pass with env(MARGE_AUTH_TOKEN="ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): @@ -199,6 +216,24 @@ def test_branch_regexp(): assert bot.config.branch_regexp == re.compile('foo.*bar') +def test_source_branch_regexp(): + with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): + with main("--source-branch-regexp='foo.*bar'") as bot: + assert bot.config.source_branch_regexp == re.compile('foo.*bar') + + +def test_git_reference_repo(): + with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): + with main("--git-reference-repo='/foo/reference_repo'") as bot: + assert bot.config.git_reference_repo == '/foo/reference_repo' + + +def test_merge_order(): + with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): + with main("--merge-order='updated_at'") as bot: + assert bot.config.merge_order == 'updated_at' + + # FIXME: I'd reallly prefer this to be a doctest, but adding --doctest-modules # seems to seriously mess up the test run def test_time_interval(): diff --git a/tests/test_approvals.py b/tests/test_approvals.py index 4fcccb38..2366bbe2 100644 --- a/tests/test_approvals.py +++ b/tests/test_approvals.py @@ -1,11 +1,13 @@ from unittest.mock import call, Mock, patch +import pytest + from marge.gitlab import Api, GET, POST, Version from marge.approvals import Approvals from marge.merge_request import MergeRequest import marge.user # testing this here is more convenient -from marge.job import _get_reviewer_names_and_emails +from marge.job import CannotMerge, _get_reviewer_names_and_emails INFO = { "id": 
5, @@ -62,7 +64,7 @@ # pylint: disable=attribute-defined-outside-init -class TestApprovals(object): +class TestApprovals: def setup_method(self, _method): self.api = Mock(Api) @@ -114,7 +116,27 @@ def test_reapprove(self): @patch('marge.user.User.fetch_by_id') def test_get_reviewer_names_and_emails(self, user_fetch_by_id): user_fetch_by_id.side_effect = lambda id, _: marge.user.User(self.api, USERS[id]) - assert _get_reviewer_names_and_emails(approvals=self.approvals, api=self.api) == [ + assert _get_reviewer_names_and_emails(commits=[], approvals=self.approvals, api=self.api) == [ 'Administrator ', 'Roger Ebert ' ] + + @patch('marge.user.User.fetch_by_id') + def test_approvals_fails_when_same_author(self, user_fetch_by_id): + info = dict(INFO, approved_by=list(INFO['approved_by'])) + del info['approved_by'][1] + approvals = Approvals(self.api, info) + user_fetch_by_id.side_effect = lambda id, _: marge.user.User(self.api, USERS[id]) + commits = [{'author_email': 'root@localhost'}] + with pytest.raises(CannotMerge): + _get_reviewer_names_and_emails(commits=commits, approvals=approvals, api=self.api) + + @patch('marge.user.User.fetch_by_id') + def test_approvals_succeeds_with_independent_author(self, user_fetch_by_id): + user_fetch_by_id.side_effect = lambda id, _: marge.user.User(self.api, USERS[id]) + print(INFO['approved_by']) + commits = [{'author_email': 'root@localhost'}] + assert _get_reviewer_names_and_emails(commits=commits, approvals=self.approvals, api=self.api) == [ + 'Administrator ', + 'Roger Ebert ', + ] diff --git a/tests/test_batch_job.py b/tests/test_batch_job.py index 21270bf2..109b9056 100644 --- a/tests/test_batch_job.py +++ b/tests/test_batch_job.py @@ -1,5 +1,5 @@ # pylint: disable=protected-access -from unittest.mock import ANY, Mock, patch +from unittest.mock import ANY, patch, create_autospec import pytest @@ -13,15 +13,23 @@ from tests.gitlab_api_mock import MockLab, Ok, commit -# pylint: disable=attribute-defined-outside-init -class 
TestBatchJob(object): - def setup_method(self, _method): - self.mocklab = MockLab() - self.api = self.mocklab.api +class TestBatchJob: + @pytest.fixture(params=[True, False]) + def fork(self, request): + return request.param - def get_batch_merge_job(self, **batch_merge_kwargs): - api, mocklab = self.api, self.mocklab + @pytest.fixture() + def mocklab(self, fork): + return MockLab(fork=fork) + @pytest.fixture() + def api(self, mocklab): + return mocklab.api + + def _mock_merge_request(self, **options): + return create_autospec(marge.merge_request.MergeRequest, spec_set=True, **options) + + def get_batch_merge_job(self, api, mocklab, **batch_merge_kwargs): project_id = mocklab.project_info['id'] merge_request_iid = mocklab.merge_request_info['iid'] @@ -29,29 +37,29 @@ def get_batch_merge_job(self, **batch_merge_kwargs): params = { 'api': api, - 'user': marge.user.User.myself(self.api), + 'user': marge.user.User.myself(api), 'project': marge.project.Project.fetch_by_id(project_id, api), - 'repo': Mock(marge.git.Repo), + 'repo': create_autospec(marge.git.Repo, spec_set=True), 'options': MergeJobOptions.default(), 'merge_requests': [merge_request] } params.update(batch_merge_kwargs) return BatchMergeJob(**params) - def test_remove_batch_branch(self): - repo = Mock() - batch_merge_job = self.get_batch_merge_job(repo=repo) + def test_remove_batch_branch(self, api, mocklab): + repo = create_autospec(marge.git.Repo, spec_set=True) + batch_merge_job = self.get_batch_merge_job(api, mocklab, repo=repo) batch_merge_job.remove_batch_branch() repo.remove_branch.assert_called_once_with( BatchMergeJob.BATCH_BRANCH_NAME, ) - def test_close_batch_mr(self): + def test_close_batch_mr(self, api, mocklab): with patch('marge.batch_job.MergeRequest') as mr_class: - batch_mr = Mock() + batch_mr = self._mock_merge_request() mr_class.search.return_value = [batch_mr] - batch_merge_job = self.get_batch_merge_job() + batch_merge_job = self.get_batch_merge_job(api, mocklab) 
batch_merge_job.close_batch_mr() params = { @@ -68,12 +76,12 @@ def test_close_batch_mr(self): ) batch_mr.close.assert_called_once() - def test_create_batch_mr(self): + def test_create_batch_mr(self, api, mocklab): with patch('marge.batch_job.MergeRequest') as mr_class: - batch_mr = Mock() + batch_mr = self._mock_merge_request() mr_class.create.return_value = batch_mr - batch_merge_job = self.get_batch_merge_job() + batch_merge_job = self.get_batch_merge_job(api, mocklab) target_branch = 'master' r_batch_mr = batch_merge_job.create_batch_mr(target_branch) @@ -90,27 +98,28 @@ def test_create_batch_mr(self): ) assert r_batch_mr is batch_mr - def test_get_mrs_with_common_target_branch(self): + def test_get_mrs_with_common_target_branch(self, api, mocklab): master_mrs = [ - Mock(target_branch='master'), - Mock(target_branch='master'), + self._mock_merge_request(target_branch='master'), + self._mock_merge_request(target_branch='master'), ] non_master_mrs = [ - Mock(target_branch='non_master'), - Mock(target_branch='non_master'), + self._mock_merge_request(target_branch='non_master'), + self._mock_merge_request(target_branch='non_master'), ] batch_merge_job = self.get_batch_merge_job( + api, mocklab, merge_requests=non_master_mrs + master_mrs, ) r_maser_mrs = batch_merge_job.get_mrs_with_common_target_branch('master') assert r_maser_mrs == master_mrs @patch.object(BatchMergeJob, 'get_mr_ci_status') - def test_ensure_mergeable_mr_ci_not_ok(self, bmj_get_mr_ci_status): - batch_merge_job = self.get_batch_merge_job() + def test_ensure_mergeable_mr_ci_not_ok(self, bmj_get_mr_ci_status, api, mocklab): + batch_merge_job = self.get_batch_merge_job(api, mocklab) bmj_get_mr_ci_status.return_value = 'failed' - merge_request = Mock( - assignee_id=batch_merge_job._user.id, + merge_request = self._mock_merge_request( + assignee_ids=[batch_merge_job._user.id], state='opened', work_in_progress=False, squash=False, @@ -121,19 +130,19 @@ def test_ensure_mergeable_mr_ci_not_ok(self, 
bmj_get_mr_ci_status): assert str(exc_info.value) == 'This MR has not passed CI.' - def test_push_batch(self): - batch_merge_job = self.get_batch_merge_job() + def test_push_batch(self, api, mocklab): + batch_merge_job = self.get_batch_merge_job(api, mocklab) batch_merge_job.push_batch() batch_merge_job._repo.push.assert_called_once_with( BatchMergeJob.BATCH_BRANCH_NAME, force=True, ) - def test_ensure_mr_not_changed(self): + def test_ensure_mr_not_changed(self, api, mocklab): with patch('marge.batch_job.MergeRequest') as mr_class: - batch_merge_job = self.get_batch_merge_job() - merge_request = Mock() - changed_merge_request = Mock() + batch_merge_job = self.get_batch_merge_job(api, mocklab) + merge_request = self._mock_merge_request() + changed_merge_request = self._mock_merge_request() mr_class.fetch_by_iid.return_value = changed_merge_request with pytest.raises(CannotMerge): @@ -145,24 +154,27 @@ def test_ensure_mr_not_changed(self): batch_merge_job._api, ) - def test_fuse_mr_when_target_branch_was_moved(self): - batch_merge_job = self.get_batch_merge_job() - merge_request = Mock(target_branch='master') + def test_fuse_mr_when_target_branch_was_moved(self, api, mocklab): + batch_merge_job = self.get_batch_merge_job(api, mocklab) + merge_request = self._mock_merge_request(target_branch='master') with pytest.raises(CannotBatch) as exc_info: batch_merge_job.accept_mr(merge_request, 'abc') assert str(exc_info.value) == 'Someone was naughty and by-passed marge' - def test_fuse_mr_when_source_branch_was_moved(self): - api, mocklab = self.api, self.mocklab - batch_merge_job = self.get_batch_merge_job() - merge_request = Mock( - source_project_id=batch_merge_job._project.id, + def test_fuse_mr_when_source_branch_was_moved(self, api, mocklab): + batch_merge_job = self.get_batch_merge_job(api, mocklab) + merge_request = self._mock_merge_request( + source_project_id=mocklab.merge_request_info['source_project_id'], target_branch='master', - 
source_branch=self.mocklab.merge_request_info['source_branch'], + source_branch=mocklab.merge_request_info['source_branch'], ) api.add_transition( - GET('/projects/1234/repository/branches/useless_new_feature'), + GET( + '/projects/{project_iid}/repository/branches/useless_new_feature'.format( + project_iid=mocklab.merge_request_info['source_project_id'], + ), + ), Ok({'commit': commit(commit_id='abc', status='running')}), ) diff --git a/tests/test_commit.py b/tests/test_commit.py index 269ac2da..d595268a 100644 --- a/tests/test_commit.py +++ b/tests/test_commit.py @@ -29,7 +29,7 @@ # pylint: disable=attribute-defined-outside-init -class TestProjectWithCommits(object): +class TestProjectWithCommits: def setup_method(self, _method): self.api = Mock(Api) diff --git a/tests/test_git.py b/tests/test_git.py index 11f7763f..c41b71fe 100644 --- a/tests/test_git.py +++ b/tests/test_git.py @@ -13,7 +13,7 @@ # pylint: disable=attribute-defined-outside-init @mock.patch('marge.git._run') -class TestRepo(object): +class TestRepo: def setup_method(self, _method): self.repo = marge.git.Repo( @@ -21,6 +21,7 @@ def setup_method(self, _method): local_path='/tmp/local/path', ssh_key_file=None, timeout=datetime.timedelta(seconds=1), + reference=None, ) def test_clone(self, mocked_run): @@ -76,10 +77,9 @@ def test_reviewer_tagging_failure(self, mocked_run): def fail_on_filter_branch(*args, **unused_kwargs): if 'filter-branch' in args: raise subprocess.CalledProcessError(returncode=1, cmd='git rebase blah') - elif 'rev-parse' in args or 'reset' in args: + if 'rev-parse' in args or 'reset' in args: return mock.Mock() - else: - raise Exception('Unexpected call:', args) + raise Exception('Unexpected call:', args) mocked_run.side_effect = fail_on_filter_branch @@ -191,6 +191,14 @@ def test_passes_ssh_key(self, mocked_run): '%s git -C /tmp/local/path config user.name bart' % git_ssh, ] + def test_passes_reference_repo(self, mocked_run): + repo = 
self.repo._replace(reference='/foo/reference_repo') + repo.clone() + assert get_calls(mocked_run) == [ + 'git clone --origin=origin --reference=/foo/reference_repo ssh://git@git.foo.com/some/repo.git ' + + '/tmp/local/path', + ] + def get_calls(mocked_run): return [bashify(call) for call in mocked_run.call_args_list] diff --git a/tests/test_gitlab.py b/tests/test_gitlab.py index b2e9e7ce..27266516 100644 --- a/tests/test_gitlab.py +++ b/tests/test_gitlab.py @@ -1,7 +1,7 @@ import marge.gitlab as gitlab -class TestVersion(object): +class TestVersion: def test_parse(self): assert gitlab.Version.parse('9.2.2-ee') == gitlab.Version(release=(9, 2, 2), edition='ee') diff --git a/tests/test_interval.py b/tests/test_interval.py index 76f05b35..9f72b947 100644 --- a/tests/test_interval.py +++ b/tests/test_interval.py @@ -1,6 +1,8 @@ from datetime import time import maya +import pendulum +from pendulum.helpers import set_test_now from marge.interval import IntervalUnion, WeeklyInterval @@ -8,7 +10,7 @@ def date(spec): return maya.parse(spec).datetime() -class TestWeekly(object): +class TestWeekly: def test_on_same_week(self): interval = WeeklyInterval('Mon', time(10, 00), 'Fri', time(18, 00)) assert interval.covers(date('Tuesday 3pm')) @@ -39,8 +41,25 @@ def test_from_human(self): assert WeeklyInterval.from_human('Mon@9:00-Fri@17:00') == working_hours assert WeeklyInterval.from_human('Mon@9:00-Tue@17:00') != working_hours + def test_from_human_with_timezone(self): + working_hours = WeeklyInterval('Mon', time(9, 00), 'Fri', time(17, 0)) + + # During summer time + now = pendulum.datetime(2019, 8, 30, tz='Europe/London') + set_test_now(now) + assert WeeklyInterval.from_human( + "Mon 10:00 Europe/London - Fri 18:00 Europe/London" + ) == working_hours + + # Outside summer time + now = pendulum.datetime(2019, 12, 30, tz='Europe/London') + set_test_now(now) + assert WeeklyInterval.from_human( + "Mon 09:00 Europe/London - Fri 17:00 Europe/London" + ) == working_hours -class 
TestIntervalUnion(object): + +class TestIntervalUnion: def test_empty(self): empty_interval = IntervalUnion.empty() assert empty_interval == IntervalUnion([]) @@ -68,3 +87,24 @@ def test_from_human(self): assert interval == IntervalUnion.from_human('Mon@10am - Fri@6pm,Sat@12pm-Sunday 9am') assert IntervalUnion([weekly_1]) == IntervalUnion.from_human('Mon@10am - Fri@6pm') + + def test_from_human_with_timezone(self): + weekly_1 = WeeklyInterval('Mon', time(10, 00), 'Fri', time(18, 00)) + weekly_2 = WeeklyInterval('Sat', time(12, 00), 'Sun', time(9, 00)) + interval = IntervalUnion([weekly_1, weekly_2]) + + # During summer time + now = pendulum.datetime(2019, 8, 30, tz='Europe/London') + set_test_now(now) + assert IntervalUnion.from_human( + "Mon 11:00 Europe/London - Fri 19:00 Europe/London," + "Sat 13:00 Europe/London - Sun 10:00 Europe/London" + ) == interval + + # Outside summer time + now = pendulum.datetime(2019, 12, 30, tz='Europe/London') + set_test_now(now) + assert IntervalUnion.from_human( + "Mon 10:00 Europe/London - Fri 18:00 Europe/London," + "Sat 12:00 Europe/London - Sun 09:00 Europe/London" + ) == interval diff --git a/tests/test_job.py b/tests/test_job.py index a43dd8df..f8ff7208 100644 --- a/tests/test_job.py +++ b/tests/test_job.py @@ -1,21 +1,29 @@ # pylint: disable=protected-access import re from datetime import timedelta -from unittest.mock import ANY, Mock, patch +from unittest.mock import ANY, Mock, patch, create_autospec import pytest +from marge.job import CannotMerge, Fusion, MergeJob, MergeJobOptions, SkipMerge import marge.interval -from marge.job import CannotMerge, MergeJob, MergeJobOptions, SkipMerge +import marge.git +import marge.gitlab +import marge.merge_request +import marge.project +import marge.user -class TestJob(object): +class TestJob: + def _mock_merge_request(self, **options): + return create_autospec(marge.merge_request.MergeRequest, spec_set=True, **options) + def get_merge_job(self, **merge_kwargs): params = { - 'api': 
Mock(), - 'user': Mock(), - 'project': Mock(), - 'repo': Mock(), + 'api': create_autospec(marge.gitlab.Api, spec_set=True), + 'user': create_autospec(marge.user.User, spec_set=True), + 'project': create_autospec(marge.project.Project, spec_set=True), + 'repo': create_autospec(marge.git.Repo, spec_set=True), 'options': MergeJobOptions.default(), } params.update(merge_kwargs) @@ -23,7 +31,7 @@ def get_merge_job(self, **merge_kwargs): def test_get_source_project_when_is_target_project(self): merge_job = self.get_merge_job() - merge_request = Mock() + merge_request = self._mock_merge_request() merge_request.source_project_id = merge_job._project.id r_source_project = merge_job.get_source_project(merge_request) assert r_source_project is merge_job._project @@ -31,7 +39,7 @@ def test_get_source_project_when_is_target_project(self): def test_get_source_project_when_is_fork(self): with patch('marge.job.Project') as project_class: merge_job = self.get_merge_job() - merge_request = Mock() + merge_request = self._mock_merge_request() r_source_project = merge_job.get_source_project(merge_request) project_class.fetch_by_id.assert_called_once_with( @@ -41,26 +49,41 @@ def test_get_source_project_when_is_fork(self): assert r_source_project is not merge_job._project assert r_source_project is project_class.fetch_by_id.return_value - def test_get_mr_ci_status(self): - with patch('marge.job.Pipeline') as pipeline_class: - pipeline = Mock(sha='abc', status='success') - pipeline_class.pipelines_by_branch.return_value = [pipeline] - pipeline.get_jobs.return_value = [{'name': 'job1'}] + @pytest.mark.parametrize( + 'version,use_merge_request_pipelines', + [('9.4.0-ee', False), ('10.5.0-ee', True)], + ) + def test_get_mr_ci_status(self, version, use_merge_request_pipelines): + with patch('marge.job.Pipeline', autospec=True) as pipeline_class: + pipeline_success = [ + Mock(spec=pipeline_class, sha='abc', status='success'), + ] + pipeline_class.pipelines_by_branch.return_value = 
pipeline_success + pipeline_class.pipelines_by_merge_request.return_value = pipeline_success + pipeline_success[0].get_jobs.return_value = [{'name': 'job1'}] merge_job = self.get_merge_job() - merge_request = Mock(sha='abc') + merge_job._api.version.return_value = marge.gitlab.Version.parse(version) + merge_request = self._mock_merge_request(sha='abc') r_ci_status = merge_job.get_mr_ci_status(merge_request) - pipeline_class.pipelines_by_branch.assert_called_once_with( - merge_request.source_project_id, - merge_request.source_branch, - merge_job._api, - ) + if use_merge_request_pipelines: + pipeline_class.pipelines_by_merge_request.assert_called_once_with( + merge_request.target_project_id, + merge_request.iid, + merge_job._api, + ) + else: + pipeline_class.pipelines_by_branch.assert_called_once_with( + merge_request.source_project_id, + merge_request.source_branch, + merge_job._api, + ) assert r_ci_status == 'success' def test_ensure_mergeable_mr_not_assigned(self): merge_job = self.get_merge_job() - merge_request = Mock( + merge_request = self._mock_merge_request( state='opened', work_in_progress=False, squash=False, @@ -71,8 +94,8 @@ def test_ensure_mergeable_mr_not_assigned(self): def test_ensure_mergeable_mr_state_not_ok(self): merge_job = self.get_merge_job() - merge_request = Mock( - assignee_id=merge_job._user.id, + merge_request = self._mock_merge_request( + assignee_ids=[merge_job._user.id], state='merged', work_in_progress=False, squash=False, @@ -83,8 +106,8 @@ def test_ensure_mergeable_mr_state_not_ok(self): def test_ensure_mergeable_mr_not_approved(self): merge_job = self.get_merge_job() - merge_request = Mock( - assignee_id=merge_job._user.id, + merge_request = self._mock_merge_request( + assignee_ids=[merge_job._user.id], state='opened', work_in_progress=False, squash=False, @@ -98,8 +121,8 @@ def test_ensure_mergeable_mr_not_approved(self): def test_ensure_mergeable_mr_wip(self): merge_job = self.get_merge_job() - merge_request = Mock( - 
assignee_id=merge_job._user.id, + merge_request = self._mock_merge_request( + assignee_ids=[merge_job._user.id], state='opened', work_in_progress=True, ) @@ -111,8 +134,8 @@ def test_ensure_mergeable_mr_wip(self): def test_ensure_mergeable_mr_squash_and_trailers(self): merge_job = self.get_merge_job(options=MergeJobOptions.default(add_reviewers=True)) - merge_request = Mock( - assignee_id=merge_job._user.id, + merge_request = self._mock_merge_request( + assignee_ids=[merge_job._user.id], state='opened', work_in_progress=False, squash=True, @@ -128,7 +151,7 @@ def test_ensure_mergeable_mr_squash_and_trailers(self): def test_unassign_from_mr(self): merge_job = self.get_merge_job() - merge_request = Mock() + merge_request = self._mock_merge_request() # when we are not the author merge_job.unassign_from_mr(merge_request) @@ -140,7 +163,7 @@ def test_unassign_from_mr(self): merge_request.unassign.assert_called_once() def test_fuse_using_rebase(self): - merge_job = self.get_merge_job(options=MergeJobOptions.default(use_merge_strategy=False)) + merge_job = self.get_merge_job(options=MergeJobOptions.default(fusion=Fusion.rebase)) branch_a = 'A' branch_b = 'B' @@ -154,7 +177,7 @@ def test_fuse_using_rebase(self): ) def test_fuse_using_merge(self): - merge_job = self.get_merge_job(options=MergeJobOptions.default(use_merge_strategy=True)) + merge_job = self.get_merge_job(options=MergeJobOptions.default(fusion=Fusion.merge)) branch_a = 'A' branch_b = 'B' @@ -168,7 +191,7 @@ def test_fuse_using_merge(self): ) -class TestMergeJobOptions(object): +class TestMergeJobOptions: def test_default(self): assert MergeJobOptions.default() == MergeJobOptions( add_tested=False, @@ -178,7 +201,7 @@ def test_default(self): approval_timeout=timedelta(seconds=0), embargo=marge.interval.IntervalUnion.empty(), ci_timeout=timedelta(minutes=15), - use_merge_strategy=False, + fusion=Fusion.rebase, job_regexp=re.compile('.*'), create_pipeline=False, ) diff --git a/tests/test_merge_request.py 
b/tests/test_merge_request.py index fdeaf6ee..ecd30923 100644 --- a/tests/test_merge_request.py +++ b/tests/test_merge_request.py @@ -1,7 +1,9 @@ -from unittest.mock import Mock +from unittest.mock import call, Mock + +import pytest from marge.gitlab import Api, GET, POST, PUT, Version -from marge.merge_request import MergeRequest +from marge.merge_request import MergeRequest, MergeRequestRebaseFailed _MARGE_ID = 77 @@ -10,7 +12,7 @@ 'iid': 54, 'title': 'a title', 'project_id': 1234, - 'assignee': {'id': _MARGE_ID}, + 'assignees': [{'id': _MARGE_ID}], 'author': {'id': 88}, 'state': 'opened', 'sha': 'dead4g00d', @@ -23,7 +25,7 @@ # pylint: disable=attribute-defined-outside-init -class TestMergeRequest(object): +class TestMergeRequest: def setup_method(self, _method): self.api = Mock(Api) @@ -52,7 +54,7 @@ def test_properties(self): assert self.merge_request.project_id == 1234 assert self.merge_request.iid == 54 assert self.merge_request.title == 'a title' - assert self.merge_request.assignee_id == 77 + assert self.merge_request.assignee_ids == [77] assert self.merge_request.author_id == 88 assert self.merge_request.state == 'opened' assert self.merge_request.source_branch == 'useless_new_feature' @@ -62,8 +64,8 @@ def test_properties(self): assert self.merge_request.target_project_id == 1234 assert self.merge_request.work_in_progress is False - self._load({'assignee': {}}) - assert self.merge_request.assignee_id is None + self._load({'assignees': []}) + assert self.merge_request.assignee_ids == [] def test_comment(self): self.merge_request.comment('blah') @@ -80,7 +82,72 @@ def test_assign(self): def test_unassign(self): self.merge_request.unassign() - self.api.call.assert_called_once_with(PUT('/projects/1234/merge_requests/54', {'assignee_id': None})) + self.api.call.assert_called_once_with(PUT('/projects/1234/merge_requests/54', {'assignee_id': 0})) + + def test_rebase_was_not_in_progress_no_error(self): + expected = [ + ( + 
GET('/projects/1234/merge_requests/54'), # refetch_info -> not in progress + INFO + ), + ( + PUT('/projects/1234/merge_requests/54/rebase'), + True + ), + ( + GET('/projects/1234/merge_requests/54'), # refetch_info -> in progress + dict(INFO, rebase_in_progress=True) + ), + ( + GET('/projects/1234/merge_requests/54'), # refetch_info -> succeeded + dict(INFO, rebase_in_progress=False) + ), + ] + + self.api.call = Mock(side_effect=[resp for (req, resp) in expected]) + self.merge_request.rebase() + self.api.call.assert_has_calls([call(req) for (req, resp) in expected]) + + def test_rebase_was_not_in_progress_error(self): + expected = [ + ( + GET('/projects/1234/merge_requests/54'), # refetch_info -> not in progress + INFO + ), + ( + PUT('/projects/1234/merge_requests/54/rebase'), + True + ), + ( + GET('/projects/1234/merge_requests/54'), # refetch_info -> BOOM + dict(INFO, rebase_in_progress=False, merge_error="Rebase failed. Please rebase locally") + ), + ] + + self.api.call = Mock(side_effect=[resp for (req, resp) in expected]) + + with pytest.raises(MergeRequestRebaseFailed): + self.merge_request.rebase() + self.api.call.assert_has_calls([call(req) for (req, resp) in expected]) + + def test_rebase_was_in_progress_no_error(self): + expected = [ + ( + GET('/projects/1234/merge_requests/54'), # refetch_info -> in progress + dict(INFO, rebase_in_progress=True) + ), + ( + GET('/projects/1234/merge_requests/54'), # refetch_info -> in progress + dict(INFO, rebase_in_progress=True) + ), + ( + GET('/projects/1234/merge_requests/54'), # refetch_info -> succeeded + dict(INFO, rebase_in_progress=False) + ), + ] + self.api.call = Mock(side_effect=[resp for (req, resp) in expected]) + self.merge_request.rebase() + self.api.call.assert_has_calls([call(req) for (req, resp) in expected]) def test_accept(self): self._load(dict(INFO, sha='badc0de')) @@ -109,9 +176,11 @@ def test_accept(self): def test_fetch_all_opened_for_me(self): api = self.api - mr1, mr_not_me, mr2 = INFO, 
dict(INFO, assignee={'id': _MARGE_ID+1}, id=679), dict(INFO, id=678) + mr1, mr_not_me, mr2 = INFO, dict(INFO, assignees=[{'id': _MARGE_ID+1}], id=679), dict(INFO, id=678) api.collect_all_pages = Mock(return_value=[mr1, mr_not_me, mr2]) - result = MergeRequest.fetch_all_open_for_user(1234, user_id=_MARGE_ID, api=api) + result = MergeRequest.fetch_all_open_for_user( + 1234, user_id=_MARGE_ID, api=api, merge_order='created_at' + ) api.collect_all_pages.assert_called_once_with(GET( '/projects/1234/merge_requests', {'state': 'opened', 'order_by': 'created_at', 'sort': 'asc'}, diff --git a/tests/test_pipeline.py b/tests/test_pipeline.py index 7effa177..c2e7e708 100644 --- a/tests/test_pipeline.py +++ b/tests/test_pipeline.py @@ -13,7 +13,7 @@ # pylint: disable=attribute-defined-outside-init -class TestPipeline(object): +class TestPipeline: def setup_method(self, _method): self.api = Mock(Api) @@ -30,6 +30,17 @@ def test_pipelines_by_branch(self): )) assert [pl.info for pl in result] == [pl1, pl2] + def test_pipelines_by_merge_request(self): + api = self.api + pl1, pl2 = INFO, dict(INFO, id=48) + api.call = Mock(return_value=[pl1, pl2]) + + result = Pipeline.pipelines_by_merge_request(project_id=1234, merge_request_iid=1, api=api) + api.call.assert_called_once_with(GET( + '/projects/1234/merge_requests/1/pipelines', + )) + assert [pl.info for pl in result] == [pl2, pl1] + def test_properties(self): pipeline = Pipeline(api=self.api, project_id=1234, info=INFO) assert pipeline.id == 47 diff --git a/tests/test_project.py b/tests/test_project.py index d4484f0c..a6190ddb 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -1,7 +1,7 @@ from unittest.mock import Mock import pytest -from marge.gitlab import Api, GET +from marge.gitlab import Api, GET, Version from marge.project import AccessLevel, Project @@ -11,6 +11,7 @@ 'ssh_url_to_repo': 'ssh://blah.com/cool/project.git', 'merge_requests_enabled': True, 'only_allow_merge_if_pipeline_succeeds': True, + 
'only_allow_merge_if_all_discussions_are_resolved': False, 'permissions': { 'project_access': { 'access_level': AccessLevel.developer.value, @@ -35,7 +36,7 @@ # pylint: disable=attribute-defined-outside-init,duplicate-code -class TestProject(object): +class TestProject: def setup_method(self, _method): self.api = Mock(Api) @@ -61,18 +62,43 @@ def test_fetch_by_path_exists(self): api.collect_all_pages.assert_called_once_with(GET('/projects')) assert project and project.info == prj2 - def test_fetch_all_mine(self): + def fetch_all_mine_with_permissions(self): prj1, prj2 = INFO, dict(INFO, id=678) api = self.api api.collect_all_pages = Mock(return_value=[prj1, prj2]) + api.version = Mock(return_value=Version.parse("11.0.0-ee")) result = Project.fetch_all_mine(api) api.collect_all_pages.assert_called_once_with(GET( '/projects', - {'membership': True, 'with_merge_requests_enabled': True}, + { + 'membership': True, + 'with_merge_requests_enabled': True, + }, )) assert [prj.info for prj in result] == [prj1, prj2] + assert all(prj.access_level == AccessLevel.developer for prj in result) + + def fetch_all_mine_with_min_access_level(self): + prj1, prj2 = dict(INFO, permissions=NONE_ACCESS), dict(INFO, id=678, permissions=NONE_ACCESS) + + api = self.api + api.collect_all_pages = Mock(return_value=[prj1, prj2]) + api.version = Mock(return_value=Version.parse("11.2.0-ee")) + + result = Project.fetch_all_mine(api) + api.collect_all_pages.assert_called_once_with(GET( + '/projects', + { + 'membership': True, + 'with_merge_requests_enabled': True, + "min_access_level": AccessLevel.developer.value, + }, + )) + assert [prj.info for prj in result] == [prj1, prj2] + assert all(prj.info["permissions"]["marge"] for prj in result) + assert all(prj.access_level == AccessLevel.developer for prj in result) def test_properties(self): project = Project(api=self.api, info=INFO) @@ -81,6 +107,7 @@ def test_properties(self): assert project.ssh_url_to_repo == 'ssh://blah.com/cool/project.git' 
assert project.merge_requests_enabled is True assert project.only_allow_merge_if_pipeline_succeeds is True + assert project.only_allow_merge_if_all_discussions_are_resolved is False assert project.access_level == AccessLevel.developer def test_group_access(self): diff --git a/tests/test_single_job.py b/tests/test_single_job.py index 6db34dd2..18081863 100644 --- a/tests/test_single_job.py +++ b/tests/test_single_job.py @@ -1,6 +1,11 @@ +# pylint: disable=too-many-locals import contextlib +from collections import namedtuple from datetime import timedelta -from unittest.mock import ANY, Mock, patch +from functools import partial +from unittest.mock import ANY, patch + +import pytest import marge.commit import marge.interval @@ -11,8 +16,14 @@ import marge.single_merge_job import marge.user from marge.gitlab import GET, PUT +from marge.job import Fusion from marge.merge_request import MergeRequest +from tests.git_repo_mock import RepoMock from tests.gitlab_api_mock import Error, Ok, MockLab +import tests.test_commit as test_commit + + +INITIAL_MR_SHA = test_commit.INFO['id'] def _commit(commit_id, status): @@ -26,52 +37,109 @@ def _commit(commit_id, status): } -def _pipeline(sha1, status): +def _branch(name, protected=False): + return { + 'name': name, + 'protected': protected, + } + + +def _pipeline(sha1, status, ref='useless_new_feature'): return { 'id': 47, 'status': status, - 'ref': 'useless_new_feature', + 'ref': ref, 'sha': sha1, 'jobs': [{'name': 'job1'}, {'name': 'job2'}], } class SingleJobMockLab(MockLab): - def __init__(self, gitlab_url=None): - super().__init__(gitlab_url) + def __init__( + self, + *, + initial_master_sha, + rewritten_sha, + gitlab_url=None, + fork=False, + expect_gitlab_rebase=False, + merge_request_options=None, + ): + super().__init__( + initial_master_sha, + gitlab_url, + fork=fork, + merge_request_options=merge_request_options, + ) api = self.api - self.rewritten_sha = rewritten_sha = 'af7a' + self.rewritten_sha = rewritten_sha + if 
expect_gitlab_rebase: + api.add_transition( + PUT( + '/projects/{project_id}/merge_requests/{iid}/rebase'.format( + project_id=self.merge_request_info['project_id'], + iid=self.merge_request_info['iid'], + ), + ), + Ok(True), + from_state='initial', + to_state='rebase-in-progress', + ) + api.add_merge_request( + dict(self.merge_request_info, rebase_in_progress=True), + from_state='rebase-in-progress', + to_state='rebase-finished' + ) + api.add_merge_request( + dict( + self.merge_request_info, + rebase_in_progress=False, + sha=rewritten_sha, + ), + from_state='rebase-finished', + to_state='pushed', + ) + api.add_pipelines( - self.project_info['id'], - _pipeline(sha1=rewritten_sha, status='running'), + self.merge_request_info['source_project_id'], + _pipeline(sha1=rewritten_sha, status='running', ref=self.merge_request_info['source_branch']), from_state='pushed', to_state='passed', ) api.add_pipelines( - self.project_info['id'], - _pipeline(sha1=rewritten_sha, status='success'), + self.merge_request_info['source_project_id'], + _pipeline(sha1=rewritten_sha, status='success', ref=self.merge_request_info['source_branch']), from_state=['passed', 'merged'], ) + source_project_id = self.merge_request_info['source_project_id'] api.add_transition( - GET('/projects/1234/repository/branches/useless_new_feature'), + GET( + '/projects/{}/repository/branches/{}'.format( + source_project_id, self.merge_request_info['source_branch'], + ), + ), Ok({'commit': _commit(commit_id=rewritten_sha, status='running')}), from_state='pushed', ) api.add_transition( - GET('/projects/1234/repository/branches/useless_new_feature'), + GET( + '/projects/{}/repository/branches/{}'.format( + source_project_id, self.merge_request_info['source_branch'], + ), + ), Ok({'commit': _commit(commit_id=rewritten_sha, status='success')}), from_state='passed' ) api.add_transition( PUT( - '/projects/1234/merge_requests/54/merge', + 
'/projects/1234/merge_requests/{iid}/merge'.format(iid=self.merge_request_info['iid']), dict(sha=rewritten_sha, should_remove_source_branch=True, merge_when_pipeline_succeeds=True), ), Ok({}), - from_state='passed', to_state='merged', + from_state=['passed', 'skipped'], to_state='merged', ) api.add_merge_request(dict(self.merge_request_info, state='merged'), from_state='merged') api.add_transition( - GET('/projects/1234/repository/branches/master'), + GET('/projects/1234/repository/branches/{}'.format(self.merge_request_info['target_branch'])), Ok({'commit': {'id': self.rewritten_sha}}), from_state='merged' ) @@ -85,10 +153,13 @@ def __init__(self, gitlab_url=None): "I'm broken on the inside, please somebody fix me... :cry:" ) - def push_updated(self, *unused_args, **unused_kwargs): + def push_updated(self, remote_url, remote_branch, old_sha, new_sha): + source_project = self.forked_project_info or self.project_info + assert remote_url == source_project['ssh_url_to_repo'] + assert remote_branch == self.merge_request_info['source_branch'] + assert old_sha == INITIAL_MR_SHA + assert new_sha == self.rewritten_sha self.api.state = 'pushed' - updated_sha = 'deadbeef' - return self.initial_master_sha, updated_sha, self.rewritten_sha @contextlib.contextmanager def expected_failure(self, message): @@ -99,7 +170,10 @@ def assign_to_author(): author_assigned = True self.api.add_transition( - PUT('/projects/1234/merge_requests/54', args={'assignee_id': self.author_id}), + PUT( + '/projects/1234/merge_requests/{iid}'.format(iid=self.merge_request_info['iid']), + args={'assignee_id': self.author_id}, + ), assign_to_author, ) error_note = "I couldn't merge this branch: %s" % message @@ -110,83 +184,340 @@ def assign_to_author(): assert author_assigned assert error_note in self.api.notes - @contextlib.contextmanager - def branch_update(self, side_effect=None): - if side_effect is None: - side_effect = self.push_updated - with patch.object( - marge.single_merge_job.SingleMergeJob, 
- 'update_from_target_branch_and_push', - side_effect=side_effect, - ): + +class TestUpdateAndAccept: # pylint: disable=too-many-public-methods + Mocks = namedtuple('Mocks', 'mocklab api job') + + @pytest.fixture(params=[True, False]) + def fork(self, request): + return request.param + + @pytest.fixture(params=list(Fusion)) + def fusion(self, request): + return request.param + + @pytest.fixture(params=[True, False]) + def add_tested(self, request): + return request.param + + @pytest.fixture(params=[True, False]) + def add_part_of(self, request): + return request.param + + @pytest.fixture(params=[False]) # TODO: Needs support in mocklab + def add_reviewers(self, request): + return request.param + + @pytest.fixture() + def options_factory(self, fusion, add_tested, add_reviewers, add_part_of): + def make_options(**kwargs): + fixture_opts = { + 'fusion': fusion, + 'add_tested': add_tested, + 'add_part_of': add_part_of, + 'add_reviewers': add_reviewers, + } + assert not set(fixture_opts).intersection(kwargs) + kwargs.update(fixture_opts) + return marge.job.MergeJobOptions.default(**kwargs) + yield make_options + + @pytest.fixture() + def update_sha(self, fusion): + def new_sha(new, old): + pats = { + marge.job.Fusion.rebase: 'rebase(%s onto %s)', + marge.job.Fusion.merge: 'merge(%s with %s)', + marge.job.Fusion.gitlab_rebase: 'rebase(%s onto %s)', + } + return pats[fusion] % (new, old) + yield new_sha + + @pytest.fixture() + def rewrite_sha(self, fusion, add_tested, add_reviewers, add_part_of): + def new_sha(sha): + # NB. 
The order matches the one used in the Git mock to run filters + if add_tested and fusion == marge.job.Fusion.rebase: + sha = 'add-tested-by(%s)' % sha + + if add_reviewers and fusion != marge.job.Fusion.gitlab_rebase: + sha = 'add-reviewed-by(%s)' % sha + + if add_part_of and fusion != marge.job.Fusion.gitlab_rebase: + sha = 'add-part-of(%s)' % sha + + return sha + yield new_sha + + @pytest.fixture(autouse=True) + def patch_sleep(self): + with patch('time.sleep'): yield + @pytest.fixture() + def mocklab_factory(self, fork, fusion): + expect_rebase = fusion is Fusion.gitlab_rebase + return partial(SingleJobMockLab, fork=fork, expect_gitlab_rebase=expect_rebase) + + @pytest.fixture() + def mocks_factory(self, mocklab_factory, options_factory, update_sha, rewrite_sha): + # pylint: disable=too-many-locals + def make_mocks( + initial_master_sha=None, rewritten_sha=None, + extra_opts=None, extra_mocklab_opts=None, + on_push=None + ): + options = options_factory(**(extra_opts or {})) + initial_master_sha = initial_master_sha or'505050505e' + + if not rewritten_sha: + rewritten_sha = rewrite_sha(update_sha(INITIAL_MR_SHA, initial_master_sha)) + + mocklab = mocklab_factory( + initial_master_sha=initial_master_sha, + rewritten_sha=rewritten_sha, + **(extra_mocklab_opts or {}) + ) + api = mocklab.api + + project_id = mocklab.project_info['id'] + merge_request_iid = mocklab.merge_request_info['iid'] + + project = marge.project.Project.fetch_by_id(project_id, api) + forked_project = None + if mocklab.forked_project_info: + forked_project_id = mocklab.forked_project_info['id'] + forked_project = marge.project.Project.fetch_by_id(forked_project_id, api) + + merge_request = MergeRequest.fetch_by_iid(project_id, merge_request_iid, api) + + def assert_can_push(*_args, **_kwargs): + assert options.fusion is not Fusion.gitlab_rebase + + callback = on_push or mocklab.push_updated + repo = RepoMock.init_for_merge_request( + merge_request=merge_request, + 
initial_target_sha=mocklab.initial_master_sha, + project=project, + forked_project=forked_project, + ) + repo.mock_impl.on_push_callbacks.append(assert_can_push) + repo.mock_impl.on_push_callbacks.append(callback) + + user = marge.user.User.myself(api) + job = marge.single_merge_job.SingleMergeJob( + api=api, user=user, + project=project, merge_request=merge_request, repo=repo, + options=options, + ) + return self.Mocks(mocklab=mocklab, api=api, job=job) + + yield make_mocks + + @pytest.fixture() + def mocks(self, mocks_factory): + yield mocks_factory() + + def test_succeeds_first_time(self, mocks): + _, api, job = mocks + job.execute() + assert api.state == 'merged' + assert api.notes == [] -# pylint: disable=attribute-defined-outside-init -@patch('time.sleep') -class TestUpdateAndAccept(object): + def test_succeeds_with_updated_branch(self, mocks): + mocklab, api, job = mocks + api.add_transition( + GET( + '/projects/1234/repository/branches/{source}'.format( + source=mocklab.merge_request_info['source_branch'], + ), + ), + Ok({'commit': {'id': mocklab.rewritten_sha}}), + from_state='initial', to_state='pushed', + ) + job.execute() - def setup_method(self, _method): - self.mocklab = SingleJobMockLab() - self.api = self.mocklab.api + assert api.state == 'merged' + assert api.notes == [] - def make_job(self, options=None): - api, mocklab = self.api, self.mocklab + def test_succeeds_if_skipped(self, mocks): + mocklab, api, job = mocks + api.add_pipelines( + mocklab.merge_request_info['source_project_id'], + _pipeline(sha1=mocklab.rewritten_sha, status='running'), + from_state='pushed', to_state='skipped', + ) + api.add_pipelines( + mocklab.merge_request_info['source_project_id'], + _pipeline(sha1=mocklab.rewritten_sha, status='skipped'), + from_state=['skipped', 'merged'], + ) + job.execute() - project_id = mocklab.project_info['id'] - merge_request_iid = mocklab.merge_request_info['iid'] + assert api.state == 'merged' + assert api.notes == [] - project = 
marge.project.Project.fetch_by_id(project_id, api) - merge_request = MergeRequest.fetch_by_iid(project_id, merge_request_iid, api) + def test_succeeds_if_source_is_master(self, mocks_factory): + mocklab, api, job = mocks_factory( + extra_mocklab_opts=dict(merge_request_options={ + 'source_branch': 'master', + 'target_branch': 'production', + }), + ) + api.add_transition( + GET( + '/projects/1234/repository/branches/{source}'.format( + source=mocklab.merge_request_info['source_branch'], + ), + ), + Ok({'commit': {'id': mocklab.rewritten_sha}}), + from_state='initial', to_state='pushed', + ) + job.execute() - repo = Mock(marge.git.Repo) - options = options or marge.job.MergeJobOptions.default() - user = marge.user.User.myself(self.api) - return marge.single_merge_job.SingleMergeJob( - api=api, user=user, - project=project, merge_request=merge_request, repo=repo, - options=options, + assert api.state == 'merged' + assert api.notes == [] + + def test_fails_if_ci_fails(self, mocks): + mocklab, api, job = mocks + api.add_pipelines( + mocklab.merge_request_info['source_project_id'], + _pipeline(sha1=mocklab.rewritten_sha, status='running'), + from_state='pushed', to_state='failed', + ) + api.add_pipelines( + mocklab.merge_request_info['source_project_id'], + _pipeline(sha1=mocklab.rewritten_sha, status='failed'), + from_state=['failed'], ) - def test_succeeds_first_time(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab - with mocklab.branch_update(): - job = self.make_job(marge.job.MergeJobOptions.default(add_tested=True, add_reviewers=False)) + with mocklab.expected_failure("CI failed!"): job.execute() - assert api.state == 'merged' - assert api.notes == [] + assert api.state == 'failed' + + def test_fails_if_ci_canceled(self, mocks): + mocklab, api, job = mocks + api.add_pipelines( + mocklab.merge_request_info['source_project_id'], + _pipeline(sha1=mocklab.rewritten_sha, status='running'), + from_state='pushed', to_state='canceled', + ) + 
api.add_pipelines( + mocklab.merge_request_info['source_project_id'], + _pipeline(sha1=mocklab.rewritten_sha, status='canceled'), + from_state=['canceled'], + ) + + with mocklab.expected_failure("Someone canceled the CI."): + job.execute() + + assert api.state == 'canceled' - def test_fails_on_not_acceptable_if_master_did_not_move(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab + def test_fails_on_not_acceptable_if_master_did_not_move(self, mocks): + mocklab, api, job = mocks new_branch_head_sha = '99ba110035' api.add_transition( - GET('/projects/1234/repository/branches/useless_new_feature'), + GET( + '/projects/{source_project_id}/repository/branches/useless_new_feature'.format( + source_project_id=mocklab.merge_request_info['source_project_id'], + ), + ), Ok({'commit': _commit(commit_id=new_branch_head_sha, status='success')}), from_state='pushed', to_state='pushed_but_head_changed' ) - with mocklab.branch_update(): - with mocklab.expected_failure("Someone pushed to branch while we were trying to merge"): - job = self.make_job(marge.job.MergeJobOptions.default(add_tested=True, add_reviewers=False)) - job.execute() + with mocklab.expected_failure("Someone pushed to branch while we were trying to merge"): + job.execute() assert api.state == 'pushed_but_head_changed' assert api.notes == [ "I couldn't merge this branch: Someone pushed to branch while we were trying to merge", ] - def test_succeeds_second_time_if_master_moved(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab + def test_fails_if_branch_is_protected(self, mocks_factory, fusion): + def reject_push(*_args, **_kwargs): + raise marge.git.GitError() + + mocklab, api, job = mocks_factory(on_push=reject_push) + api.add_transition( + GET( + '/projects/{source_project_id}/repository/branches/useless_new_feature'.format( + source_project_id=mocklab.merge_request_info['source_project_id'], + ), + ), + Ok(_branch('useless_new_feature', protected=True)), + from_state='initial', 
to_state='protected' + ) + + if fusion is Fusion.gitlab_rebase: + api.add_transition( + PUT( + '/projects/{project_id}/merge_requests/{iid}/rebase'.format( + project_id=mocklab.merge_request_info['project_id'], + iid=mocklab.merge_request_info['iid'], + ), + ), + Error(marge.gitlab.MethodNotAllowed(405, {'message': '405 Method Not Allowed'})), + from_state='initial', + ) + + with mocklab.expected_failure("Sorry, I can't modify protected branches!"): + job.execute() + + assert api.state == 'protected' + + def test_second_time_if_master_moved(self, mocks_factory, fusion, update_sha, rewrite_sha): + initial_master_sha = 'eaeaea9e9e' moved_master_sha = 'fafafa' - first_rewritten_sha = '1o1' + first_rewritten_sha = rewrite_sha(update_sha(INITIAL_MR_SHA, initial_master_sha)) + second_rewritten_sha = rewrite_sha(update_sha(first_rewritten_sha, moved_master_sha)) + + # pylint: disable=unused-argument + def push_effects(remote_url, remote_branch, old_sha, new_sha): + nonlocal mocklab, target_branch, remote_target_repo + + if api.state == 'initial': + assert old_sha == INITIAL_MR_SHA + assert new_sha == first_rewritten_sha + api.state = 'pushed_but_master_moved' + remote_target_repo.set_ref(target_branch, moved_master_sha) + elif api.state == 'merge_rejected': + assert new_sha == second_rewritten_sha + api.state = 'pushed' + + mocklab, api, job = mocks_factory( + initial_master_sha=initial_master_sha, + rewritten_sha=second_rewritten_sha, + on_push=push_effects, + ) + + source_project_info = mocklab.forked_project_info or mocklab.project_info + target_project_info = mocklab.project_info + + source_project_url = source_project_info['ssh_url_to_repo'] + target_project_url = target_project_info['ssh_url_to_repo'] + + source_branch = mocklab.merge_request_info['source_branch'] + target_branch = mocklab.merge_request_info['target_branch'] + + remote_source_repo = job.repo.mock_impl.remote_repos[source_project_url] + remote_target_repo = 
job.repo.mock_impl.remote_repos[target_project_url] + + api.add_merge_request( + dict( + mocklab.merge_request_info, + sha=first_rewritten_sha, + ), + from_state=['pushed_but_master_moved', 'merge_rejected'], + ) api.add_pipelines( - mocklab.project_info['id'], + mocklab.merge_request_info['source_project_id'], _pipeline(sha1=first_rewritten_sha, status='success'), - from_state=['pushed_but_master_moved', 'merged_rejected'], + from_state=['pushed_but_master_moved', 'merge_rejected'], ) api.add_transition( PUT( - '/projects/1234/merge_requests/54/merge', + '/projects/1234/merge_requests/{iid}/merge'.format(iid=mocklab.merge_request_info['iid']), dict( sha=first_rewritten_sha, should_remove_source_branch=True, @@ -197,7 +528,11 @@ def test_succeeds_second_time_if_master_moved(self, unused_time_sleep): from_state='pushed_but_master_moved', to_state='merge_rejected', ) api.add_transition( - GET('/projects/1234/repository/branches/useless_new_feature'), + GET( + '/projects/{source_project_id}/repository/branches/useless_new_feature'.format( + source_project_id=mocklab.merge_request_info['source_project_id'], + ), + ), Ok({'commit': _commit(commit_id=first_rewritten_sha, status='success')}), from_state='pushed_but_master_moved' ) @@ -206,31 +541,38 @@ def test_succeeds_second_time_if_master_moved(self, unused_time_sleep): Ok({'commit': _commit(commit_id=moved_master_sha, status='success')}), from_state='merge_rejected' ) - - def push_effects(): - assert api.state == 'initial' - api.state = 'pushed_but_master_moved' - yield mocklab.initial_master_sha, 'f00ba4', first_rewritten_sha - - assert api.state == 'merge_rejected' - api.state = 'pushed' - yield moved_master_sha, 'deadbeef', mocklab.rewritten_sha - - with mocklab.branch_update(side_effect=push_effects()): - job = self.make_job(marge.job.MergeJobOptions.default(add_tested=True, add_reviewers=False)) - job.execute() - + if fusion is Fusion.gitlab_rebase: + rebase_url = 
'/projects/{project_id}/merge_requests/{iid}/rebase'.format( + project_id=mocklab.merge_request_info['project_id'], + iid=mocklab.merge_request_info['iid'], + ) + + api.add_transition( + PUT(rebase_url), Ok(True), + from_state='initial', to_state='pushed_but_master_moved', + side_effect=lambda: ( + remote_source_repo.set_ref(source_branch, first_rewritten_sha), + remote_target_repo.set_ref(target_branch, moved_master_sha) + ) + ) + api.add_transition( + PUT(rebase_url), Ok(True), + from_state='merge_rejected', to_state='rebase-in-progress', + side_effect=lambda: remote_source_repo.set_ref(source_branch, second_rewritten_sha) + ) + + job.execute() assert api.state == 'merged' assert api.notes == [ "My job would be easier if people didn't jump the queue and push directly... *sigh*", ] - def test_handles_races_for_merging(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab + def test_handles_races_for_merging(self, mocks): + mocklab, api, job = mocks rewritten_sha = mocklab.rewritten_sha api.add_transition( PUT( - '/projects/1234/merge_requests/54/merge', + '/projects/1234/merge_requests/{iid}/merge'.format(iid=mocklab.merge_request_info['iid']), dict(sha=rewritten_sha, should_remove_source_branch=True, merge_when_pipeline_succeeds=True), ), Error(marge.gitlab.NotFound(404, {'message': '404 Branch Not Found'})), @@ -240,18 +582,16 @@ def test_handles_races_for_merging(self, unused_time_sleep): dict(mocklab.merge_request_info, state='merged'), from_state='someone_else_merged', ) - with mocklab.branch_update(): - job = self.make_job() - job.execute() + job.execute() assert api.state == 'someone_else_merged' assert api.notes == [] - def test_handles_request_becoming_wip_after_push(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab + def test_handles_request_becoming_wip_after_push(self, mocks): + mocklab, api, job = mocks rewritten_sha = mocklab.rewritten_sha api.add_transition( PUT( - '/projects/1234/merge_requests/54/merge', + 
'/projects/1234/merge_requests/{iid}/merge'.format(iid=mocklab.merge_request_info['iid']), dict(sha=rewritten_sha, should_remove_source_branch=True, merge_when_pipeline_succeeds=True), ), Error(marge.gitlab.MethodNotAllowed(405, {'message': '405 Method Not Allowed'})), @@ -262,18 +602,17 @@ def test_handles_request_becoming_wip_after_push(self, unused_time_sleep): from_state='now_is_wip', ) message = 'The request was marked as WIP as I was processing it (maybe a WIP commit?)' - with mocklab.branch_update(), mocklab.expected_failure(message): - job = self.make_job() + with mocklab.expected_failure(message): job.execute() assert api.state == 'now_is_wip' assert api.notes == ["I couldn't merge this branch: %s" % message] - def test_guesses_git_hook_error_on_merge_refusal(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab + def test_guesses_git_hook_error_on_merge_refusal(self, mocks): + mocklab, api, job = mocks rewritten_sha = mocklab.rewritten_sha api.add_transition( PUT( - '/projects/1234/merge_requests/54/merge', + '/projects/1234/merge_requests/{iid}/merge'.format(iid=mocklab.merge_request_info['iid']), dict(sha=rewritten_sha, should_remove_source_branch=True, merge_when_pipeline_succeeds=True), ), Error(marge.gitlab.MethodNotAllowed(405, {'message': '405 Method Not Allowed'})), @@ -287,18 +626,42 @@ def test_guesses_git_hook_error_on_merge_refusal(self, unused_time_sleep): 'GitLab refused to merge this branch. I suspect that a Push Rule or a git-hook ' 'is rejecting my commits; maybe my email needs to be white-listed?' 
) - with mocklab.branch_update(), mocklab.expected_failure(message): - job = self.make_job() + with mocklab.expected_failure(message): job.execute() assert api.state == 'rejected_by_git_hook' assert api.notes == ["I couldn't merge this branch: %s" % message] - def test_discovers_if_someone_closed_the_merge_request(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab + def test_assumes_unresolved_discussions_on_merge_refusal(self, mocks): + mocklab, api, job = mocks rewritten_sha = mocklab.rewritten_sha api.add_transition( PUT( - '/projects/1234/merge_requests/54/merge', + '/projects/1234/merge_requests/{iid}/merge'.format(iid=mocklab.merge_request_info['iid']), + dict(sha=rewritten_sha, should_remove_source_branch=True, merge_when_pipeline_succeeds=True), + ), + Error(marge.gitlab.MethodNotAllowed(405, {'message': '405 Method Not Allowed'})), + from_state='passed', to_state='unresolved_discussions', + ) + api.add_merge_request( + dict(mocklab.merge_request_info), + from_state='unresolved_discussions', + ) + message = ( + "Gitlab refused to merge this request and I don't know why! " + "Maybe you have unresolved discussions?" 
+ ) + with mocklab.expected_failure(message): + with patch.dict(mocklab.project_info, only_allow_merge_if_all_discussions_are_resolved=True): + job.execute() + assert api.state == 'unresolved_discussions' + assert api.notes == ["I couldn't merge this branch: %s" % message] + + def test_discovers_if_someone_closed_the_merge_request(self, mocks): + mocklab, api, job = mocks + rewritten_sha = mocklab.rewritten_sha + api.add_transition( + PUT( + '/projects/1234/merge_requests/{iid}/merge'.format(iid=mocklab.merge_request_info['iid']), dict(sha=rewritten_sha, should_remove_source_branch=True, merge_when_pipeline_succeeds=True), ), Error(marge.gitlab.MethodNotAllowed(405, {'message': '405 Method Not Allowed'})), @@ -309,37 +672,34 @@ def test_discovers_if_someone_closed_the_merge_request(self, unused_time_sleep): from_state='oops_someone_closed_it', ) message = 'Someone closed the merge request while I was attempting to merge it.' - with mocklab.branch_update(), mocklab.expected_failure(message): - job = self.make_job() + with mocklab.expected_failure(message): job.execute() assert api.state == 'oops_someone_closed_it' assert api.notes == ["I couldn't merge this branch: %s" % message] - def test_tells_explicitly_that_gitlab_refused_to_merge(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab + def test_tells_explicitly_that_gitlab_refused_to_merge(self, mocks): + mocklab, api, job = mocks rewritten_sha = mocklab.rewritten_sha api.add_transition( PUT( - '/projects/1234/merge_requests/54/merge', + '/projects/1234/merge_requests/{iid}/merge'.format(iid=mocklab.merge_request_info['iid']), dict(sha=rewritten_sha, should_remove_source_branch=True, merge_when_pipeline_succeeds=True), ), Error(marge.gitlab.MethodNotAllowed(405, {'message': '405 Method Not Allowed'})), from_state='passed', to_state='rejected_for_mysterious_reasons', ) message = "GitLab refused to merge this request and I don't know why!" 
- with mocklab.branch_update(), mocklab.expected_failure(message): - job = self.make_job() + with mocklab.expected_failure(message): job.execute() assert api.state == 'rejected_for_mysterious_reasons' assert api.notes == ["I couldn't merge this branch: %s" % message] - def test_wont_merge_wip_stuff(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab + def test_wont_merge_wip_stuff(self, mocks): + mocklab, api, job = mocks wip_merge_request = dict(mocklab.merge_request_info, work_in_progress=True) api.add_merge_request(wip_merge_request, from_state='initial') with mocklab.expected_failure("Sorry, I can't merge requests marked as Work-In-Progress!"): - job = self.make_job() job.execute() assert api.state == 'initial' @@ -347,48 +707,51 @@ def test_wont_merge_wip_stuff(self, unused_time_sleep): "I couldn't merge this branch: Sorry, I can't merge requests marked as Work-In-Progress!", ] - def test_wont_merge_branches_with_autosquash_if_rewriting(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab + def test_wont_merge_branches_with_autosquash_if_rewriting(self, mocks): + mocklab, api, job = mocks + autosquash_merge_request = dict(mocklab.merge_request_info, squash=True) api.add_merge_request(autosquash_merge_request, from_state='initial') + admin_user = dict(mocklab.user_info, is_admin=True) api.add_user(admin_user, is_current=True) - message = "Sorry, merging requests marked as auto-squash would ruin my commit tagging!" - - for rewriting_opt in ('add_tested', 'add_reviewers'): + if job.opts.requests_commit_tagging: + message = "Sorry, merging requests marked as auto-squash would ruin my commit tagging!" 
with mocklab.expected_failure(message): - job = self.make_job(marge.job.MergeJobOptions.default(**{rewriting_opt: True})) job.execute() - assert api.state == 'initial' - - with mocklab.branch_update(): - job = self.make_job() + else: job.execute() - assert api.state == 'merged' + assert api.state == 'merged' - @patch('marge.job.log') - def test_waits_for_approvals(self, mock_log, unused_time_sleep): - api, mocklab = self.api, self.mocklab - with mocklab.branch_update(): - job = self.make_job( - marge.job.MergeJobOptions.default(approval_timeout=timedelta(seconds=5), reapprove=True)) - job.execute() + @patch('marge.job.log', autospec=True) + def test_waits_for_approvals(self, mock_log, mocks_factory): + five_secs = timedelta(seconds=5) + _, api, job = mocks_factory( + extra_opts=dict(approval_timeout=five_secs, reapprove=True) + ) + job.execute() mock_log.info.assert_any_call('Checking if approvals have reset') mock_log.debug.assert_any_call('Approvals haven\'t reset yet, sleeping for %s secs', ANY) assert api.state == 'merged' - def test_fails_if_changes_already_exist(self, unused_time_sleep): - api, mocklab = self.api, self.mocklab - expected_message = 'These changes already exist in branch `{}`.'.format( - mocklab.merge_request_info['target_branch'], - ) + def test_fails_if_changes_already_exist(self, mocks): + mocklab, api, job = mocks + + source_project_info = mocklab.forked_project_info or mocklab.project_info + source_project_url = source_project_info['ssh_url_to_repo'] + target_project_url = mocklab.project_info['ssh_url_to_repo'] + remote_source_repo = job.repo.mock_impl.remote_repos[source_project_url] + remote_target_repo = job.repo.mock_impl.remote_repos[target_project_url] + source_branch = mocklab.merge_request_info['source_branch'] + target_branch = mocklab.merge_request_info['target_branch'] + + remote_target_repo.set_ref(target_branch, remote_source_repo.get_ref(source_branch)) + expected_message = 'These changes already exist in branch `%s`.' 
% target_branch + with mocklab.expected_failure(expected_message): - job = self.make_job() - job.repo.rebase.return_value = mocklab.initial_master_sha - job.repo.get_commit_hash.return_value = mocklab.initial_master_sha job.execute() assert api.state == 'initial' diff --git a/tests/test_store.py b/tests/test_store.py index 20ff3d6e..a9f3a698 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -13,7 +13,7 @@ # pylint: disable=attribute-defined-outside-init @mock.patch('marge.git._run') -class TestRepoManager(object): +class TestRepoManager: def setup_method(self, _method): user = marge.user.User(api=None, info=dict(USER_INFO, name='Peter Parker', email='pparker@bugle.com')) diff --git a/tests/test_user.py b/tests/test_user.py index 1f9eb081..a5bccb9a 100644 --- a/tests/test_user.py +++ b/tests/test_user.py @@ -14,7 +14,7 @@ # pylint: disable=attribute-defined-outside-init -class TestProjectWithUser(object): +class TestProjectWithUser: def setup_method(self, _method): self.api = Mock(Api) diff --git a/version b/version index ee6cdce3..f374f666 100644 --- a/version +++ b/version @@ -1 +1 @@ -0.6.1 +0.9.1