Compare commits


102 Commits

Author SHA1 Message Date
0581559783 Bump to the next version 2021-07-06 12:34:15 +02:00
cdd196346a Add label generation checking
Closes https://git.autonomic.zone/coop-cloud/abra/issues/186.
2021-07-06 12:27:06 +02:00
78287fec37 Add change log entry 2021-07-06 11:51:03 +02:00
4b820457de Output diff before committing changes
See https://git.autonomic.zone/coop-cloud/abra/issues/174.
2021-07-06 11:50:09 +02:00
8519cb8661 Appease shellcheck and revert to original quote handling 2021-07-06 11:18:50 +02:00
3c30d3621b Support restarting a service
Closes https://git.autonomic.zone/coop-cloud/abra/issues/200.
2021-07-06 11:12:49 +02:00
76a0badc5a Add new change log entries 2021-07-06 10:53:31 +02:00
b2e0a95a11 Override ARGS and fail correctly
See https://github.com/Coop-Cloud/peertube/issues/1.
2021-07-06 10:53:08 +02:00
ace854e1d7 Don't describe the what here 2021-07-06 10:40:09 +02:00
bd7688f9e7 Do not install deps on CLI upgrade [ci skip] 2021-07-06 00:13:22 +02:00
d7a4c2cebe This has to be already installed [ci skip] 2021-07-06 00:12:02 +02:00
d9a0922b2c Add upgrade note [ci skip] 2021-07-06 00:09:25 +02:00
32a86e0317 Add change log entry 2021-07-06 00:06:39 +02:00
b1c5391a91 Use latest yq 2021-07-06 00:06:07 +02:00
b813f6b90e Drop additional check
This forces the `require yq` to only happen at the start of the
top-level functions which makes more sense and is easier to manage.

Closes https://git.autonomic.zone/coop-cloud/abra/issues/183.
2021-07-06 00:04:19 +02:00
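A rough sketch of the pattern this commit describes, assuming a top-level subcommand as the caller; the function name and body here are illustrative, only `require_yq` and `$YQ` come from the script itself:

```sh
sub_recipe_versions() {
  # Vendor/check yq once, up front, so everything below can rely on it
  require_yq

  # ... the rest of the subcommand can now invoke "$YQ" safely ...
}
```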
73de76fc04 Remove unused function 2021-07-06 00:04:14 +02:00
5c5cbbf20f Add ASCII radness [ci skip] 2021-07-06 00:01:32 +02:00
19498d9494 Fix typo [ci skip] 2021-07-05 23:58:39 +02:00
6f6a9ab413 Update change log entry 2021-07-05 23:56:37 +02:00
aa81d26d08 Use pwgen/pwqgen if installed
Closes https://git.autonomic.zone/coop-cloud/abra/issues/197.
2021-07-05 23:55:23 +02:00
cc4efe69bf Merge pull request 'Install requirements via install script' (#198) from requirements-install-script into main
Reviewed-on: https://git.autonomic.zone/coop-cloud/abra/pulls/198
2021-07-05 23:49:26 +02:00
6c7b53f585 Install requirements via install script
Closes https://git.autonomic.zone/coop-cloud/abra/issues/196.
2021-07-05 23:48:03 +02:00
32bf28e7a9 Follow our usual convention of writing it now 2021-07-05 23:41:45 +02:00
624815e5b1 Use global ignore and avoid this hack 2021-07-05 23:35:48 +02:00
e9fb9e56ad Drop pwgen/pwqgen requirements
Closes https://git.autonomic.zone/coop-cloud/abra/issues/167.
2021-07-04 23:06:20 +02:00
283eb21e29 Add log entry 2021-07-04 22:42:44 +02:00
92f49d56dd Use case insensitive awk/sed
Closes https://git.autonomic.zone/coop-cloud/abra/issues/170.
2021-07-04 22:41:48 +02:00
d9ff48b55b Add change log entry 2021-07-04 22:14:42 +02:00
3d8ce3492e Reflect chaos deploy when selecting recipe version
Closes https://git.autonomic.zone/coop-cloud/abra/issues/185.
2021-07-04 22:13:57 +02:00
07696760b7 Output all elements when debugging 2021-07-04 21:59:47 +02:00
43b4a01f8a Make logging reflect reality + do more debugging
See https://git.autonomic.zone/coop-cloud/abra/issues/193.
2021-07-04 21:51:55 +02:00
bb3b324e07 Add change log entry 2021-07-04 21:45:02 +02:00
eb9d1b883b Ignore this warning for now 2021-07-04 21:44:13 +02:00
6f6140ced2 Merge pull request 'Don't generate commented out secrets. Throw an error when can't put the secret in docker' (#195) from knoflook/abra:main into main
Reviewed-on: https://git.autonomic.zone/coop-cloud/abra/pulls/195
2021-07-04 21:18:13 +02:00
cb225908d0 Don't generate commented out secrets. Throw an error when can't put the secret in docker 2021-07-03 19:43:42 +02:00
f2892bad6f Fix that sentence [ci skip] 2021-06-27 21:14:19 +02:00
480b1453ec Add change log entry [ci skip] 2021-06-27 21:13:21 +02:00
0ab2b3a652 Merge pull request 'Make ensure_stack_deployed more reliable' (#177) from improved-stack-deploy-guarantees into main
Reviewed-on: https://git.autonomic.zone/coop-cloud/abra/pulls/177
2021-06-27 21:03:48 +02:00
93714a593b ensure_stack_deployed is now somewhat more reliable
Closes https://git.autonomic.zone/coop-cloud/abra/issues/165.
2021-06-27 21:03:24 +02:00
57f3f96bbc Use new name 2021-06-17 21:48:55 +02:00
e1959506c7 Add change log entry [ci skip] 2021-06-17 21:42:33 +02:00
7482362af1 Support logging in via Skopeo
See https://git.autonomic.zone/coop-cloud/auto-apps-json/issues/1.
2021-06-17 21:40:58 +02:00
e8510c8aeb Add change log and --output for app-json.py
Closes https://git.autonomic.zone/coop-cloud/auto-apps-json/issues/2.
2021-06-17 21:25:29 +02:00
4042e10985 Use new image 2021-06-17 21:00:43 +02:00
f7cd0eb54c Use new name 2021-06-17 16:34:03 +02:00
a571b839a8 Use proper jq path
Closes https://git.autonomic.zone/coop-cloud/abra/issues/184.
2021-06-17 09:34:36 +02:00
fae13d9af8 Define our own repos to skip here for mirroring 2021-06-17 07:54:54 +02:00
9c9f7225e7 Use new user/org for mirroring 2021-06-17 07:43:00 +02:00
3wc 352cc0939b Fix typo in missing version error message 2021-06-13 20:48:20 +02:00
2ca7884bbe Fix bump releases
Closes https://git.autonomic.zone/coop-cloud/abra/issues/180.
2021-06-11 00:36:40 +02:00
fa54705f79 Merge pull request 'Prefer --fast for skipping all checks' (#175) from prefer-fast-option into main
Reviewed-on: https://git.autonomic.zone/coop-cloud/abra/pulls/175
2021-06-11 00:29:20 +02:00
8d802c78aa Prefer --fast for skipping all checks
Closes https://git.autonomic.zone/coop-cloud/abra/issues/169
2021-06-11 00:27:50 +02:00
1c022fb616 Merge pull request 'Add --bump release logic' (#176) from add-bump-logic into main
Reviewed-on: https://git.autonomic.zone/coop-cloud/abra/pulls/176
2021-06-10 12:19:35 +02:00
0655c03434 Add --bump release logic
Closes https://git.autonomic.zone/coop-cloud/abra/issues/173.
2021-06-10 12:18:27 +02:00
f6bdf596f5 Bump to next version 2021-06-10 11:43:00 +02:00
6c6e6808c9 Merge pull request 'Add --chaos flag' (#179) from chaos-deploy-flag into main
Reviewed-on: https://git.autonomic.zone/coop-cloud/abra/pulls/179
2021-06-10 11:41:26 +02:00
a3ffd7f239 Add --chaos flag
Closes https://git.autonomic.zone/coop-cloud/abra/issues/178.
2021-06-10 11:40:25 +02:00
a019417fd2 Avoid PR publishing 2021-06-08 12:27:50 +02:00
743600b94e Use access token for pushing 2021-06-05 22:54:16 +02:00
f4c2da894b Add change log entry [ci skip] 2021-06-05 22:43:50 +02:00
4ef433312d Add mirroring script 2021-06-05 22:41:50 +02:00
9f69532dca Use vendored JQ 2021-06-05 08:57:35 +02:00
3a97358f30 Vendor later versions of jq/yq 2021-06-05 08:51:10 +02:00
1e19805757 Drop trailing slash 2021-06-05 08:51:02 +02:00
389ad9d049 Drop non-existent flag 2021-06-05 08:41:14 +02:00
93ffc633f3 Prepare more things for this image 2021-06-05 08:39:13 +02:00
b61c9410a0 Drop the apps.json in the cwd 2021-06-05 08:26:43 +02:00
bbab900ebc Move apps.json generation stuff out of abra
See https://git.autonomic.zone/coop-cloud/abra/issues/125.
2021-06-05 08:22:01 +02:00
36d4dbc5cf Ignore that new repo 2021-06-05 08:15:11 +02:00
a4ade1463f Copy over app-json script 2021-06-05 08:05:40 +02:00
20af4666c6 Remove and ignore pycache folders 2021-06-05 08:05:29 +02:00
d15b031f33 Ignore those pyc files 2021-06-05 08:04:58 +02:00
a7f0bbde62 Add openssh machinery 2021-06-05 08:03:42 +02:00
76d5a1026a Support HTTPS/SSH cloning 2021-06-05 07:58:24 +02:00
7b0fb50e7f Abstract common functions into a library 2021-06-05 07:55:05 +02:00
f92364af80 Run downstream builds
See https://git.autonomic.zone/coop-cloud/abra/issues/171.
2021-06-05 07:25:41 +02:00
ca2a3c8b58 Add notify failures 2021-06-03 23:31:31 +02:00
a5c5526948 Add log entry [ci skip] 2021-06-03 23:15:08 +02:00
d16eb0e309 Drop force and keep going on non-interactive git stuff 2021-06-03 22:12:47 +02:00
3cff8aaada Better grep and apps folder 2021-06-03 21:52:51 +02:00
4ff4c83154 Use dev all the time 2021-06-03 21:04:58 +02:00
f953743a7c Let the plugin do tagging for us as well 2021-06-03 21:01:39 +02:00
e84062e67c Use success instead (helpful for automation) 2021-06-03 20:58:38 +02:00
e573794367 Skip those repos too 2021-06-03 11:45:30 +02:00
87f9c16db4 Add log entry [ci skip] 2021-06-03 11:41:47 +02:00
9fadc430a7 Add renovate script 2021-06-03 11:40:55 +02:00
53cec2469b Handle forcing re-upload 2021-06-03 10:34:19 +02:00
a1de7f10cb Don't edit git stuff when running non-interactively 2021-06-03 10:17:09 +02:00
ece968478d Add log entry 2021-06-03 10:07:23 +02:00
3759bcd641 Support unattended mode for recipe releasing 2021-06-03 10:06:40 +02:00
0ff08b5d34 Add missing dep and make special place in docs 2021-06-03 09:58:28 +02:00
8b541623ad Add change log entry [ci skip] 2021-06-03 09:55:35 +02:00
f24259dbfc Sort on lines [ci skip] 2021-06-03 09:54:32 +02:00
40259f5e97 Add git also 2021-06-03 09:53:56 +02:00
fd471eb3f1 Install dependencies 2021-06-03 09:52:14 +02:00
a4633f06bd Add note about container [ci skip] 2021-06-03 09:47:46 +02:00
0d6031fef9 Also depend on tests [ci skip] 2021-06-03 09:44:47 +02:00
64d578cf91 Add docker image publishing 2021-06-03 09:43:44 +02:00
e216fe290b Actually use that image as it is required 2021-06-03 09:11:50 +02:00
207278af75 Use same language 2021-06-03 09:09:44 +02:00
ff309182ea Drop kcov/codecov for now, use upstream bats 2021-06-03 09:07:55 +02:00
542cf793d2 Remove app which is gone away now 2021-06-01 00:08:21 +02:00
18 changed files with 995 additions and 2754 deletions


@@ -3,47 +3,86 @@ kind: pipeline
 name: linters
 steps:
 - name: run shellcheck
-image: koalaman/shellcheck-alpine:v0.7.1
+image: koalaman/shellcheck-alpine
 commands:
 - shellcheck abra
 - shellcheck bin/*.sh
+- shellcheck deploy/install.abra.coopcloud.tech/installer
 - name: run flake8
-image: alpine/flake8:3.9.0
+image: alpine/flake8
 commands:
-- flake8 --max-line-length 100 bin/app-json.py
+- flake8 --max-line-length 100 bin/*.py
 - name: run unit tests
 image: decentral1se/docker-dind-bats-kcov
 commands:
 - bats tests
-- name: collect code coverage
-failure: ignore # until we fix this
-image: decentral1se/docker-dind-bats-kcov
+- name: test installation script
+image: debian:buster
 commands:
-- kcov . bats tests || true
-- name: send code coverage report to codecov
-failure: ignore # until we fix this
-image: plugins/codecov
+- apt update && apt install -yqq sudo lsb-release
+- deploy/install.abra.coopcloud.tech/installer --no-prompt
+- ~/.local/bin/abra version
+- name: publish image
+image: plugins/docker
 settings:
+auto_tag: true
+username: thecoopcloud
+password:
+from_secret: thecoopcloud_password
+repo: thecoopcloud/abra
+tags: latest
+depends_on:
+- run shellcheck
+- run flake8
+- run unit tests
+- test installation script
+when:
+event:
+exclude:
+- pull_request
+- name: trigger downstream builds
+image: plugins/downstream
+settings:
+server: https://drone.autonomic.zone
 token:
-from_secret: codecov_token
-required: true
+from_secret: decentral1se_token
+fork: true
+repositories:
+- coop-cloud/drone-abra
+depends_on:
+- run shellcheck
+- run flake8
+- run unit tests
+- test installation script
+- publish image
+when:
+event:
+exclude:
+- pull_request
-- name: notify rocket chat
-image: plugins/slack
+- name: notify on failure
+image: plugins/matrix
 settings:
-webhook:
-from_secret: rc_builds_url
-username: comradebritney
-channel: "internal.builds"
-template: "{{repo.owner}}/{{repo.name}} build failed: {{build.link}}"
+homeserver: https://matrix.autonomic.zone
+roomid: "IFazIpLtxiScqbHqoa:autonomic.zone"
+userid: "@autono-bot:autonomic.zone"
+accesstoken:
+from_secret: autono_bot_access_token
+depends_on:
+- run shellcheck
+- run flake8
+- run unit tests
+- test installation script
+- publish image
+- trigger downstream builds
 when:
 status:
 - failure
 trigger:
 branch:
 - main

.gitignore (vendored, 3 changes)

@@ -1,2 +1,5 @@
+*.json
+*.pyc
 /.venv
+__pycache__
 coverage/


@@ -9,6 +9,35 @@
 # abra x.x.x (UNRELEASED)
+# abra 10.0.0 (2021-07-06)
+- Add `--bump` to `deploy` command to allow packagers to make minor package related releases ([#173](https://git.autonomic.zone/coop-cloud/abra/issues/173))
+- Drop `--skip-version-check`/`--no-domain-poll`/`--no-state-poll` in favour of `--fast` ([#169](https://git.autonomic.zone/coop-cloud/abra/issues/169))
+- Move `abra` image under the new `thecoopcloud/...` namespace ([#1](https://git.autonomic.zone/coop-cloud/auto-apps-json/issues/1))
+- Add a `--output` flag to the `app-json.py` app generator for the CI environment ([#2](https://git.autonomic.zone/coop-cloud/auto-apps-json/issues/2))
+- Support logging in as new `thecoopcloud` Docker account via `skopeo` when generating new `apps.json` ([7482362af1](https://git.autonomic.zone/coop-cloud/abra/commit/7482362af1d01cc02828abd45b1222fa643d1f80))
+- App deployment checks are somewhat more reliable (see [#193](https://git.autonomic.zone/coop-cloud/abra/issues/193) for remaining work) ([#165](https://git.autonomic.zone/coop-cloud/abra/issues/165))
+- Skip generation of commented out secrets and correctly fail deploy when secret generation fails ([#133](https://git.autonomic.zone/coop-cloud/abra/issues/133))
+- Fix logging for chaos deploys and recipe selection logic ([#185](https://git.autonomic.zone/coop-cloud/abra/issues/185))
+- Improve reliability of selecting when to download a new `apps.json` ([#170](https://git.autonomic.zone/coop-cloud/abra/issues/170))
+- Implement `pwgen`/`pwqgen` native fallback for password generation ([#167](https://git.autonomic.zone/coop-cloud/abra/issues/167) / [#197](https://git.autonomic.zone/coop-cloud/abra/issues/197))
+- `abra` installer script will now try to install system requirements ([#196](https://git.autonomic.zone/coop-cloud/abra/issues/196))
+- Use latest [v4.9.6](https://github.com/mikefarah/yq/releases/tag/v4.9.6) install of `yq` for vendoring (**upgrade HOWTO:** `rm -rf ~/.abra/vendor/*`)
+- Support overriding `$ARGS` from `abra.sh` custom commands and error out correctly when these commands fail ([#1](https://github.com/Coop-Cloud/peertube/issues/1))
+- Add `abra <app> restart <service>` to support restarting individual services ([#200](https://git.autonomic.zone/coop-cloud/abra/issues/200))
+- Output changes when asking to commit during release logic ([3f0a907b5](https://git.autonomic.zone/coop-cloud/rocketchat/commit/3f0a907b57acf960d6befab3d943982d956e2474))
+- Add validation for generated output when making new release labels ([#186](https://git.autonomic.zone/coop-cloud/abra/issues/186))
+# abra 9.0.0 (2021-06-10)
+- Add Docker image for `abra` ([64d578cf91](https://git.autonomic.zone/coop-cloud/abra/commit/64d578cf914bd2bad378ea4ef375747d10b33191))
+- Support unattended mode for recipe releasing ([3759bcd641](https://git.autonomic.zone/coop-cloud/abra/commit/3759bcd641cf60611c13927e83425e773d2bb629))
+- Add Renovate bot configuraiton script ([9fadc430a7](https://git.autonomic.zone/coop-cloud/abra/commit/9fadc430a7bb2d554c0ee26c0f9b6c51dc5b0475))
+- Add release automation via [drone-abra](https://git.autonomic.zone/coop-cloud/drone-abra) ([#56](https://git.autonomic.zone/coop-cloud/organising/issues/56))
+- Move `apps.json` generation to [auto-apps-json](https://git.autonomic.zone/coop-cloud/auto-apps-json) ([#125](https://git.autonomic.zone/coop-cloud/abra/issues/125))
+- Add Github mirroring script ([4ef433312d](https://git.autonomic.zone/coop-cloud/abra/commit/4ef433312dd0b0ace91b3c285f82f3973093d92d))
+- Add `--chaos` flag to deploy (always choose latest Git commit) ([#178](https://git.autonomic.zone/coop-cloud/abra/issues/178))
 # abra 8.0.1 (2021-05-31)
 - Fix help for `... app ... volume ls` ([efad71c470](https://git.autonomic.zone/coop-cloud/abra/commits/branch/main))
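Taken together, the 10.0.0 entries above mostly surface as new command-line flags. A few illustrative invocations (app, recipe and service names here are placeholders):

```sh
# Deploy straight from the latest Git commit, skipping the safety checks
abra app example_app deploy --chaos --fast

# Restart a single service of a deployed app
abra app example_app restart app

# Cut a minor, packaging-only release of a recipe
abra recipe example_recipe release --bump
```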

Dockerfile (new file, 33 changes)

@@ -0,0 +1,33 @@
FROM alpine:latest
RUN apk add --upgrade --no-cache \
bash \
curl \
git \
grep \
openssh-client \
py3-requests \
skopeo \
util-linux
RUN mkdir -p ~./local/bin
RUN mkdir -p ~/.abra/apps
RUN mkdir -p ~/.abra/vendor
RUN mkdir -p ~/.ssh/
RUN ssh-keyscan -p 2222 git.autonomic.zone > ~/.ssh/known_hosts
RUN curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 --output ~/.abra/vendor/jq
RUN chmod +x ~/.abra/vendor/jq
RUN curl -L https://github.com/mikefarah/yq/releases/download/v4.9.3/yq_linux_amd64 --output ~/.abra/vendor/yq
RUN chmod +x ~/.abra/vendor/yq
# Note(decentral1se): it is fine to always use the development branch because
# our Drone CI docker auto-tagger will publish official release tags and
# otherwise give us the latest abra on the latest tag
RUN curl https://install.abra.coopcloud.tech | bash -s -- --dev
COPY bin/* /root/.local/bin/
ENTRYPOINT ["/root/.local/bin/abra"]
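The CI pipeline above publishes this image to Docker Hub as `thecoopcloud/abra`; a minimal sketch of building and trying it locally (the tag is arbitrary):

```sh
docker build -t thecoopcloud/abra .
docker run --rm thecoopcloud/abra version
```

Note that most subcommands talk to Docker, so running them from the container will likely also require a Docker socket or context to be made available inside it.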


@@ -1,11 +1,10 @@
 # abra
 [![Build Status](https://drone.autonomic.zone/api/badges/coop-cloud/abra/status.svg)](https://drone.autonomic.zone/coop-cloud/abra)
-[![codecov](https://codecov.io/gh/Autonomic-Cooperative/abra/branch/main/graph/badge.svg?token=aX3I5NMRsj)](undefined)
 > https://coopcloud.tech
-The cooperative cloud utility belt 🎩🐇
+The Co-op Cloud utility belt 🎩🐇
 `abra` is a command-line tool for managing your own [Co-op Cloud](https://coopcloud.tech). It can provision new servers, create applications, deploy them, run backup and restore operations and a whole lot of other things. It is the go-to tool for day-to-day operations when managing a Co-op Cloud instance.
@@ -26,29 +25,40 @@ See [CHANGELOG.md](./CHANGELOG.md).
 > [docs.coopcloud.tech](https://docs.coopcloud.tech)
-## Install
-Requirements:
-- `pwqgen` (optional)
-- `pwgen` (optional)
+## Requirements
 - `curl`
 - `docker`
 - `bash` >= 4
+## Install
 Install the latest stable release:
 ```sh
 curl https://install.abra.coopcloud.tech | bash
 ```
-or the bleeding-edge development version:
+The source for this script is [here](./deploy/install.abra.coopcloud.tech/installer).
+You can pass options to the script like so (e.g. install the bleeding edge development version):
 ```sh
 curl https://install.abra.coopcloud.tech | bash -s -- --dev
 ```
-The source for this script is [here](./deploy/install.abra.coopcloud.tech/installer).
+Other options available are as follows:
+- **--no-prompt**: non-interactive installation
+- **--no-deps**: do not attempt to install [requirements](#requirements)
+## Container
+An [image](https://hub.docker.com/r/thecoopcloud/abra) is also provided.
+```
+docker run thecoopcloud/abra app ls
+```
 ## Update
@@ -67,17 +77,6 @@ The command-line interface is generated via [docopt](http://docopt.org/). If you
 Please remember to update the [CHANGELOG](./CHANGELOG.md) when you make a change.
-## Generating a new apps.json
-You'll need to install the following requirements:
-- [requests](https://docs.python-requests.org/en/master/) (`apt install python3-requests` / `pip install requests`)
-- [skopeo](https://github.com/containers/skopeo) (check [the install docs](https://github.com/containers/skopeo/blob/master/install.md))
-- [jq](https://stedolan.github.io/jq/tutorial/) (`sudo apt-get install jq` or see [the install docs](https://stedolan.github.io/jq/download/))
-- [yq](https://mikefarah.gitbook.io/yq/) (see [the install docs](https://mikefarah.gitbook.io/yq/#install))
-Then run `./bin/app-json.py` ([source](./bin/app-json.py)) and it will spit out the JSON file into [deploy/apps.coopcloud.tech/apps.json](./deploy/apps.coopcloud.tech/apps.json).
 ## Releasing
 ### `abra`
@@ -91,11 +90,3 @@ Then run `./bin/app-json.py` ([source](./bin/app-json.py)) and it will spit out
 - `git push` and `git push --tags`
 - Deploy a new installer script `make release-installer`
 - Tell the world (CoTech forum, Matrix public channel, Autonomic mastodon, etc.)
-### apps.coopcloud.tech
-> [apps.coopcloud.tech](https://apps.coopcloud.tech)
-```bash
-$ make release-apps
-```
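The installer options documented above can be combined; for instance, a fully unattended install that skips dependency installation might look like this (an illustrative invocation built from the documented flags):

```sh
curl https://install.abra.coopcloud.tech | bash -s -- --no-prompt --no-deps
```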

abra (554 changes)

@ -1,9 +1,11 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# shellcheck disable=SC2154
GIT_URL="https://git.autonomic.zone/coop-cloud/" GIT_URL="https://git.autonomic.zone/coop-cloud/"
ABRA_APPS_URL="https://apps.coopcloud.tech" ABRA_APPS_URL="https://apps.coopcloud.tech"
ABRA_DIR="${ABRA_DIR:-$HOME/.abra}" ABRA_DIR="${ABRA_DIR:-$HOME/.abra}"
ABRA_VERSION="8.0.1" ABRA_VERSION="10.0.0"
ABRA_BACKUP_DIR="${ABRA_BACKUP_DIR:-$ABRA_DIR/backups}" ABRA_BACKUP_DIR="${ABRA_BACKUP_DIR:-$ABRA_DIR/backups}"
ABRA_VENDOR_DIR="$ABRA_DIR/vendor" ABRA_VENDOR_DIR="$ABRA_DIR/vendor"
ABRA_APPS_JSON="${ABRA_DIR}/apps.json" ABRA_APPS_JSON="${ABRA_DIR}/apps.json"
@ -13,13 +15,20 @@ ABRA_APPS_JSON="${ABRA_DIR}/apps.json"
####################################### #######################################
DOC=" DOC="
The cooperative cloud utility belt 🎩🐇 The Co-op Cloud utility belt 🎩🐇
____ ____ _ _
/ ___|___ ___ _ __ / ___| | ___ _ _ __| |
| | / _ \ _____ / _ \| '_ \ | | | |/ _ \| | | |/ _' |
| |__| (_) |_____| (_) | |_) | | |___| | (_) | |_| | (_| |
\____\___/ \___/| .__/ \____|_|\___/ \__,_|\__,_|
|_|
Usage: Usage:
abra [options] app (list|ls) [--status] [--server=<server>] [--type=<type>] abra [options] app (list|ls) [--status] [--server=<server>] [--type=<type>]
abra [options] app new [--server=<server>] [--domain=<domain>] [--app-name=<app_name>] [--pass] [--secrets] <type> abra [options] app new [--server=<server>] [--domain=<domain>] [--app-name=<app_name>] [--pass] [--secrets] <type>
abra [options] app <app> backup (<service>|--all) abra [options] app <app> backup (<service>|--all)
abra [options] app <app> deploy [--update] [--force] [--fast] [--skip-version-check] [--no-domain-poll] [--no-state-poll] [<version>] abra [options] app <app> deploy [--update] [--force] [--fast] [--chaos] [<version>]
abra [options] app <app> check abra [options] app <app> check
abra [options] app <app> version abra [options] app <app> version
abra [options] app <app> config abra [options] app <app> config
@ -31,6 +40,7 @@ Usage:
abra [options] app <app> restore <service> [<backup file>] abra [options] app <app> restore <service> [<backup file>]
abra [options] app <app> run [--no-tty] [--user=<user>] <service> <args>... abra [options] app <app> run [--no-tty] [--user=<user>] <service> <args>...
abra [options] app <app> rollback [<version>] abra [options] app <app> rollback [<version>]
abra [options] app <app> restart <service>
abra [options] app <app> secret generate (<secret> <version>|--all) [<cmd>] [--pass] abra [options] app <app> secret generate (<secret> <version>|--all) [<cmd>] [--pass]
abra [options] app <app> secret insert <secret> <version> <data> [--pass] abra [options] app <app> secret insert <secret> <version> <data> [--pass]
abra [options] app <app> secret (rm|delete) (<secret>|--all) [--pass] abra [options] app <app> secret (rm|delete) (<secret>|--all) [--pass]
@ -40,7 +50,7 @@ Usage:
abra [options] app <app> <command> [<args>...] abra [options] app <app> <command> [<args>...]
abra [options] recipe ls abra [options] recipe ls
abra [options] recipe create <recipe> abra [options] recipe create <recipe>
abra [options] recipe <recipe> release [--force] abra [options] recipe <recipe> release [--force] [--bump]
abra [options] recipe <recipe> versions abra [options] recipe <recipe> versions
abra [options] server add <host> [<user>] [<port>] abra [options] server add <host> [<user>] [<port>]
abra [options] server new <provider> -- <args> abra [options] server new <provider> -- <args>
@ -167,38 +177,38 @@ eval "var_$1+=($value)"; else eval "var_$1=$value"; fi; return 0; fi; done
return 1; }; stdout() { printf -- "cat <<'EOM'\n%s\nEOM\n" "$1"; }; stderr() { return 1; }; stdout() { printf -- "cat <<'EOM'\n%s\nEOM\n" "$1"; }; stderr() {
printf -- "cat <<'EOM' >&2\n%s\nEOM\n" "$1"; }; error() { printf -- "cat <<'EOM' >&2\n%s\nEOM\n" "$1"; }; error() {
[[ -n $1 ]] && stderr "$1"; stderr "$usage"; _return 1; }; _return() { [[ -n $1 ]] && stderr "$1"; stderr "$usage"; _return 1; }; _return() {
printf -- "exit %d\n" "$1"; exit "$1"; }; set -e; trimmed_doc=${DOC:1:2492} printf -- "exit %d\n" "$1"; exit "$1"; }; set -e; trimmed_doc=${DOC:1:2824}
usage=${DOC:40:1883}; digest=f774b usage=${DOC:368:1887}; digest=6ab46
shorts=(-e -s -U -h -C -v -d -n -b '' '' '' '' '' '' '' '' '' '' '' '' '' '' '' '' '' '') shorts=(-n -b -v -U -e -h -d -C -s '' '' '' '' '' '' '' '' '' '' '' '' '' '' '' '' '')
longs=(--env --stack --skip-update --help --skip-check --verbose --debug --no-prompt --branch --status --server --type --domain --app-name --pass --secrets --all --update --force --fast --skip-version-check --no-domain-poll --no-state-poll --volumes --no-tty --user --dev) longs=(--no-prompt --branch --verbose --skip-update --env --help --debug --skip-check --stack --status --server --type --domain --app-name --pass --secrets --all --update --force --fast --chaos --volumes --no-tty --user --bump --dev)
argcounts=(1 1 0 0 0 0 0 0 1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 0); node_0(){ argcounts=(0 1 0 0 1 0 0 0 1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0); node_0(){
value __env 0; }; node_1(){ value __stack 1; }; node_2(){ switch __skip_update 2 switch __no_prompt 0; }; node_1(){ value __branch 1; }; node_2(){
}; node_3(){ switch __help 3; }; node_4(){ switch __skip_check 4; }; node_5(){ switch __verbose 2; }; node_3(){ switch __skip_update 3; }; node_4(){
switch __verbose 5; }; node_6(){ switch __debug 6; }; node_7(){ value __env 4; }; node_5(){ switch __help 5; }; node_6(){ switch __debug 6; }
switch __no_prompt 7; }; node_8(){ value __branch 8; }; node_9(){ node_7(){ switch __skip_check 7; }; node_8(){ value __stack 8; }; node_9(){
switch __status 9; }; node_10(){ value __server 10; }; node_11(){ switch __status 9; }; node_10(){ value __server 10; }; node_11(){
value __type 11; }; node_12(){ value __domain 12; }; node_13(){ value __type 11; }; node_12(){ value __domain 12; }; node_13(){
value __app_name 13; }; node_14(){ switch __pass 14; }; node_15(){ value __app_name 13; }; node_14(){ switch __pass 14; }; node_15(){
switch __secrets 15; }; node_16(){ switch __all 16; }; node_17(){ switch __secrets 15; }; node_16(){ switch __all 16; }; node_17(){
switch __update 17; }; node_18(){ switch __force 18; }; node_19(){ switch __update 17; }; node_18(){ switch __force 18; }; node_19(){
switch __fast 19; }; node_20(){ switch __skip_version_check 20; }; node_21(){ switch __fast 19; }; node_20(){ switch __chaos 20; }; node_21(){
switch __no_domain_poll 21; }; node_22(){ switch __no_state_poll 22; } switch __volumes 21; }; node_22(){ switch __no_tty 22; }; node_23(){
node_23(){ switch __volumes 23; }; node_24(){ switch __no_tty 24; }; node_25(){ value __user 23; }; node_24(){ switch __bump 24; }; node_25(){ switch __dev 25
value __user 25; }; node_26(){ switch __dev 26; }; node_27(){ value _type_ a; } }; node_26(){ value _type_ a; }; node_27(){ value _app_ a; }; node_28(){
node_28(){ value _app_ a; }; node_29(){ value _service_ a; }; node_30(){ value _service_ a; }; node_29(){ value _version_ a; }; node_30(){ value _src_ a
value _version_ a; }; node_31(){ value _src_ a; }; node_32(){ value _dst_ a; } }; node_31(){ value _dst_ a; }; node_32(){ value _backup_file_ a; }; node_33(){
node_33(){ value _backup_file_ a; }; node_34(){ value _args_ a true; } value _args_ a true; }; node_34(){ value _secret_ a; }; node_35(){ value _cmd_ a
node_35(){ value _secret_ a; }; node_36(){ value _cmd_ a; }; node_37(){ }; node_36(){ value _data_ a; }; node_37(){ value _volume_ a; }; node_38(){
value _data_ a; }; node_38(){ value _volume_ a; }; node_39(){ value _command_ a value _command_ a; }; node_39(){ value _recipe_ a; }; node_40(){ value _host_ a
}; node_40(){ value _recipe_ a; }; node_41(){ value _host_ a; }; node_42(){ }; node_41(){ value _user_ a; }; node_42(){ value _port_ a; }; node_43(){
value _user_ a; }; node_43(){ value _port_ a; }; node_44(){ value _provider_ a value _provider_ a; }; node_44(){ value _subcommands_ a true; }; node_45(){
}; node_45(){ value _subcommands_ a true; }; node_46(){ _command app; } _command app; }; node_46(){ _command list; }; node_47(){ _command ls; }
node_47(){ _command list; }; node_48(){ _command ls; }; node_49(){ _command new node_48(){ _command new; }; node_49(){ _command backup; }; node_50(){
}; node_50(){ _command backup; }; node_51(){ _command deploy; }; node_52(){ _command deploy; }; node_51(){ _command check; }; node_52(){ _command version; }
_command check; }; node_53(){ _command version; }; node_54(){ _command config; } node_53(){ _command config; }; node_54(){ _command cp; }; node_55(){
node_55(){ _command cp; }; node_56(){ _command logs; }; node_57(){ _command ps _command logs; }; node_56(){ _command ps; }; node_57(){ _command restore; }
}; node_58(){ _command restore; }; node_59(){ _command rm; }; node_60(){ node_58(){ _command rm; }; node_59(){ _command delete; }; node_60(){
_command delete; }; node_61(){ _command run; }; node_62(){ _command rollback; } _command run; }; node_61(){ _command rollback; }; node_62(){ _command restart; }
node_63(){ _command secret; }; node_64(){ _command generate; }; node_65(){ node_63(){ _command secret; }; node_64(){ _command generate; }; node_65(){
_command insert; }; node_66(){ _command undeploy; }; node_67(){ _command volume _command insert; }; node_66(){ _command undeploy; }; node_67(){ _command volume
}; node_68(){ _command recipe; }; node_69(){ _command create; }; node_70(){ }; node_68(){ _command recipe; }; node_69(){ _command create; }; node_70(){
@ -207,90 +217,88 @@ _command release; }; node_71(){ _command versions; }; node_72(){ _command server
_command init; }; node_76(){ _command apps; }; node_77(){ _command upgrade; } _command init; }; node_76(){ _command apps; }; node_77(){ _command upgrade; }
node_78(){ _command doctor; }; node_79(){ _command help; }; node_80(){ node_78(){ _command doctor; }; node_79(){ _command help; }; node_80(){
optional 0 1 2 3 4 5 6 7 8; }; node_81(){ optional 80; }; node_82(){ optional 0 1 2 3 4 5 6 7 8; }; node_81(){ optional 80; }; node_82(){
either 47 48; }; node_83(){ required 82; }; node_84(){ optional 9; }; node_85(){ either 46 47; }; node_83(){ required 82; }; node_84(){ optional 9; }; node_85(){
optional 10; }; node_86(){ optional 11; }; node_87(){ required 81 46 83 84 85 86 optional 10; }; node_86(){ optional 11; }; node_87(){ required 81 45 83 84 85 86
}; node_88(){ optional 12; }; node_89(){ optional 13; }; node_90(){ optional 14 }; node_88(){ optional 12; }; node_89(){ optional 13; }; node_90(){ optional 14
}; node_91(){ optional 15; }; node_92(){ required 81 46 49 85 88 89 90 91 27; } }; node_91(){ optional 15; }; node_92(){ required 81 45 48 85 88 89 90 91 26; }
node_93(){ either 29 16; }; node_94(){ required 93; }; node_95(){ node_93(){ either 28 16; }; node_94(){ required 93; }; node_95(){
required 81 46 28 50 94; }; node_96(){ optional 17; }; node_97(){ optional 18; } required 81 45 27 49 94; }; node_96(){ optional 17; }; node_97(){ optional 18; }
node_98(){ optional 19; }; node_99(){ optional 20; }; node_100(){ optional 21; } node_98(){ optional 19; }; node_99(){ optional 20; }; node_100(){ optional 29; }
node_101(){ optional 22; }; node_102(){ optional 30; }; node_103(){ node_101(){ required 81 45 27 50 96 97 98 99 100; }; node_102(){
required 81 46 28 51 96 97 98 99 100 101 102; }; node_104(){ required 81 45 27 51; }; node_103(){ required 81 45 27 52; }; node_104(){
required 81 46 28 52; }; node_105(){ required 81 46 28 53; }; node_106(){ required 81 45 27 53; }; node_105(){ required 81 45 27 54 30 31; }; node_106(){
required 81 46 28 54; }; node_107(){ required 81 46 28 55 31 32; }; node_108(){ optional 28; }; node_107(){ required 81 45 27 55 106; }; node_108(){
optional 29; }; node_109(){ required 81 46 28 56 108; }; node_110(){ required 81 45 27 56; }; node_109(){ required 81 45 27 57 94; }; node_110(){
required 81 46 28 57; }; node_111(){ required 81 46 28 58 94; }; node_112(){ either 58 59; }; node_111(){ required 110; }; node_112(){ optional 21; }
either 59 60; }; node_113(){ required 112; }; node_114(){ optional 23; } node_113(){ required 81 45 27 111 112 91; }; node_114(){ optional 32; }
node_115(){ required 81 46 28 113 114 91; }; node_116(){ optional 33; } node_115(){ required 81 45 27 57 28 114; }; node_116(){ optional 22; }
node_117(){ required 81 46 28 58 29 116; }; node_118(){ optional 24; } node_117(){ optional 23; }; node_118(){ oneormore 33; }; node_119(){
node_119(){ optional 25; }; node_120(){ oneormore 34; }; node_121(){ required 81 45 27 60 116 117 28 118; }; node_120(){ required 81 45 27 61 100; }
required 81 46 28 61 118 119 29 120; }; node_122(){ required 81 46 28 62 102; } node_121(){ required 81 45 27 62 28; }; node_122(){ required 34 29; }
node_123(){ required 35 30; }; node_124(){ either 123 16; }; node_125(){ node_123(){ either 122 16; }; node_124(){ required 123; }; node_125(){
required 124; }; node_126(){ optional 36; }; node_127(){ optional 35; }; node_126(){ required 81 45 27 63 64 124 125 90; }; node_127(){
required 81 46 28 63 64 125 126 90; }; node_128(){ required 81 45 27 63 65 34 29 36 90; }; node_128(){ either 34 16; }; node_129(){
required 81 46 28 63 65 35 30 37 90; }; node_129(){ either 35 16; }; node_130(){ required 128; }; node_130(){ required 81 45 27 63 111 129 90; }; node_131(){
required 129; }; node_131(){ required 81 46 28 63 113 130 90; }; node_132(){ required 81 45 27 66; }; node_132(){ required 81 45 27 67 47; }; node_133(){
required 81 46 28 66; }; node_133(){ required 81 46 28 67 48; }; node_134(){ either 37 16; }; node_134(){ required 133; }; node_135(){
either 38 16; }; node_135(){ required 134; }; node_136(){ required 81 45 27 67 111 134; }; node_136(){ optional 118; }; node_137(){
required 81 46 28 67 113 135; }; node_137(){ optional 120; }; node_138(){ required 81 45 27 38 136; }; node_138(){ required 81 68 47; }; node_139(){
required 81 46 28 39 137; }; node_139(){ required 81 68 48; }; node_140(){ required 81 68 69 39; }; node_140(){ optional 24; }; node_141(){
required 81 68 69 40; }; node_141(){ required 81 68 40 70 97; }; node_142(){ required 81 68 39 70 97 140; }; node_142(){ required 81 68 39 71; }; node_143(){
required 81 68 40 71; }; node_143(){ optional 42; }; node_144(){ optional 43; } optional 41; }; node_144(){ optional 42; }; node_145(){
node_145(){ required 81 72 73 41 143 144; }; node_146(){ required 81 72 73 40 143 144; }; node_146(){ required 81 72 48 43 74 33; }
required 81 72 49 44 74 34; }; node_147(){ required 81 72 83; }; node_148(){ node_147(){ required 81 72 83; }; node_148(){ required 81 72 40 58; }
required 81 72 41 59; }; node_149(){ required 81 72 41 75; }; node_150(){ node_149(){ required 81 72 40 75; }; node_150(){ required 81 72 40 76 84; }
required 81 72 41 76 84; }; node_151(){ optional 26; }; node_152(){ node_151(){ optional 25; }; node_152(){ required 81 77 151; }; node_153(){
required 81 77 151; }; node_153(){ required 81 53; }; node_154(){ required 81 78 required 81 52; }; node_154(){ required 81 78; }; node_155(){ oneormore 44; }
}; node_155(){ oneormore 45; }; node_156(){ optional 155; }; node_157(){ node_156(){ optional 155; }; node_157(){ required 81 79 156; }; node_158(){
required 81 79 156; }; node_158(){ required 81; }; node_159(){ required 81; }; node_159(){
either 87 92 95 103 104 105 106 107 109 110 111 115 117 121 122 127 128 131 132 133 136 138 139 140 141 142 145 146 147 148 149 150 152 153 154 157 158 either 87 92 95 101 102 103 104 105 107 108 109 113 115 119 120 121 126 127 130 131 132 135 137 138 139 141 142 145 146 147 148 149 150 152 153 154 157 158
}; node_160(){ required 159; }; cat <<<' docopt_exit() { }; node_160(){ required 159; }; cat <<<' docopt_exit() {
[[ -n $1 ]] && printf "%s\n" "$1" >&2; printf "%s\n" "${DOC:40:1883}" >&2 [[ -n $1 ]] && printf "%s\n" "$1" >&2; printf "%s\n" "${DOC:368:1887}" >&2
exit 1; }'; unset var___env var___stack var___skip_update var___help \ exit 1; }'; unset var___no_prompt var___branch var___verbose var___skip_update \
var___skip_check var___verbose var___debug var___no_prompt var___branch \ var___env var___help var___debug var___skip_check var___stack var___status \
var___status var___server var___type var___domain var___app_name var___pass \ var___server var___type var___domain var___app_name var___pass var___secrets \
var___secrets var___all var___update var___force var___fast \ var___all var___update var___force var___fast var___chaos var___volumes \
var___skip_version_check var___no_domain_poll var___no_state_poll \ var___no_tty var___user var___bump var___dev var__type_ var__app_ \
var___volumes var___no_tty var___user var___dev var__type_ var__app_ \
var__service_ var__version_ var__src_ var__dst_ var__backup_file_ var__args_ \ var__service_ var__version_ var__src_ var__dst_ var__backup_file_ var__args_ \
var__secret_ var__cmd_ var__data_ var__volume_ var__command_ var__recipe_ \ var__secret_ var__cmd_ var__data_ var__volume_ var__command_ var__recipe_ \
var__host_ var__user_ var__port_ var__provider_ var__subcommands_ var_app \ var__host_ var__user_ var__port_ var__provider_ var__subcommands_ var_app \
var_list var_ls var_new var_backup var_deploy var_check var_version var_config \ var_list var_ls var_new var_backup var_deploy var_check var_version var_config \
var_cp var_logs var_ps var_restore var_rm var_delete var_run var_rollback \ var_cp var_logs var_ps var_restore var_rm var_delete var_run var_rollback \
var_secret var_generate var_insert var_undeploy var_volume var_recipe \ var_restart var_secret var_generate var_insert var_undeploy var_volume \
var_create var_release var_versions var_server var_add var___ var_init \ var_recipe var_create var_release var_versions var_server var_add var___ \
var_apps var_upgrade var_doctor var_help; parse 160 "$@" var_init var_apps var_upgrade var_doctor var_help; parse 160 "$@"
local prefix=${DOCOPT_PREFIX:-''}; unset "${prefix}__env" "${prefix}__stack" \ local prefix=${DOCOPT_PREFIX:-''}; unset "${prefix}__no_prompt" \
"${prefix}__skip_update" "${prefix}__help" "${prefix}__skip_check" \ "${prefix}__branch" "${prefix}__verbose" "${prefix}__skip_update" \
"${prefix}__verbose" "${prefix}__debug" "${prefix}__no_prompt" \ "${prefix}__env" "${prefix}__help" "${prefix}__debug" "${prefix}__skip_check" \
"${prefix}__branch" "${prefix}__status" "${prefix}__server" "${prefix}__type" \ "${prefix}__stack" "${prefix}__status" "${prefix}__server" "${prefix}__type" \
"${prefix}__domain" "${prefix}__app_name" "${prefix}__pass" \ "${prefix}__domain" "${prefix}__app_name" "${prefix}__pass" \
"${prefix}__secrets" "${prefix}__all" "${prefix}__update" "${prefix}__force" \ "${prefix}__secrets" "${prefix}__all" "${prefix}__update" "${prefix}__force" \
"${prefix}__fast" "${prefix}__skip_version_check" "${prefix}__no_domain_poll" \ "${prefix}__fast" "${prefix}__chaos" "${prefix}__volumes" "${prefix}__no_tty" \
"${prefix}__no_state_poll" "${prefix}__volumes" "${prefix}__no_tty" \ "${prefix}__user" "${prefix}__bump" "${prefix}__dev" "${prefix}_type_" \
"${prefix}__user" "${prefix}__dev" "${prefix}_type_" "${prefix}_app_" \ "${prefix}_app_" "${prefix}_service_" "${prefix}_version_" "${prefix}_src_" \
"${prefix}_service_" "${prefix}_version_" "${prefix}_src_" "${prefix}_dst_" \ "${prefix}_dst_" "${prefix}_backup_file_" "${prefix}_args_" \
"${prefix}_backup_file_" "${prefix}_args_" "${prefix}_secret_" \ "${prefix}_secret_" "${prefix}_cmd_" "${prefix}_data_" "${prefix}_volume_" \
"${prefix}_cmd_" "${prefix}_data_" "${prefix}_volume_" "${prefix}_command_" \ "${prefix}_command_" "${prefix}_recipe_" "${prefix}_host_" "${prefix}_user_" \
"${prefix}_recipe_" "${prefix}_host_" "${prefix}_user_" "${prefix}_port_" \ "${prefix}_port_" "${prefix}_provider_" "${prefix}_subcommands_" \
"${prefix}_provider_" "${prefix}_subcommands_" "${prefix}app" "${prefix}list" \ "${prefix}app" "${prefix}list" "${prefix}ls" "${prefix}new" "${prefix}backup" \
"${prefix}ls" "${prefix}new" "${prefix}backup" "${prefix}deploy" \ "${prefix}deploy" "${prefix}check" "${prefix}version" "${prefix}config" \
"${prefix}check" "${prefix}version" "${prefix}config" "${prefix}cp" \ "${prefix}cp" "${prefix}logs" "${prefix}ps" "${prefix}restore" "${prefix}rm" \
"${prefix}logs" "${prefix}ps" "${prefix}restore" "${prefix}rm" \ "${prefix}delete" "${prefix}run" "${prefix}rollback" "${prefix}restart" \
"${prefix}delete" "${prefix}run" "${prefix}rollback" "${prefix}secret" \ "${prefix}secret" "${prefix}generate" "${prefix}insert" "${prefix}undeploy" \
"${prefix}generate" "${prefix}insert" "${prefix}undeploy" "${prefix}volume" \ "${prefix}volume" "${prefix}recipe" "${prefix}create" "${prefix}release" \
"${prefix}recipe" "${prefix}create" "${prefix}release" "${prefix}versions" \ "${prefix}versions" "${prefix}server" "${prefix}add" "${prefix}__" \
"${prefix}server" "${prefix}add" "${prefix}__" "${prefix}init" "${prefix}apps" \ "${prefix}init" "${prefix}apps" "${prefix}upgrade" "${prefix}doctor" \
"${prefix}upgrade" "${prefix}doctor" "${prefix}help" "${prefix}help"; eval "${prefix}"'__no_prompt=${var___no_prompt:-false}'
eval "${prefix}"'__env=${var___env:-}'
eval "${prefix}"'__stack=${var___stack:-}'
eval "${prefix}"'__skip_update=${var___skip_update:-false}'
eval "${prefix}"'__help=${var___help:-false}'
eval "${prefix}"'__skip_check=${var___skip_check:-false}'
eval "${prefix}"'__verbose=${var___verbose:-false}'
eval "${prefix}"'__debug=${var___debug:-false}'
eval "${prefix}"'__no_prompt=${var___no_prompt:-false}'
eval "${prefix}"'__branch=${var___branch:-}' eval "${prefix}"'__branch=${var___branch:-}'
eval "${prefix}"'__verbose=${var___verbose:-false}'
eval "${prefix}"'__skip_update=${var___skip_update:-false}'
eval "${prefix}"'__env=${var___env:-}'
eval "${prefix}"'__help=${var___help:-false}'
eval "${prefix}"'__debug=${var___debug:-false}'
eval "${prefix}"'__skip_check=${var___skip_check:-false}'
eval "${prefix}"'__stack=${var___stack:-}'
eval "${prefix}"'__status=${var___status:-false}' eval "${prefix}"'__status=${var___status:-false}'
eval "${prefix}"'__server=${var___server:-}' eval "${prefix}"'__server=${var___server:-}'
eval "${prefix}"'__type=${var___type:-}' eval "${prefix}"'__type=${var___type:-}'
@ -302,12 +310,11 @@ eval "${prefix}"'__all=${var___all:-false}'
eval "${prefix}"'__update=${var___update:-false}' eval "${prefix}"'__update=${var___update:-false}'
eval "${prefix}"'__force=${var___force:-false}' eval "${prefix}"'__force=${var___force:-false}'
eval "${prefix}"'__fast=${var___fast:-false}' eval "${prefix}"'__fast=${var___fast:-false}'
eval "${prefix}"'__skip_version_check=${var___skip_version_check:-false}' eval "${prefix}"'__chaos=${var___chaos:-false}'
eval "${prefix}"'__no_domain_poll=${var___no_domain_poll:-false}'
eval "${prefix}"'__no_state_poll=${var___no_state_poll:-false}'
eval "${prefix}"'__volumes=${var___volumes:-false}' eval "${prefix}"'__volumes=${var___volumes:-false}'
eval "${prefix}"'__no_tty=${var___no_tty:-false}' eval "${prefix}"'__no_tty=${var___no_tty:-false}'
eval "${prefix}"'__user=${var___user:-}' eval "${prefix}"'__user=${var___user:-}'
eval "${prefix}"'__bump=${var___bump:-false}'
eval "${prefix}"'__dev=${var___dev:-false}' eval "${prefix}"'__dev=${var___dev:-false}'
eval "${prefix}"'_type_=${var__type_:-}'; eval "${prefix}"'_app_=${var__app_:-}' eval "${prefix}"'_type_=${var__type_:-}'; eval "${prefix}"'_app_=${var__app_:-}'
eval "${prefix}"'_service_=${var__service_:-}' eval "${prefix}"'_service_=${var__service_:-}'
@ -342,6 +349,7 @@ eval "${prefix}"'rm=${var_rm:-false}'
eval "${prefix}"'delete=${var_delete:-false}' eval "${prefix}"'delete=${var_delete:-false}'
eval "${prefix}"'run=${var_run:-false}' eval "${prefix}"'run=${var_run:-false}'
eval "${prefix}"'rollback=${var_rollback:-false}' eval "${prefix}"'rollback=${var_rollback:-false}'
eval "${prefix}"'restart=${var_restart:-false}'
eval "${prefix}"'secret=${var_secret:-false}' eval "${prefix}"'secret=${var_secret:-false}'
eval "${prefix}"'generate=${var_generate:-false}' eval "${prefix}"'generate=${var_generate:-false}'
eval "${prefix}"'insert=${var_insert:-false}' eval "${prefix}"'insert=${var_insert:-false}'
@ -359,16 +367,15 @@ eval "${prefix}"'upgrade=${var_upgrade:-false}'
eval "${prefix}"'doctor=${var_doctor:-false}' eval "${prefix}"'doctor=${var_doctor:-false}'
eval "${prefix}"'help=${var_help:-false}'; local docopt_i=1 eval "${prefix}"'help=${var_help:-false}'; local docopt_i=1
[[ $BASH_VERSION =~ ^4.3 ]] && docopt_i=2; for ((;docopt_i>0;docopt_i--)); do [[ $BASH_VERSION =~ ^4.3 ]] && docopt_i=2; for ((;docopt_i>0;docopt_i--)); do
declare -p "${prefix}__env" "${prefix}__stack" "${prefix}__skip_update" \ declare -p "${prefix}__no_prompt" "${prefix}__branch" "${prefix}__verbose" \
"${prefix}__help" "${prefix}__skip_check" "${prefix}__verbose" \ "${prefix}__skip_update" "${prefix}__env" "${prefix}__help" "${prefix}__debug" \
"${prefix}__debug" "${prefix}__no_prompt" "${prefix}__branch" \ "${prefix}__skip_check" "${prefix}__stack" "${prefix}__status" \
"${prefix}__status" "${prefix}__server" "${prefix}__type" "${prefix}__domain" \ "${prefix}__server" "${prefix}__type" "${prefix}__domain" \
"${prefix}__app_name" "${prefix}__pass" "${prefix}__secrets" "${prefix}__all" \ "${prefix}__app_name" "${prefix}__pass" "${prefix}__secrets" "${prefix}__all" \
"${prefix}__update" "${prefix}__force" "${prefix}__fast" \ "${prefix}__update" "${prefix}__force" "${prefix}__fast" "${prefix}__chaos" \
"${prefix}__skip_version_check" "${prefix}__no_domain_poll" \ "${prefix}__volumes" "${prefix}__no_tty" "${prefix}__user" "${prefix}__bump" \
"${prefix}__no_state_poll" "${prefix}__volumes" "${prefix}__no_tty" \ "${prefix}__dev" "${prefix}_type_" "${prefix}_app_" "${prefix}_service_" \
"${prefix}__user" "${prefix}__dev" "${prefix}_type_" "${prefix}_app_" \ "${prefix}_version_" "${prefix}_src_" "${prefix}_dst_" \
"${prefix}_service_" "${prefix}_version_" "${prefix}_src_" "${prefix}_dst_" \
"${prefix}_backup_file_" "${prefix}_args_" "${prefix}_secret_" \ "${prefix}_backup_file_" "${prefix}_args_" "${prefix}_secret_" \
"${prefix}_cmd_" "${prefix}_data_" "${prefix}_volume_" "${prefix}_command_" \ "${prefix}_cmd_" "${prefix}_data_" "${prefix}_volume_" "${prefix}_command_" \
"${prefix}_recipe_" "${prefix}_host_" "${prefix}_user_" "${prefix}_port_" \ "${prefix}_recipe_" "${prefix}_host_" "${prefix}_user_" "${prefix}_port_" \
@ -376,11 +383,12 @@ declare -p "${prefix}__env" "${prefix}__stack" "${prefix}__skip_update" \
"${prefix}ls" "${prefix}new" "${prefix}backup" "${prefix}deploy" \ "${prefix}ls" "${prefix}new" "${prefix}backup" "${prefix}deploy" \
"${prefix}check" "${prefix}version" "${prefix}config" "${prefix}cp" \ "${prefix}check" "${prefix}version" "${prefix}config" "${prefix}cp" \
"${prefix}logs" "${prefix}ps" "${prefix}restore" "${prefix}rm" \ "${prefix}logs" "${prefix}ps" "${prefix}restore" "${prefix}rm" \
"${prefix}delete" "${prefix}run" "${prefix}rollback" "${prefix}secret" \ "${prefix}delete" "${prefix}run" "${prefix}rollback" "${prefix}restart" \
"${prefix}generate" "${prefix}insert" "${prefix}undeploy" "${prefix}volume" \ "${prefix}secret" "${prefix}generate" "${prefix}insert" "${prefix}undeploy" \
"${prefix}recipe" "${prefix}create" "${prefix}release" "${prefix}versions" \ "${prefix}volume" "${prefix}recipe" "${prefix}create" "${prefix}release" \
"${prefix}server" "${prefix}add" "${prefix}__" "${prefix}init" "${prefix}apps" \ "${prefix}versions" "${prefix}server" "${prefix}add" "${prefix}__" \
"${prefix}upgrade" "${prefix}doctor" "${prefix}help"; done; } "${prefix}init" "${prefix}apps" "${prefix}upgrade" "${prefix}doctor" \
"${prefix}help"; done; }
# docopt parser above, complete command for generating this parser is `docopt.sh abra` # docopt parser above, complete command for generating this parser is `docopt.sh abra`
PROGRAM_NAME=$(basename "$0") PROGRAM_NAME=$(basename "$0")
@ -492,13 +500,7 @@ require_docker_version() {
done done
} }
require_valid_json() {
require_jq
$JQ "$1" > /dev/null || error "Invalid JSON '$1'"
}
require_valid_yaml() { require_valid_yaml() {
require_yq
$YQ e "$1" > /dev/null || error "Invalid YAML '$1'" $YQ e "$1" > /dev/null || error "Invalid YAML '$1'"
} }
@ -513,8 +515,8 @@ require_apps_json() {
if [ -f "$ABRA_APPS_JSON" ]; then if [ -f "$ABRA_APPS_JSON" ]; then
modified=$(curl --silent --head "$ABRA_APPS_URL" | \ modified=$(curl --silent --head "$ABRA_APPS_URL" | \
awk '/^Last-Modified/{print $0}' | \ awk '/^last-modified/{print tolower($0)}' | \
sed 's/^Last-Modified: //') sed 's/^last-modified: //I')
remote_ctime=$(date --date="$modified" +%s) remote_ctime=$(date --date="$modified" +%s)
local_ctime=$(stat -c %Z "$ABRA_APPS_JSON") local_ctime=$(stat -c %Z "$ABRA_APPS_JSON")
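For readability, a condensed sketch of the freshness check shown in the new column above: it matches the lowercase `last-modified` header that curl reports for HTTP/2 responses and compares it against the local file's change time. The final download step is an assumption about how the surrounding function uses the result:

```sh
modified=$(curl --silent --head "$ABRA_APPS_URL" | \
  awk '/^last-modified/{print tolower($0)}' | \
  sed 's/^last-modified: //I')
remote_ctime=$(date --date="$modified" +%s)
local_ctime=$(stat -c %Z "$ABRA_APPS_JSON")

# Assumed usage: only re-download apps.json when the remote copy is newer
if [ "$remote_ctime" -gt "$local_ctime" ]; then
  curl --silent "$ABRA_APPS_URL" --output "$ABRA_APPS_JSON"
fi
```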
@ -604,7 +606,11 @@ require_app_version() {
if [ -z "$VERSION" ]; then if [ -z "$VERSION" ]; then
warning "No version specified, dangerously using latest git 😨" warning "No version specified, dangerously using latest git 😨"
else else
git checkout -q "$VERSION" || error "Can't find version $VERSION" if [ "$abra___chaos" = "false" ]; then
git checkout -q "$VERSION" || error "Can't find version $VERSION"
else
warning "Chaos deploy specified, dangerously using latest git 😨"
fi
fi fi
} }
@ -646,13 +652,31 @@ require_jq() {
} }
require_yq() { require_yq() {
vendor_binary "https://github.com/mikefarah/yq" "v4.6.1" "yq_linux_amd64" vendor_binary "https://github.com/mikefarah/yq" "v4.9.6" "yq_linux_amd64"
} }
checkout_main_or_master() { checkout_main_or_master() {
git checkout main > /dev/null 2>&1 || git checkout master > /dev/null 2>&1 git checkout main > /dev/null 2>&1 || git checkout master > /dev/null 2>&1
} }
pwgen_native() {
if type pwgen > /dev/null 2>&1; then
pwgen -s "$length" 1
return
fi
tr -dc 'a-zA-Z0-9' < /dev/urandom | head -c "$1"
}
pwqgen_native() {
if type pwqgen > /dev/null 2>&1; then
pwqgen
return
fi
shuf -n 3 /usr/share/dict/words | tr -dc 'a-zA-Z0-9' | tr -d '\n'
}
# FIXME 3wc: update or remove # FIXME 3wc: update or remove
if [ -z "$ABRA_ENV" ] && [ -f .env ] && type direnv > /dev/null 2>&1 && ! direnv status | grep -q 'Found RC allowed true'; then if [ -z "$ABRA_ENV" ] && [ -f .env ] && type direnv > /dev/null 2>&1 && ! direnv status | grep -q 'Found RC allowed true'; then
error "direnv is blocked, run direnv allow" error "direnv is blocked, run direnv allow"
@ -686,7 +710,11 @@ get_recipe_version_latest() {
info "No versions found" info "No versions found"
else else
VERSION="${RECIPE_VERSIONS[-1]}" VERSION="${RECIPE_VERSIONS[-1]}"
info "Chose version $VERSION" if [ "$abra___chaos" = "true" ]; then
info "Not choosing a version and instead deploying from latest commit"
else
info "Chose version $VERSION"
fi
fi fi
} }
@ -713,6 +741,7 @@ output_version_summary() {
CONSENT_TO_UPDATE=$abra___update CONSENT_TO_UPDATE=$abra___update
FORCE_DEPLOY=$abra___force FORCE_DEPLOY=$abra___force
CHAOS_DEPLOY=$abra___chaos
local -a IS_AN_UPDATE="false" local -a IS_AN_UPDATE="false"
local -a UNABLE_TO_DETECT="false" local -a UNABLE_TO_DETECT="false"
@ -754,7 +783,11 @@ output_version_summary() {
if [ "$live_version" != "$service_tag" ] || [ "$live_digest" != "$service_digest" ]; then if [ "$live_version" != "$service_tag" ] || [ "$live_digest" != "$service_digest" ]; then
IS_AN_UPDATE="true" IS_AN_UPDATE="true"
fi fi
echo " to be deployed: $(tput setaf 1)$service_tag ($service_digest)$(tput sgr0)" if [ "$abra___chaos" = "true" ]; then
echo " to be deployed: $(tput setaf 1)$service_tag ($service_digest) (+ latest git)$(tput sgr0)"
else
echo " to be deployed: $(tput setaf 1)$service_tag ($service_digest)$(tput sgr0)"
fi
fi fi
else else
if [[ $UNDEPLOYED_STATE == "true" ]]; then if [[ $UNDEPLOYED_STATE == "true" ]]; then
@ -780,56 +813,64 @@ output_version_summary() {
else else
if [[ $UNABLE_TO_DETECT == "false" ]] && \ if [[ $UNABLE_TO_DETECT == "false" ]] && \
[[ $UNDEPLOYED_STATE == "false" ]] && \ [[ $UNDEPLOYED_STATE == "false" ]] && \
[[ $FORCE_DEPLOY == "false" ]]; then [[ $FORCE_DEPLOY == "false" ]] && \
[[ $CHAOS_DEPLOY = "false" ]]; then
success "Nothing to deploy, you're on latest (use --force to re-deploy anyway)" success "Nothing to deploy, you're on latest (use --force to re-deploy anyway)"
exit 0 exit 0
fi fi
fi fi
} }
# Note(decentral1se): inspired by https://github.com/vitalets/docker-stack-wait-deploy
ensure_stack_deployed() { ensure_stack_deployed() {
STACK_NAME=$1 local -a HEALTHY # mapping
local -a MISSING # mapping
warning "Polling deploy state to check for success" TIMEOUT=60
idx=0
while true; do IFS=' ' read -r -a SERVICES <<< "$(docker stack services "${STACK_NAME}" --format "{{.ID}}" | tr '\n' ' ')"
all_services_done=1 debug "Considering the following service IDs: ${SERVICES[*]} for ${STACK_NAME} deployment"
has_errors=0
service_ids=$(docker stack services -q "$STACK_NAME") while [ ! $(( ${#HEALTHY[@]} + ${#MISSING[@]} )) -eq ${#SERVICES[@]} ]; do
for service in $(docker ps -f "name=$STACK_NAME" -q); do
debug "Polling $service for deployment status"
for service_id in $service_ids; do healthcheck=$(docker inspect --format "{{ json .State }}" "$service" | jq "try(.Health.Status // \"missing\")")
# see: https://github.com/moby/moby/issues/28012 name=$(docker inspect --format '{{ index .Config.Labels "com.docker.swarm.service.name" }}' "$service")
service_state=$(docker service inspect --format "{{if .UpdateStatus}}{{.UpdateStatus.State}}{{else}}created{{end}}" "$service_id")
debug "$service_id has state: $service_state" if [[ ${MISSING[*]} =~ ${name} ]] || [[ ${HEALTHY[*]} =~ ${name} ]]; then
debug "$name already marked as missing healthcheck / healthy status"
continue
fi
case "$service_state" in if [[ "$healthcheck" == "\"missing\"" ]] && [[ ! "${MISSING[*]}" =~ $name ]]; then
created|completed) MISSING+=("$name")
;; debug "No healthcheck configured for $name"
paused|rollback_completed) continue
has_errors=1 fi
;;
*) if [[ "$healthcheck" == "\"healthy\"" ]] && [[ ! "${HEALTHY[*]}" =~ $name ]]; then
all_services_done=0 HEALTHY+=("$name")
;; debug "Marking $name with healthy status"
esac continue
fi
if [[ "$healthcheck" == \""unhealthy"\" ]]; then
logs=$(docker inspect --format "{{ json .State.Health.Log }}" "$service")
exitcode="$(echo "$logs" | $JQ '.[-1] | .ExitCode')"
warning "Healthcheck for new instance of $name is failing (exit code: $exitcode)"
warning "$(echo "$logs" | $JQ -r '.[-1] | .Output')"
error "healthcheck for $name is failing, this deployment did not succeed :("
fi
done done
if [ "$all_services_done" == "1" ]; then idx=$(("$idx" + 1))
if [ "$has_errors" == "1" ]; then if [[ $idx -eq "$TIMEOUT" ]]; then
warning "Deployment appears to have failed" error "Waiting for healthy status timed out, this deployment did not succeed :("
warning "Run \"abra app ${STACK_NAME} logs \" to see app logs"
warning "Run \"abra app ${STACK_NAME} ps \" to see app status"
break
else
warning "Deployment appears to have suceeded"
break
fi
else
sleep 1
fi fi
sleep 1
debug "Deploying: $(( ${#HEALTHY[@]} + ${#MISSING[@]} ))/${#SERVICES[@]} (timeout: $idx/$TIMEOUT)"
done done
} }
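The side-by-side rendering of this hunk is hard to follow, so here is a condensed sketch of the new polling approach, following the right-hand (new) column: it waits until every service in the stack is either healthy or has no healthcheck, and fails fast on an unhealthy one. Names mirror the diff, but the body is abridged and assumes `$STACK_NAME` is set by the caller; treat it as an illustration, not the verbatim function:

```sh
wait_for_stack_health() {
  local -a HEALTHY MISSING
  TIMEOUT=60
  idx=0

  # Every service ID that belongs to this stack
  IFS=' ' read -r -a SERVICES <<< "$(docker stack services "$STACK_NAME" --format "{{.ID}}" | tr '\n' ' ')"

  while [ ! $(( ${#HEALTHY[@]} + ${#MISSING[@]} )) -eq ${#SERVICES[@]} ]; do
    for container in $(docker ps -f "name=$STACK_NAME" -q); do
      health=$(docker inspect --format '{{ json .State }}' "$container" | jq 'try(.Health.Status // "missing")')
      name=$(docker inspect --format '{{ index .Config.Labels "com.docker.swarm.service.name" }}' "$container")

      # Skip anything already classified
      case " ${HEALTHY[*]} ${MISSING[*]} " in *" $name "*) continue ;; esac

      case "$health" in
        '"healthy"') HEALTHY+=("$name") ;;    # healthcheck passing
        '"missing"') MISSING+=("$name") ;;    # no healthcheck configured
        '"unhealthy"') echo "healthcheck for $name is failing" >&2; return 1 ;;
      esac
    done

    idx=$((idx + 1))
    if [ "$idx" -eq "$TIMEOUT" ]; then
      echo "timed out waiting for healthy status" >&2
      return 1
    fi
    sleep 1
  done
}
```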
@ -858,14 +899,14 @@ get_servers() {
get_app_secrets() { get_app_secrets() {
# FIXME 3wc: requires bash 4, use for loop instead # FIXME 3wc: requires bash 4, use for loop instead
mapfile -t PASSWORDS < <(grep "SECRET.*VERSION.*" "$ENV_FILE") mapfile -t PASSWORDS < <(grep "^SECRET.*VERSION.*" "$ENV_FILE")
} }
load_instance() { load_instance() {
APP="$abra__app_" APP="$abra__app_"
# load all files matching "$APP.env" into ENV_FILES array # load all files matching "$APP.env" into ENV_FILES array
mapfile -t ENV_FILES < <(find -L "$ABRA_DIR" -name "$APP.env") mapfile -t ENV_FILES < <(find -L "$ABRA_DIR/servers/" -name "$APP.env")
# FIXME 3wc: requires bash 4, use for loop instead # FIXME 3wc: requires bash 4, use for loop instead
case "${#ENV_FILES[@]}" in case "${#ENV_FILES[@]}" in
@@ -1304,17 +1345,15 @@ _abra_backup_mysql() {
 ###### .. app deploy

 help_app_deploy (){
-  echo "abra [options] app <app> deploy [--update] [--force] [--fast] [--skip-version-check] [--no-domain-poll] [--no-state-poll]
+  echo "abra [options] app <app> deploy [--update] [--force] [--fast]

 Deploy app <app> to the configured server.

 OPTIONS
   --update               Consent to deploying an updated app version
   --force                Force a deployment regardless of state
-  --skip-version-check   Don't try and detect deployed version
-  --no-domain-poll       Don't wait for the configured domain to come up
-  --no-state-poll        Don't watch deployment state for success/failure
-  --fast                 Alias for --skip-version-check --no-domain-poll
+  --fast                 Run deployment without various safety checks
+  --chaos                Deploy straight from latest Git version (potentially chaotic!)

 POWERED BY
 docker stack deploy -c compose.yml <app>"
@@ -1328,9 +1367,9 @@ sub_app_deploy (){
     NO_DOMAIN_POLL=true
     NO_STATE_POLL=true
   else
-    SKIP_VERSION_CHECK=$abra___skip_version_check
-    NO_DOMAIN_POLL=$abra___no_domain_poll
-    NO_STATE_POLL=$abra___no_state_poll
+    SKIP_VERSION_CHECK=false
+    NO_DOMAIN_POLL=false
+    NO_STATE_POLL=false
   fi

   if [ ! "$abra__version_" = "dev" ]; then
@@ -1339,7 +1378,7 @@ sub_app_deploy (){
   if [ -n "$abra__version_" ]; then
     VERSION="$abra__version_"
     if ! printf '%s\0' "${RECIPE_VERSIONS[@]}" | grep -Fqxz -- "$VERSION"; then
-      error "'$version' doesn't appear to be a valid version of $TYPE"
+      error "'$VERSION' doesn't appear to be a valid version of $TYPE"
     fi
     info "Chose version $VERSION"
   else
@@ -1595,6 +1634,9 @@ sub_app_secret_insert() {
   # shellcheck disable=SC2059
   printf "$PW" | docker secret create "${STACK_NAME}_${SECRET}_${VERSION}" - > /dev/null
+  # shellcheck disable=SC2181
+  if [[ $? != 0 ]]; then exit 1; fi
+
   if [ "$STORE_WITH_PASS" == "true" ] && type pass > /dev/null 2>&1; then
     echo "$PW" | pass insert "hosts/$DOCKER_CONTEXT/${STACK_NAME}/${SECRET}" -m > /dev/null
     success "pass: hosts/$DOCKER_CONTEXT/${STACK_NAME}/${SECRET}"
@@ -1689,11 +1731,9 @@ sub_app_secret_generate(){
   fi

   if [[ -n "$length" ]]; then
-    require_binary pwgen
-    abra__cmd_="pwgen -s $length 1"
+    abra__cmd_="pwgen_native $length"
   else
-    require_binary pwqgen
-    abra__cmd_=pwqgen
+    abra__cmd_=pwqgen_native
   fi

   PWGEN=${abra__cmd_}
@@ -1703,7 +1743,7 @@ sub_app_secret_generate(){
     error "Required arguments missing"
   fi

-  PW=$($PWGEN|tr -d "\n")
+  PW=$($PWGEN)

   success "Password: $PW"
@@ -1804,10 +1844,12 @@ sub_app_run(){
     RUN_USER="-u $abra___user"
   fi

+  local -a ARGS="${ARGS:-""}"
+
   if [ "$abra___no_tty" = "true" ]; then
-    ARGS="-i"
+    ARGS+=" -i"
   else
-    ARGS="-it"
+    ARGS+=" -it"
   fi

   CONTAINER=$(docker container ls --format "table {{.ID}},{{.Names}}" \
@@ -1819,11 +1861,14 @@ sub_app_run(){
   fi

   debug "Using container ID ${CONTAINER}"

-  # shellcheck disable=SC2145
-  debug "Running command: docker exec $RUN_USER $ARGS $CONTAINER $@"
-  exit
-
+  # 3wc: we want the "splitting" that shellcheck warns us about, so that -u and
+  # $RUN_USER aren't treated as a single argument:
   # shellcheck disable=SC2086
-  docker exec $RUN_USER $ARGS "$CONTAINER" "$@"
+  if ! docker exec $RUN_USER $ARGS "$CONTAINER" "$@"; then
+    exit 1
+  fi

   return
 }
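The unquoted $RUN_USER and $ARGS expansions are deliberate, as the comment above notes: word splitting is what turns "-u someuser" and " -it" into separate docker arguments. A small, self-contained illustration of the difference:

#!/usr/bin/env bash
# With splitting, docker would see "-u", "postgres" and "-it" as separate words;
# with quoting it would see two space-containing blobs and fail.
RUN_USER="-u postgres"
ARGS=" -it"

# shellcheck disable=SC2086
printf '%s\n' docker exec $RUN_USER $ARGS     # one argument per line: -u / postgres / -it

printf '%s\n' docker exec "$RUN_USER" "$ARGS" # "-u postgres" and " -it" stay glued together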
@@ -1867,6 +1912,31 @@ sub_app_rollback(){
  sub_app_deploy
}
###### .. app restart
help_app_restart (){
echo "abra [options] app <app> restart <service>
Restart a service.
EXAMPLES
abra app peertube restart app
POWERED BY
docker service scale foo=bar"
}
sub_app_restart(){
SERVICE="${abra__service_}"
debug "Scaling ${STACK_NAME}_${SERVICE} to 0 running containers..."
docker service scale "${STACK_NAME}_${SERVICE}=0"
debug "Scaling ${STACK_NAME}_${SERVICE} back to 1 running container..."
docker service scale "${STACK_NAME}_${SERVICE}=1"
success "${STACK_NAME}_${SERVICE} was succesfully restarted!"
}
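Under the hood this is just two docker service scale calls; a hand-rolled equivalent (assuming, for the sake of example, a stack called myapp_example_com and its app service) would be:

# Scale the service to zero and back up to one replica, forcing a fresh container.
docker service scale myapp_example_com_app=0
docker service scale myapp_example_com_app=1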
###### .. app logs
help_app_logs (){
  echo "abra [options] app <app> logs [<service>]
@@ -2053,7 +2123,7 @@ sub_recipe_versions() {
 ###### .. recipe <recipe> release

 help_recipe_release() {
-  echo "abra [options] recipe <recipe> release
+  echo "abra [options] recipe <recipe> release [--force] [--bump]

 (For recipe maintainers)
@@ -2066,6 +2136,7 @@ any of the images in <recipe>.
 OPTIONS
   --force    Over-write existing tag; use this if you have a git tag for the
              recipe version already, to make sure labels are in sync.
+  --bump     Make an n+1 release (packager specific changes to recipe)

 POWERED BY
 skopeo inspect docker://image:tag
@@ -2080,8 +2151,13 @@ sub_recipe_release() {
   recipe="$abra__recipe_"
   force="$abra___force"
+  bump="$abra___bump"

   recipe_dir="$ABRA_DIR/apps/$recipe"

+  if [ "$bump" = "true" ] && [ "$force" = "true" ]; then
+    error "--bump and --force don't play nicely together"
+  fi
+
   cd "$recipe_dir" || error "Can't find recipe dir '$recipe_dir'"

   get_recipe_versions "$recipe"
@@ -2094,22 +2170,29 @@ sub_recipe_release() {
       latest_version_message=$(git show -s --format=%s)
     fi
     info "Latest available version: '$latest_version'"
-    info "Latest verion message: '$latest_version_message'"
+    info "Latest version message: '$latest_version_message'"
   else
     latest_version=""
     latest_version_message="Initial tagged release"
     info "No previous releases found"
+    if [ "$bump" = "true" ]; then
+      error "--bump can't do its work when there are no existing release versions"
+    fi
   fi

   current_tag=$(git tag --points-at HEAD)

-  if [ "$force" = "false" ] && [ -n "$current_tag" ]; then
-    error "$recipe is already on $current_tag, no release needed"
+  if [ "$force" = "false" ] && [ -n "$current_tag" ] && [ "$bump" = "false" ]; then
+    success "$recipe is already on $current_tag, no release needed"
   fi

   if [ "$(git rev-parse --abbrev-ref --symbolic-full-name HEAD)" = "HEAD" ]; then
     warning "It looks like $recipe_dir is in 'detached HEAD' state"
-    read -rp "Check out main/master branch first? [Y/n] "
-    if [ "${choice,,}" != "n" ]; then
-      checkout_main_or_master
+    if [ "$abra___no_prompt" = "false" ]; then
+      read -rp "Check out main/master branch first? [Y/n] "
+      if [ "${choice,,}" != "n" ]; then
+        checkout_main_or_master
+      fi
+    else
+      checkout_main_or_master
     fi
   fi
@@ -2121,7 +2204,12 @@ sub_recipe_release() {
   new_version="false"

   for compose_file in "${compose_files[@]}"; do
+    if [ "$bump" = "true" ]; then
+      continue # skip trying to upgrade labels for --bump logic
+    fi
+
     mapfile -t services < <($YQ e -N '.services | keys | .[]' "$compose_file" | sort -u)

     if [ "$compose_file" = "compose.yml" ] && ! printf '%s\0' "${services[@]}" | grep -Fqxz -- "app"; then
       # shellcheck disable=SC2016
       warning 'No `app` service found; which one should we use for the version number?'
@@ -2162,7 +2250,7 @@ sub_recipe_release() {
     fi

     info "Fetching $service_image metadata from Docker Hub"
     service_data=$(skopeo inspect "docker://$service_image")
-    service_digest=$(echo "$service_data" | jq -r '.Digest' | cut -d':' -f2 | cut -c-8)
+    service_digest=$(echo "$service_data" | $JQ -r '.Digest' | cut -d':' -f2 | cut -c-8)

     label="coop-cloud.\${STACK_NAME}.$service.version=${service_tag}-${service_digest}"
@@ -2195,24 +2283,57 @@ sub_recipe_release() {
     error "Hmm, something went wrong generating a new version number.."

+  local -a expected_format="^coop-cloud.[a-z]*.[a-z]*.[a-z]*=v\\d*.\\d*.\\d*-\\S{8}"
+  if [[ ! "$new_version" =~ $expected_format ]]; then
+    error "'$new_version' does not match the expected label format, bailing out..."
+  fi
+
   success "All compose files updated; new version is $new_version"

-  read -rp "Commit your changes to git? [y/N]? " choice
+  if [ "$abra___no_prompt" = "false" ] && [ "$bump" = "false" ]; then
+    git --no-pager diff
+    read -rp "Commit your changes to git? [y/N]? " choice
     if [ "${choice,,}" != "y" ]; then
       return
     fi
+  fi

-  git commit -avem "Version $new_version; sync labels" || exit
+  if [ "$abra___no_prompt" = "false" ] && [ "$bump" = "false" ]; then
+    git commit -avem "Version $new_version; sync labels" || exit
+  else
+    git commit -am "Version $new_version; sync labels" || true
+  fi

-  read -rp "Tag this as \`$new_version\`? [y/N]? " choice
-
-  if [ "${choice,,}" != "y" ]; then
-    return
-  fi
-
-  test "$force" = "true" && git tag -d "$new_version"
-  git tag -aem "$latest_version_message" "$new_version"
+  if [ "$abra___no_prompt" = "false" ]; then
+    read -rp "Tag this as \`$new_version\`? [y/N]? " choice
+    if [ "${choice,,}" != "y" ]; then
+      return
+    fi
+  fi
+
+  if [ "$force" = "true" ]; then
+    git tag -d "$new_version" || true
+    git push origin --delete "$new_version" || true
+    debug "Deleted local tag and remote tag if present"
+  fi
+
+  if [ "$abra___no_prompt" = "false" ]; then
+    git tag -aem "$latest_version_message" "$new_version"
+  else
+    git tag -am "$latest_version_message" "$new_version" || true
+  fi
+
+  if [ "$abra___no_prompt" = "false" ]; then
+    read -rp "Git push this new tag? [y/N]? " choice
+    if [ "${choice,,}" = "y" ]; then
+      git push && git push --tags
+    fi
+  else
+    git push && git push --tags
+  fi
 }
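The release flow derives its version labels from the service's image tag plus a truncated skopeo digest, as the label assignment further up shows. A hand-run sketch of the same derivation, using nginx:1.21.0 purely as an example image:

# Reproduce the label value the release flow would generate for one service.
service_image="nginx:1.21.0"
service_tag="${service_image##*:}"
service_digest=$(skopeo inspect "docker://$service_image" | jq -r '.Digest' | cut -d':' -f2 | cut -c-8)

# e.g. coop-cloud.${STACK_NAME}.app.version=1.21.0-6a2b1e61
echo "coop-cloud.\${STACK_NAME}.app.version=${service_tag}-${service_digest}"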
####################################### #######################################
@@ -2409,9 +2530,9 @@ OPTIONS
 sub_upgrade() {
   if [[ "$abra___dev" == "true" ]]; then
-    curl https://install.abra.coopcloud.tech | bash -s -- --dev
+    curl https://install.abra.coopcloud.tech | bash -s -- --dev --no-deps
   else
-    curl https://install.abra.coopcloud.tech | bash
+    curl https://install.abra.coopcloud.tech | bash --no-deps
   fi
 }

@@ -2494,19 +2615,6 @@ sub_network() {
 abra() {
   require_bash_4

-  # TODO (3wc): we either need to do this, or add 'shellcheck disable' all over
-  # the place to handle the dynamically-defined vars
-  declare abra___stack abra___env abra__command_ abra__args_ \
-    abra__secret_ abra__version_ abra__data_ abra___user abra__host_ \
-    abra__type_ abra__port_ abra__user_ abra__service_ abra__src_ abra__dst_ \
-    abra___server abra___domain abra___pass abra___secrets abra___status \
-    abra___no_tty abra___app_name abra__subcommands_ abra___skip_update \
-    abra___skip_check abra__backup_file_ abra___verbose abra___debug \
-    abra___help abra___branch abra___volumes abra__provider_ abra___type \
-    abra___dev abra___update abra___no_prompt abra___force \
-    abra___skip_version_check abra__recipe_ abra___no_domain_poll \
-    abra___fast abra__volume_ abra___no_state_poll
-
   if ! type tput > /dev/null 2>&1; then
     tput() {
       echo -n
@@ -2570,7 +2678,7 @@ abra() {
   # Use abra__command_ in case `command` is provided (i.e. `volume` or `stack`)
   CMD="sub_${abra__command_}"
   if type "$CMD" > /dev/null 2>&1; then
-    # shellcheck disable=SC2086
+    # shellcheck disable=SC2086,SC2048
     "$CMD" ${abra__args_[*]}
   else
     docopt_exit

bin/abralib.py (new file, 108 lines)

@@ -0,0 +1,108 @@
"""Shared utilities for bin/*.py scripts."""
from logging import DEBUG, basicConfig, getLogger
from os import chdir, mkdir
from os.path import exists, expanduser
from pathlib import Path
from shlex import split
from subprocess import check_output
from sys import exit
from requests import get
HOME_PATH = expanduser("~/")
CLONES_PATH = Path(f"{HOME_PATH}/.abra/apps").absolute()
REPOS_TO_SKIP = (
"abra",
"abra-apps",
"abra-gandi",
"abra-hetzner",
"auto-apps-json",
"auto-mirror",
"backup-bot",
"coopcloud.tech",
"coturn",
"docker-cp-deploy",
"docker-dind-bats-kcov",
"docs.coopcloud.tech",
"example",
"gardening",
"organising",
"pyabra",
"radicle-seed-node",
"stack-ssh-deploy",
"swarm-cronjob",
)
YQ_PATH = Path(f"{HOME_PATH}/.abra/vendor/yq")
JQ_PATH = Path(f"{HOME_PATH}/.abra/vendor/jq")
log = getLogger(__name__)
basicConfig()
log.setLevel(DEBUG)
def _run_cmd(cmd, shell=False, **kwargs):
"""Run a shell command."""
args = [split(cmd)]
if shell:
args = [cmd]
kwargs = {"shell": shell}
try:
return check_output(*args, **kwargs).decode("utf-8").strip()
except Exception as exception:
log.error(f"Failed to run {cmd}, saw {str(exception)}")
exit(1)
def get_repos_json():
""" Retrieve repo list from Gitea """
url = "https://git.autonomic.zone/api/v1/orgs/coop-cloud/repos"
log.info(f"Retrieving {url}")
repos = []
response = True
page = 1
try:
while response:
log.info(f"Trying to fetch page {page}")
response = get(url + f"?page={page}", timeout=10).json()
repos.extend(response)
page += 1
return repos
except Exception as exception:
log.error(f"Failed to retrieve {url}, saw {str(exception)}")
exit(1)
def clone_all_apps(repos_json, ssh=False):
"""Clone all Co-op Cloud apps to ~/.abra/apps."""
if not exists(CLONES_PATH):
mkdir(CLONES_PATH)
if ssh:
repos = [[p["name"], p["ssh_url"]] for p in repos_json]
else:
repos = [[p["name"], p["clone_url"]] for p in repos_json]
for name, url in repos:
if name in REPOS_TO_SKIP:
continue
if not exists(f"{CLONES_PATH}/{name}"):
log.info(f"Retrieving {url}")
_run_cmd(f"git clone {url} {CLONES_PATH}/{name}")
chdir(f"{CLONES_PATH}/{name}")
if not int(_run_cmd("git branch --list | wc -l", shell=True)):
log.info(f"Guessing main branch is HEAD for {name}")
_run_cmd("git checkout main")
else:
log.info(f"Updating {name}")
chdir(f"{CLONES_PATH}/{name}")
_run_cmd("git fetch -a")

@@ -6,84 +6,44 @@
 # ~/.abra/apps), and format it as JSON so that it can be hosted here:
 # https://apps.coopcloud.tech

+import argparse
+
 from json import dump
-from logging import DEBUG, basicConfig, getLogger
-from os import chdir, listdir, mkdir
-from os.path import basename, exists, expanduser
+from os import chdir, environ, getcwd, listdir
+from os.path import basename
 from pathlib import Path
 from re import findall, search
-from shlex import split
-from subprocess import DEVNULL, check_output
-from sys import exit
+from subprocess import DEVNULL

 from requests import get

-HOME_PATH = expanduser("~/")
-CLONES_PATH = Path(f"{HOME_PATH}/.abra/apps").absolute()
-YQ_PATH = Path(f"{HOME_PATH}/.abra/vendor/yq")
-SCRIPT_PATH = Path(__file__).absolute().parent
-REPOS_TO_SKIP = (
-    "abra",
-    "abra-apps",
-    "abra-gandi",
-    "abra-hetzner",
-    "backup-bot",
-    "coopcloud.tech",
-    "coturn",
-    "docker-cp-deploy",
-    "docker-dind-bats-kcov",
-    "docs.coopcloud.tech",
-    "example",
-    "gardening",
-    "organising",
-    "pyabra",
-    "radicle-seed-node",
-    "stack-ssh-deploy",
-    "swarm-cronjob",
-)
-
-log = getLogger(__name__)
-basicConfig()
-log.setLevel(DEBUG)
-
-
-def _run_cmd(cmd, shell=False, **kwargs):
-    """Run a shell command."""
-    args = [split(cmd)]
-
-    if shell:
-        args = [cmd]
-        kwargs = {"shell": shell}
-
-    try:
-        return check_output(*args, **kwargs).decode("utf-8").strip()
-    except Exception as exception:
-        log.error(f"Failed to run {cmd}, saw {str(exception)}")
-        exit(1)
-
-
-def get_repos_json():
-    """ Retrieve repo list from Gitea """
-    url = "https://git.autonomic.zone/api/v1/orgs/coop-cloud/repos"
-
-    log.info(f"Retrieving {url}")
-
-    repos = []
-    response = True
-    page = 1
-
-    try:
-        while response:
-            log.info(f"Trying to fetch page {page}")
-            response = get(url + f"?page={page}", timeout=10).json()
-            repos.extend(response)
-            page += 1
-
-        return repos
-    except Exception as exception:
-        log.error(f"Failed to retrieve {url}, saw {str(exception)}")
-        exit(1)
+from abralib import (
+    CLONES_PATH,
+    JQ_PATH,
+    REPOS_TO_SKIP,
+    YQ_PATH,
+    _run_cmd,
+    clone_all_apps,
+    get_repos_json,
+    log,
+)
+
+parser = argparse.ArgumentParser(description="Generate a new apps.json")
+parser.add_argument("--output", type=Path, default=f"{getcwd()}/apps.json")
+
+
+def skopeo_login():
+    """Log into the docker registry to avoid rate limits."""
+    user = environ.get("SKOPEO_USER")
+    password = environ.get("SKOPEO_PASSWORD")
+    registry = environ.get("SKOPEO_REGISTRY", "docker.io")
+
+    if not user or not password:
+        log.info("Failed to log in via Skopeo due to missing env vars")
+        return
+
+    login_cmd = f"skopeo login {registry} -u {user} -p {password}"
+    output = _run_cmd(login_cmd, shell=True)
+    log.info(f"Skopeo login attempt: {output}")


 def get_published_apps_json():
@@ -99,31 +59,6 @@ def get_published_apps_json():
     return {}


-def clone_all_apps(repos_json):
-    """Clone all Co-op Cloud apps to ~/.abra/apps."""
-    if not exists(CLONES_PATH):
-        mkdir(CLONES_PATH)
-
-    repos = [[p["name"], p["ssh_url"]] for p in repos_json]
-
-    for name, url in repos:
-        if name in REPOS_TO_SKIP:
-            continue
-
-        if not exists(f"{CLONES_PATH}/{name}"):
-            log.info(f"Retrieving {url}")
-            _run_cmd(f"git clone {url} {CLONES_PATH}/{name}")
-
-            chdir(f"{CLONES_PATH}/{name}")
-            if not int(_run_cmd("git branch --list | wc -l", shell=True)):
-                log.info(f"Guessing main branch is HEAD for {name}")
-                _run_cmd("git checkout main")
-        else:
-            log.info(f"Updating {name}")
-            chdir(f"{CLONES_PATH}/{name}")
-            _run_cmd("git fetch -a")
-
-
 def generate_apps_json(repos_json):
     """Generate the abra-apps.json application versions file."""
     apps_json = {}
@@ -257,7 +192,7 @@ def get_app_versions(app_path, cached_apps_json):
         if image in ("null", "---"):
             continue

-        images_cmd = f"skopeo inspect docker://{image} | jq '.Digest'"
+        images_cmd = f"skopeo inspect docker://{image} | {JQ_PATH} '.Digest'"
         output = _run_cmd(images_cmd, shell=True)

         service_version_info = {
@@ -280,11 +215,14 @@ def get_app_versions(app_path, cached_apps_json):
 def main():
     """Run the script."""
+    args = parser.parse_args()
+
+    skopeo_login()
+
     repos_json = get_repos_json()
     clone_all_apps(repos_json)

-    target = f"{SCRIPT_PATH}/../deploy/apps.coopcloud.tech/apps.json"
-    with open(target, "w", encoding="utf-8") as handle:
+    with open(args.output, "w", encoding="utf-8") as handle:
         dump(
             generate_apps_json(repos_json),
             handle,
@@ -293,7 +231,7 @@ def main():
             sort_keys=True,
         )

-    log.info(f"Successfully generated {target}")
+    log.info(f"Successfully generated {args.output}")


 main()

bin/clone-all-apps.py (new executable file, 16 lines)

@@ -0,0 +1,16 @@
#!/usr/bin/env python3
# Usage: ./clone-all-apps.py
#
# Clone all available apps into ~/.abra/apps using ssh:// URLs
from abralib import clone_all_apps, get_repos_json
def main():
"""Run the script."""
repos_json = get_repos_json()
clone_all_apps(repos_json, ssh=True)
main()

bin/github-sync.py (new executable file, 47 lines)

@@ -0,0 +1,47 @@
#!/usr/bin/env python3
# Usage: ./github-sync.py
#
# Mirror repositories to Github (Fuck M$, get it straight)
from os import chdir, environ, listdir
from abralib import CLONES_PATH, _run_cmd, clone_all_apps, get_repos_json, log
REPOS_TO_SKIP = (
"backup-bot",
"docker-dind-bats-kcov",
"docs.coopcloud.tech",
"pyabra",
"radicle-seed-node",
"swarm-cronjob",
)
def main():
"""Run the script."""
repos_json = get_repos_json()
clone_all_apps(repos_json)
for app in listdir(CLONES_PATH):
if app in REPOS_TO_SKIP:
log.info(f"Skipping {app}")
continue
app_path = f"{CLONES_PATH}/{app}"
chdir(app_path)
log.info(f"Mirroring {app}...")
token = environ.get("GITHUB_ACCESS_TOKEN")
remote = f"https://coopcloudbot:{token}@github.com/Coop-Cloud/{app}.git"
_run_cmd(
f"git remote add github {remote} || true",
shell=True,
)
_run_cmd("git push github --all")
main()

bin/renovate-ls-apps.py (new executable file, 20 lines)

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Usage: ./renovate-ls-apps.py
#
# Output list of apps for Renovate bot configuration
from abralib import REPOS_TO_SKIP, get_repos_json
def main():
"""Run the script."""
repos = [p["full_name"] for p in get_repos_json()]
repos.sort()
for repo in repos:
if repo.split("/")[-1] in REPOS_TO_SKIP:
continue
print(f'"{repo}",')
main()

File diff suppressed because it is too large.

@@ -1,41 +0,0 @@
---
version: "3.8"
services:
app:
image: "nginx:stable"
configs:
- source: abra_conf
target: /etc/nginx/conf.d/abra.conf
- source: abra_apps_json
target: /var/www/abra-apps/apps.json
volumes:
- "public:/var/www/abra-apps"
networks:
- proxy
deploy:
update_config:
failure_action: rollback
order: start-first
labels:
- "traefik.enable=true"
- "traefik.http.services.abra-apps.loadbalancer.server.port=80"
- "traefik.http.routers.abra-apps.rule=Host(`apps.coopcloud.tech`, `abra-apps.cloud.autonomic.zone`)"
- "traefik.http.routers.abra-apps.entrypoints=web-secure"
- "traefik.http.routers.abra-apps.tls.certresolver=production"
- "traefik.http.routers.abra-apps.middlewares=abra-apps-redirect"
- "traefik.http.middlewares.abra-apps-redirect.headers.SSLForceHost=true"
- "traefik.http.middlewares.abra-apps-redirect.headers.SSLHost=apps.coopcloud.tech"
configs:
abra_apps_json:
file: apps.json
abra_conf:
file: nginx.conf
networks:
proxy:
external: true
volumes:
public:

@@ -1,10 +0,0 @@
server {
listen 80 default_server;
server_name apps.coopcloud.tech;
location / {
root /var/www/abra-apps;
add_header Content-Type application/json;
index apps.json;
}
}

@@ -1,9 +1,177 @@
-#!/bin/bash
-
-ABRA_VERSION="8.0.1"
+#!/usr/bin/env bash
+# shellcheck disable=SC2154,SC2034
+
+ABRA_VERSION="10.0.0"
 GIT_URL="https://git.autonomic.zone/coop-cloud/abra"
 ABRA_SRC="$GIT_URL/raw/tag/$ABRA_VERSION/abra"
-ABRA_DIR="${ABRA_DIR:-$HOME/.abra/}"
+ABRA_DIR="${ABRA_DIR:-$HOME/.abra}"
DOC="
abra command-line installer script
Usage:
installer [options]
Options:
-h, --help Show this message and exit
-d, --dev Install bleeding edge development version
-n, --no-prompt Don't prompt for input and run non-interactively
-p, --no-deps Don't attempt to install system dependencies
"
# docopt parser below, refresh this parser with `docopt.sh installer`
# shellcheck disable=2016,1075
docopt() { parse() { if ${DOCOPT_DOC_CHECK:-true}; then local doc_hash
if doc_hash=$(printf "%s" "$DOC" | (sha256sum 2>/dev/null || shasum -a 256)); then
if [[ ${doc_hash:0:5} != "$digest" ]]; then
stderr "The current usage doc (${doc_hash:0:5}) does not match \
what the parser was generated with (${digest})
Run \`docopt.sh\` to refresh the parser."; _return 70; fi; fi; fi
local root_idx=$1; shift; argv=("$@"); parsed_params=(); parsed_values=()
left=(); testdepth=0; local arg; while [[ ${#argv[@]} -gt 0 ]]; do
if [[ ${argv[0]} = "--" ]]; then for arg in "${argv[@]}"; do
parsed_params+=('a'); parsed_values+=("$arg"); done; break
elif [[ ${argv[0]} = --* ]]; then parse_long
elif [[ ${argv[0]} = -* && ${argv[0]} != "-" ]]; then parse_shorts
elif ${DOCOPT_OPTIONS_FIRST:-false}; then for arg in "${argv[@]}"; do
parsed_params+=('a'); parsed_values+=("$arg"); done; break; else
parsed_params+=('a'); parsed_values+=("${argv[0]}"); argv=("${argv[@]:1}"); fi
done; local idx; if ${DOCOPT_ADD_HELP:-true}; then
for idx in "${parsed_params[@]}"; do [[ $idx = 'a' ]] && continue
if [[ ${shorts[$idx]} = "-h" || ${longs[$idx]} = "--help" ]]; then
stdout "$trimmed_doc"; _return 0; fi; done; fi
if [[ ${DOCOPT_PROGRAM_VERSION:-false} != 'false' ]]; then
for idx in "${parsed_params[@]}"; do [[ $idx = 'a' ]] && continue
if [[ ${longs[$idx]} = "--version" ]]; then stdout "$DOCOPT_PROGRAM_VERSION"
_return 0; fi; done; fi; local i=0; while [[ $i -lt ${#parsed_params[@]} ]]; do
left+=("$i"); ((i++)) || true; done
if ! required "$root_idx" || [ ${#left[@]} -gt 0 ]; then error; fi; return 0; }
parse_shorts() { local token=${argv[0]}; local value; argv=("${argv[@]:1}")
[[ $token = -* && $token != --* ]] || _return 88; local remaining=${token#-}
while [[ -n $remaining ]]; do local short="-${remaining:0:1}"
remaining="${remaining:1}"; local i=0; local similar=(); local match=false
for o in "${shorts[@]}"; do if [[ $o = "$short" ]]; then similar+=("$short")
[[ $match = false ]] && match=$i; fi; ((i++)) || true; done
if [[ ${#similar[@]} -gt 1 ]]; then
error "${short} is specified ambiguously ${#similar[@]} times"
elif [[ ${#similar[@]} -lt 1 ]]; then match=${#shorts[@]}; value=true
shorts+=("$short"); longs+=(''); argcounts+=(0); else value=false
if [[ ${argcounts[$match]} -ne 0 ]]; then if [[ $remaining = '' ]]; then
if [[ ${#argv[@]} -eq 0 || ${argv[0]} = '--' ]]; then
error "${short} requires argument"; fi; value=${argv[0]}; argv=("${argv[@]:1}")
else value=$remaining; remaining=''; fi; fi; if [[ $value = false ]]; then
value=true; fi; fi; parsed_params+=("$match"); parsed_values+=("$value"); done
}; parse_long() { local token=${argv[0]}; local long=${token%%=*}
local value=${token#*=}; local argcount; argv=("${argv[@]:1}")
[[ $token = --* ]] || _return 88; if [[ $token = *=* ]]; then eq='='; else eq=''
value=false; fi; local i=0; local similar=(); local match=false
for o in "${longs[@]}"; do if [[ $o = "$long" ]]; then similar+=("$long")
[[ $match = false ]] && match=$i; fi; ((i++)) || true; done
if [[ $match = false ]]; then i=0; for o in "${longs[@]}"; do
if [[ $o = $long* ]]; then similar+=("$long"); [[ $match = false ]] && match=$i
fi; ((i++)) || true; done; fi; if [[ ${#similar[@]} -gt 1 ]]; then
error "${long} is not a unique prefix: ${similar[*]}?"
elif [[ ${#similar[@]} -lt 1 ]]; then
[[ $eq = '=' ]] && argcount=1 || argcount=0; match=${#shorts[@]}
[[ $argcount -eq 0 ]] && value=true; shorts+=(''); longs+=("$long")
argcounts+=("$argcount"); else if [[ ${argcounts[$match]} -eq 0 ]]; then
if [[ $value != false ]]; then
error "${longs[$match]} must not have an argument"; fi
elif [[ $value = false ]]; then
if [[ ${#argv[@]} -eq 0 || ${argv[0]} = '--' ]]; then
error "${long} requires argument"; fi; value=${argv[0]}; argv=("${argv[@]:1}")
fi; if [[ $value = false ]]; then value=true; fi; fi; parsed_params+=("$match")
parsed_values+=("$value"); }; required() { local initial_left=("${left[@]}")
local node_idx; ((testdepth++)) || true; for node_idx in "$@"; do
if ! "node_$node_idx"; then left=("${initial_left[@]}"); ((testdepth--)) || true
return 1; fi; done; if [[ $((--testdepth)) -eq 0 ]]; then
left=("${initial_left[@]}"); for node_idx in "$@"; do "node_$node_idx"; done; fi
return 0; }; optional() { local node_idx; for node_idx in "$@"; do
"node_$node_idx"; done; return 0; }; switch() { local i
for i in "${!left[@]}"; do local l=${left[$i]}
if [[ ${parsed_params[$l]} = "$2" ]]; then
left=("${left[@]:0:$i}" "${left[@]:((i+1))}")
[[ $testdepth -gt 0 ]] && return 0; if [[ $3 = true ]]; then
eval "((var_$1++))" || true; else eval "var_$1=true"; fi; return 0; fi; done
return 1; }; stdout() { printf -- "cat <<'EOM'\n%s\nEOM\n" "$1"; }; stderr() {
printf -- "cat <<'EOM' >&2\n%s\nEOM\n" "$1"; }; error() {
[[ -n $1 ]] && stderr "$1"; stderr "$usage"; _return 1; }; _return() {
printf -- "exit %d\n" "$1"; exit "$1"; }; set -e; trimmed_doc=${DOC:1:333}
usage=${DOC:37:28}; digest=36916; shorts=(-h -d -n -p)
longs=(--help --dev --no-prompt --no-deps); argcounts=(0 0 0 0); node_0(){
switch __help 0; }; node_1(){ switch __dev 1; }; node_2(){ switch __no_prompt 2
}; node_3(){ switch __no_deps 3; }; node_4(){ optional 0 1 2 3; }; node_5(){
optional 4; }; node_6(){ required 5; }; node_7(){ required 6; }
cat <<<' docopt_exit() { [[ -n $1 ]] && printf "%s\n" "$1" >&2
printf "%s\n" "${DOC:37:28}" >&2; exit 1; }'; unset var___help var___dev \
var___no_prompt var___no_deps; parse 7 "$@"; local prefix=${DOCOPT_PREFIX:-''}
unset "${prefix}__help" "${prefix}__dev" "${prefix}__no_prompt" \
"${prefix}__no_deps"; eval "${prefix}"'__help=${var___help:-false}'
eval "${prefix}"'__dev=${var___dev:-false}'
eval "${prefix}"'__no_prompt=${var___no_prompt:-false}'
eval "${prefix}"'__no_deps=${var___no_deps:-false}'; local docopt_i=1
[[ $BASH_VERSION =~ ^4.3 ]] && docopt_i=2; for ((;docopt_i>0;docopt_i--)); do
declare -p "${prefix}__help" "${prefix}__dev" "${prefix}__no_prompt" \
"${prefix}__no_deps"; done; }
# docopt parser above, complete command for generating this parser is `docopt.sh installer`
function prompt_confirm {
if [ "$no_prompt" == "true" ]; then
return
fi
read -rp "Continue? [y/N]? " choice
case "$choice" in
y|Y ) return ;;
* ) exit;;
esac
}
function show_banner {
echo ""
echo " ____ ____ _ _ "
echo " / ___|___ ___ _ __ / ___| | ___ _ _ __| |"
echo " | | / _ \ _____ / _ \| '_ \ | | | |/ _ \| | | |/ _' |"
echo " | |__| (_) |_____| (_) | |_) | | |___| | (_) | |_| | (_| |"
echo " \____\___/ \___/| .__/ \____|_|\___/ \__,_|\__,_|"
echo " |_|"
echo ""
}
function install_docker {
sudo apt-get remove docker docker-engine docker.io containerd runc
sudo apt-get install -yq \
apt-transport-https \
ca-certificates \
gnupg \
lsb-release
curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
echo \
"deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian \
$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt-get update
sudo apt-get install -yq docker-ce docker-ce-cli containerd.io
}
function install_requirements {
if [ -f "/etc/debian_version" ]; then
echo "Detected Debian based distribution, attempting to install system requirements..."
sudo apt update && sudo apt install -y \
passwdqc \
pwgen
echo "Install Docker (https://docs.docker.com/engine/install/debian/)?"
prompt_confirm
install_docker
else
echo "Sorry, we only support Debian based distributions at the moment"
echo "You'll have to install the requirements manually for your distribution"
echo "See https://git.autonomic.zone/coop-cloud/abra#requirements for more"
fi
}
 function install_abra_release {
   mkdir -p "$HOME/.local/bin"
@@ -24,7 +192,25 @@ function install_abra_dev {
 }

 function run_installation {
-  if [ "$1" = "--dev" ]; then
+  show_banner
+
+  DOCOPT_PREFIX=installer_
+  DOCOPT_ADD_HELP=false
+  eval "$(docopt "$@")"
+
+  dev="$installer___dev"
+  no_prompt="$installer___no_prompt"
+  no_deps="$installer___no_deps"
+
+  if [ "$no_deps" == "false" ]; then
+    install_requirements
+  fi
+
+  if ! type curl > /dev/null 2>&1; then
+    error "'curl' program is not installed, cannot continue..."
+  fi
+
+  if [ "$dev" == "true" ]; then
     install_abra_dev
   else
     install_abra_release

@@ -1,4 +1,4 @@
-.PHONY: test shellcheck docopt kcov codecov release-installer
+.PHONY: test shellcheck docopt release-installer build push deploy-docopt symlink

 test:
 	@sudo DOCKER_CONTEXT=default docker run \
@@ -7,7 +7,7 @@ test:
 		-d \
 		--name=abra-test-dind \
 		-e DOCKER_TLS_CERTDIR="" \
-		decentral1se/docker-dind-bats-kcov
+		decentral1se/docker-dind-bats-kcov \
 	@DOCKER_CONTEXT=default sudo docker exec \
 		-it \
 		abra-test-dind \
@@ -21,8 +21,9 @@ shellcheck:
 		--rm \
 		-v $$(pwd):/workdir \
 		koalaman/shellcheck-alpine \
-		shellcheck /workdir/abra && \
-		shellcheck /workdir/bin/*.sh
+		sh -c "shellcheck /workdir/abra && \
+		shellcheck /workdir/bin/*.sh && \
+		shellcheck /workdir/deploy/install.abra.coopcloud.tech/installer"

 docopt:
 	@if [ ! -d ".venv" ]; then \
@@ -32,18 +33,13 @@ docopt:
 	fi
 	.venv/bin/docopt.sh abra

-kcov:
-	@docker run \
-		-it \
-		--rm \
-		-v $$(pwd):/workdir \
-		kcov/kcov:latest \
-		sh -c "kcov /workdir/coverage /workdir/abra || true"
-
-codecov:
-	@bash <(curl -s https://codecov.io/bash) \
-		-s coverage -t $$(pass show hosts/swarm.autonomic.zone/drone/codecov/token)
+deploy-docopt:
+	@if [ ! -d ".venv" ]; then \
+		python3 -m venv .venv && \
+		.venv/bin/pip install -U pip setuptools wheel && \
+		.venv/bin/pip install docopt-sh; \
+	fi
+	.venv/bin/docopt.sh deploy/install.abra.coopcloud.tech/installer
+
+SHELL:=/bin/bash

 release-installer:
 	@DOCKER_CONTEXT=swarm.autonomic.zone \
@@ -51,8 +47,13 @@ release-installer:
 	cd deploy/install.abra.coopcloud.tech && \
 	DOCKER_CONTEXT=swarm.autonomic.zone docker stack deploy -c compose.yml abra-installer-script

-release-apps:
-	@DOCKER_CONTEXT=swarm.autonomic.zone \
-	docker stack rm abra-apps-json && \
-	cd deploy/apps.coopcloud.tech && \
-	DOCKER_CONTEXT=swarm.autonomic.zone docker stack deploy -c compose.yml abra-apps-json
+build:
+	@docker build -t thecoopcloud/abra .
+
+push: build
+	@docker push thecoopcloud/abra
+
+symlink:
+	@mkdir -p ~/.abra/servers/ && \
+	ln -srf tests/default ~/.abra/servers && \
+	ln -srf tests/apps/* ~/.abra/apps

@@ -0,0 +1,84 @@
---
# The goal of this compose file is to have a testing ground for understanding
# what cases we need to handle to get stable deployments. For that, we need to
# work with healthchecks and deploy configurations quite closely. If you run
# the `make symlink` target then this will be loaded into a "fake" app on your
# local machine which you can deploy with `abra`.
version: "3.8"
services:
r1_should_work:
image: redis:alpine
deploy:
update_config:
failure_action: rollback
order: start-first
rollback_config:
order: start-first
restart_policy:
max_attempts: 1
healthcheck:
test: redis-cli ping
interval: 2s
retries: 3
start_period: 1s
timeout: 3s
r2_broken_health_check:
image: redis:alpine
deploy:
update_config:
failure_action: rollback
order: start-first
rollback_config:
order: start-first
restart_policy:
max_attempts: 3
healthcheck:
test: foobar
interval: 2s
retries: 3
start_period: 1s
timeout: 3s
r3_no_health_check:
image: redis:alpine
deploy:
update_config:
failure_action: rollback
order: start-first
rollback_config:
order: start-first
restart_policy:
max_attempts: 3
r4_disabled_health_check:
image: redis:alpine
deploy:
update_config:
failure_action: rollback
order: start-first
rollback_config:
order: start-first
restart_policy:
max_attempts: 3
healthcheck:
disable: true
r5_should_also_work:
image: redis:alpine
deploy:
update_config:
failure_action: rollback
order: start-first
rollback_config:
order: start-first
restart_policy:
max_attempts: 1
healthcheck:
test: redis-cli ping
interval: 2s
retries: 3
start_period: 1s
timeout: 3s
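Per the comment at the top of this compose file, the stack is meant to be linked into place with make symlink and then deployed with abra so the healthcheck polling can be exercised locally. A rough walkthrough, noting that the app name "works" is only assumed here from the tests/default/works.env fixture below:

make symlink                 # link tests/default and tests/apps into ~/.abra
abra app works deploy        # deploy the fixture app described by works.env
abra app works ps            # watch the healthcheck-driven deploy status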

tests/default/works.env (new file, 1 line)

@@ -0,0 +1 @@
TYPE=works