generated from coop-cloud/example
Compare commits: old-databa...main (75 commits)

Commits (SHA1): 6c4fb2c70b, ca0eebcc64, 400445f8f5, 73ed83f5bf, 91e73387bf, 4005524b7b, ab70b3c4ca, 59fdc6481a, fbcdc57b9d, 37c8cf8141, cd3d754eb5, fbe1a99054, 529e0d9d26, 5f300f945b, 3a06d0d9a0, 10c074a96b, 33c53a5d94, a2713a9a64, f6536067ee, 93b670b8f4, 1212c295b9, 6a1fb0e4f3, 9ee27ac443, 287654b8d3, 79c10ed214, 8ddceb9017, c1d6826d82, ac7fb7c3dd, ba63176598, 6eee864ba1, f3c8e08441, 6da688ad1b, ea012f2628, 1705383533, 6575668586, 39f3a61ce0, 158a3c8b1c, 03b7d984f0, 907597aab3, d9ce8fb168, 73de12d12f, 8c3521b87f, 4273faad76, 0e84bbc2ee, ca60a0f1a3, 3b1eeb6160, 52f3bf31ed, e229ab6e30, 0ab6c0c244, 5e2a13a93a, 8796269ad2, a4f75f2da0, 76523535ca, 68f23084aa, e487a36bb5, 8426058959, 277a5d2343, 56be9efd22, 7d2f35277b, 53913b92cd, e0cc0ff9af, 5e3e8a655b, 53e4d82aa3, 361908fe84, ecb5314fe7, a016995516, 2daf487bb8, 0354892c74, 86c215cbc9, 8e7a7b9932, d1f7c765dd, 4f923ad0c1, 672557c7fb, 00576231e5, beeffe65f6

.drone.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
+---
+kind: pipeline
+name: deploy to swarm-test.autonomic.zone
+steps:
+  - name: deployment
+    image: git.coopcloud.tech/coop-cloud/stack-ssh-deploy:latest
+    settings:
+      host: swarm-test.autonomic.zone
+      stack: outline
+      generate_secrets: true
+      purge: true
+      deploy_key:
+        from_secret: drone_ssh_swarm_test
+      networks:
+        - proxy
+    environment:
+      DOMAIN: outline.swarm-test.autonomic.zone
+      STACK_NAME: outline
+      LETS_ENCRYPT_ENV: production
+      APP_ENTRYPOINT_VERSION: v1
+      DB_ENTRYPOINT_VERSION: v1
+      PG_BACKUP_VERSION: v1
+      SECRET_DB_PASSWORD_VERSION: v1
+      SECRET_SECRET_KEY_VERSION: v1 # length=64
+      SECRET_UTILS_SECRET_VERSION: v1 # length=64
+trigger:
+  branch:
+    - main
+---
+kind: pipeline
+name: generate recipe catalogue
+steps:
+  - name: release a new version
+    image: plugins/downstream
+    settings:
+      server: https://build.coopcloud.tech
+      token:
+        from_secret: drone_abra-bot_token
+      fork: true
+      repositories:
+        - toolshed/auto-recipes-catalogue-json
+
+trigger:
+  event: tag

.env.sample (37 changed lines)
@@ -8,24 +8,18 @@ DOMAIN=outline.example.com
 #EXTRA_DOMAINS=', `www.outline.example.com`'
 LETS_ENCRYPT_ENV=production
 
+ENABLE_BACKUPS=true
+
 COMPOSE_FILE="compose.yml"
-#COMPOSE_YML="compose.yml:compose.oidc.yml"
-#COMPOSE_YML="compose.yml:compose.google.yml"
 
 # –––––––––––––––– REQUIRED ––––––––––––––––
 
 SECRET_DB_PASSWORD_VERSION=v1
 SECRET_SECRET_KEY_VERSION=v1 # length=64
 SECRET_UTILS_SECRET_VERSION=v1 # length=64
-SECRET_AWS_SECRET_KEY_VERSION=v1
 
-AWS_ACCESS_KEY_ID=
-AWS_REGION=
-AWS_S3_UPLOAD_BUCKET_URL=
-AWS_S3_UPLOAD_BUCKET_NAME=
-AWS_S3_UPLOAD_MAX_SIZE=26214400
-AWS_S3_FORCE_PATH_STYLE=true
-AWS_S3_ACL=private
+# Set to s3 to use AWS S3 bucket
+FILE_STORAGE=local
 
 # –––––––––––––––– OPTIONAL ––––––––––––––––
 
@@ -47,7 +41,7 @@ WEB_CONCURRENCY=1
 
 # Override the maxium size of document imports, could be required if you have
 # especially large Word documents with embedded imagery
-MAXIMUM_IMPORT_SIZE=5120000
+FILE_STORAGE_IMPORT_MAX_SIZE=5120000
 
 # You can remove this line if your reverse proxy already logs incoming http
 # requests and this ends up being duplicative
@@ -57,19 +51,22 @@ DEBUG=http
 # set, all domains are allowed by default when using Google OAuth to signin
 ALLOWED_DOMAINS=
 
-# TODO: setup compose.smtp.yml
 # To support sending outgoing transactional emails such as "document updated" or
 # "you've been invited" you'll need to provide authentication for an SMTP server
+# By default, this enables email login. You can disable this in the settings
+# for configuration details see https://docs.getoutline.com/s/hosting/doc/smtp-cqCJyZGMIB
+#COMPOSE_FILE="$COMPOSE_FILE:compose.smtp.yml"
 #SMTP_ENABLED=1
 #SMTP_HOST=
 #SMTP_PORT=
 #SMTP_USERNAME=
-#SMTP_PASSWORD=
 #SMTP_FROM_EMAIL=
 #SMTP_REPLY_EMAIL=
 #SMTP_TLS_CIPHERS=
 #SMTP_SECURE=true
+#SECRET_SMTP_PASSWORD_VERSION=v1
 
+#COMPOSE_FILE="$COMPOSE_FILE:compose.oidc.yml"
 #OIDC_ENABLED=1
 #OIDC_CLIENT_ID=
 #OIDC_AUTH_URI=
@@ -80,6 +77,20 @@ ALLOWED_DOMAINS=
 #OIDC_SCOPES="openid profile email"
 #SECRET_OIDC_CLIENT_SECRET_VERSION=v1
 
+#COMPOSE_FILE="$COMPOSE_FILE:compose.google.yml"
 #GOOGLE_ENABLED=1
 #GOOGLE_CLIENT_ID=
 #SECRET_GOOGLE_CLIENT_SECRET_VERSION=v1
+
+COMPOSE_FILE="$COMPOSE_FILE:compose.local.yml"
+FILE_STORAGE_UPLOAD_MAX_SIZE=26214400
+
+#COMPOSE_FILE="$COMPOSE_FILE:compose.aws.yml"
+#AWS_ACCESS_KEY_ID=
+#AWS_REGION=
+#AWS_S3_UPLOAD_BUCKET_URL=
+#AWS_S3_UPLOAD_BUCKET_NAME=
+#AWS_S3_UPLOAD_MAX_SIZE=26214400
+#AWS_S3_FORCE_PATH_STYLE=true
+#AWS_S3_ACL=private
+#SECRET_AWS_SECRET_KEY_VERSION=v1
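
The new sample config above makes SMTP opt-in instead of a TODO. A minimal sketch of enabling it, assuming the same `abra` secret workflow the README below uses for `secret_key` (the password value and the `v1` version are illustrative, not prescribed by this diff):

```
# in the app's .env (abra app config YOURAPPNAME), uncomment:
#   COMPOSE_FILE="$COMPOSE_FILE:compose.smtp.yml"
#   SMTP_ENABLED=1  plus the SMTP_HOST/PORT/USERNAME/FROM_EMAIL values
#   SECRET_SMTP_PASSWORD_VERSION=v1

# store the password as the Docker secret compose.smtp.yml expects
abra app secret insert YOURAPPNAME smtp_password v1 'your-smtp-password'

# redeploy so the overlay and secret are picked up
abra app deploy YOURAPPNAME -f
```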

README.md (55 changed lines)
@@ -5,12 +5,12 @@ Wiki and knowledge base for growing teams
 <!-- metadata -->
 
 * **Category**: Apps
-* **Status**: 1, alpha
+* **Status**: 3, beta
-* **Image**: [outlinewiki/outline](https://hub.docker.com/r/outlinewiki/outline)
+* **Image**: [outlinewiki/outline](https://hub.docker.com/r/outlinewiki/outline), 4, upstream
 * **Healthcheck**: No
-* **Backups**: No
+* **Backups**: Yes
-* **Email**: No
+* **Email**: Yes
-* **Tests**: No
+* **Tests**: 2
 * **SSO**: 3 (OAuth)
 
 <!-- endmetadata -->
@@ -19,26 +19,27 @@ Wiki and knowledge base for growing teams
 
 1. Set up Docker Swarm and [`abra`]
 2. Deploy [`coop-cloud/traefik`]
-3. `abra app new ${REPO_NAME} --secrets` (optionally with `--pass` if you'd like
-   to save secrets in `pass`)
+3. `abra app new ${REPO_NAME}`
+   - **WARNING**: Choose "n" when `abra` asks if you'd like to generate secrets
 4. `abra app config YOURAPPNAME` - be sure to change `$DOMAIN` to something that resolves to
    your Docker swarm box
-5. `abra app deploy YOURAPPNAME`
-7. Open the configured domain in your browser to finish set-up
+5. Insert secrets:
+   - `abra app secret insert YOURAPPNAME secret_key v1 $(openssl rand -hex 32)` #12
+   - `abra app secret generate -a YOURAPPNAME`
+6. `abra app deploy YOURAPPNAME`
+8. Open the configured domain in your browser to finish set-up
 
 [`abra`]: https://git.coopcloud.tech/coop-cloud/abra
 [`coop-cloud/traefik`]: https://git.coopcloud.tech/coop-cloud/traefik
 
 ## Tips & Tricks
 
-### Post-deploy migration
+### Create an initial admin user
 
 ```
-abra app cmd YOURAPPNAME app migrate
+abra app cmd YOURAPPNAME app create_email_user test@example.com
 ```
 
-_As of 2022-03-30, this requires `abra` RC version, run `abra upgrade --rc`._
-
 ### Setting up your `.env` config
 
 Avoid the use of quotes (`"..."`) as much as possible, the NodeJS scripts flip out for some reason on some vars.
@@ -51,4 +52,30 @@ Where `<username-to-delete>` is the username of the user to be removed, and
 `<username-to-replace>` is the username of another user, to assign documents and
 revisions to (instead of deleting them).
 
-_As of 2022-03-30, this requires `abra` RC version, run `abra upgrade --rc`._
+### Migrate from S3 to local storage
+
+- `abra app config <domain>`, add
+- `COMPOSE_FILE="$COMPOSE_FILE:compose.local.yml"`
+- `FILE_STORAGE_UPLOAD_MAX_SIZE=26214400`
+- `abra app deploy <domain> -f`
+- compose.aws.yml should still be deployed!
+- `abra app undeploy <domain>`
+- on the docker host, find mountpoint of newly created volume via `docker volume ls` and `docker volume inspect`
+- volume name is smth like `<domain>_storage-data`
+- take note which linux user owns `<storage_mountpoint>` (likely `1001`)
+- use s3cmd/rclone/... to sync your bucket to `<storage_mountpoint>`
+- `chown -R <storage_user>:<storage_user> <storage_mountpoint>`
+- `abra app config <domain>`, switch storage backend
+- remove `AWS_*` vars, `SECRET_AWS_SECRET_KEY_VERSION` and `COMPOSE_FILE="$COMPOSE_FILE:compose.aws.yml"`
+- set `FILE_STORAGE=local`
+- `abra app deploy <domain> -f`
+- enjoy getting rid of S3 🥳
+
+## Single Sign On with Keycloak/Authentik
+
+- Create an OIDC client in Keycloak (in Authentik this is called a provider and application)
+- Run `abra app config YOURAPPNAME`, then uncomment everything in the `OIDC_` section.
+- **Valid Redirect URIs**: `https://YOURAPPDOMAIN/auth/oidc.callback`
+- Reference the client/provider info to populate the `_AUTH_URI` `_TOKEN_URI` and `_USERINFO_URI` values
+- Set the OIDC secret using the value from the client/provider `abra app secret insert YOURAPPNAME oidc_client_secret v1 SECRETVALUE`
+- `abra app deploy YOURAPPDOMAIN`
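
The "Migrate from S3 to local storage" steps above leave the actual bucket sync to s3cmd/rclone. A rough sketch of that step on the Docker host, assuming an rclone remote named `s3` is already configured for the old bucket (remote, bucket and ownership values are illustrative, not taken from this diff):

```
# find the mountpoint of the freshly created local-storage volume
docker volume inspect --format '{{ .Mountpoint }}' <domain>_storage-data

# pull the bucket contents into it (rclone or s3cmd, whichever you use)
rclone sync s3:<bucket-name> <storage_mountpoint>
# s3cmd sync s3://<bucket-name>/ <storage_mountpoint>/

# hand ownership to the user that owns the volume (likely 1001)
chown -R 1001:1001 <storage_mountpoint>
```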

abra.sh (96 changed lines)
@@ -1,4 +1,18 @@
-export APP_ENTRYPOINT_VERSION=v6
+export APP_ENTRYPOINT_VERSION=v9
+export DB_ENTRYPOINT_VERSION=v2
+export PG_BACKUP_VERSION=v1
 
+create_email_user() {
+  if [ -z "$1" ]; then
+    echo "Usage: ... create_email_user <email_address>"
+    exit 1
+  fi
+  export DATABASE_PASSWORD=$(cat /run/secrets/db_password)
+  export DATABASE_URL="postgres://outline:${DATABASE_PASSWORD}@${STACK_NAME}_db:5432/outline"
+  export UTILS_SECRET=$(cat /run/secrets/utils_secret)
+  export SECRET_KEY=$(cat /run/secrets/secret_key)
+  node build/server/scripts/seed.js "$1"
+}
+
 migrate() {
   export DATABASE_PASSWORD=$(cat /run/secrets/db_password)
@@ -6,9 +20,46 @@ migrate() {
   yarn db:migrate --env=production-ssl-disabled
 }
 
+generate_secret() {
+  abra app secret insert $DOMAIN secret_key v1 $(openssl rand -hex 32)
+}
+
+delete_user_by_id() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    echo "Usage: ... delete_user_by_id <userid-to-delete> <userid-to-replace>"
+    exit 1
+  fi
+
+  USERID_REPLACE="$2"
+  USERID_REMOVE="$1"
+
+  psql -U outline outline <<- SQL
+    UPDATE documents SET "userId" = '$USERID_REPLACE' WHERE "userId" = '$USERID_REMOVE';
+    UPDATE groups SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
+    UPDATE pins SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
+    UPDATE group_users SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
+    UPDATE collections SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
+    UPDATE collection_users SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
+    UPDATE collection_groups SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
+    UPDATE documents SET "lastModifiedById" = '$USERID_REPLACE' WHERE "lastModifiedById" = '$USERID_REMOVE';
+    UPDATE documents SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
+    UPDATE revisions SET "userId" = '$USERID_REPLACE' WHERE "userId" = '$USERID_REMOVE';
+    UPDATE attachments SET "userId" = '$USERID_REPLACE' WHERE "userId" = '$USERID_REMOVE';
+    UPDATE backlinks SET "userId" = '$USERID_REPLACE' WHERE "userId" = '$USERID_REMOVE';
+    UPDATE file_operations SET "userId" = '$USERID_REPLACE' WHERE "userId" = '$USERID_REMOVE';
+    UPDATE users SET "suspendedById" = '$USERID_REPLACE' WHERE "suspendedById" = '$USERID_REMOVE';
+    DELETE FROM search_queries WHERE "userId" = '$USERID_REMOVE';
+    DELETE FROM shares WHERE "userId" = '$USERID_REMOVE';
+    DELETE FROM notification_settings WHERE "userId" = '$USERID_REMOVE';
+    DELETE FROM events WHERE "actorId" = '$USERID_REMOVE';
+    DELETE FROM events WHERE "userId" = '$USERID_REMOVE';
+    DELETE FROM users WHERE "id" = '$USERID_REMOVE';
+SQL
+}
+
 delete_user() {
   if [ -z "$1" ] || [ -z "$2" ]; then
-    echo "Usage: ... delete_user <user-to-delete> <user-to-replace>"
+    echo "Usage: ... delete_user <userid-to-delete> <userid-to-replace>"
     exit 1
   fi
 
@@ -24,23 +75,26 @@ delete_user() {
     echo "Can't find ID of '$2'"
     exit 1
   fi
 
-  psql -U outline outline <<- SQL
-    UPDATE documents SET "userId" = '$USERID_REPLACE' WHERE "userId" = '$USERID_REMOVE';
-    UPDATE groups SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
-    UPDATE pins SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
-    UPDATE group_users SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
-    UPDATE collections SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
-    UPDATE collection_users SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
-    UPDATE documents SET "lastModifiedById" = '$USERID_REPLACE' WHERE "lastModifiedById" = '$USERID_REMOVE';
-    UPDATE documents SET "createdById" = '$USERID_REPLACE' WHERE "createdById" = '$USERID_REMOVE';
-    UPDATE revisions SET "userId" = '$USERID_REPLACE' WHERE "userId" = '$USERID_REMOVE';
-    UPDATE attachments SET "userId" = '$USERID_REPLACE' WHERE "userId" = '$USERID_REMOVE';
-    DELETE FROM search_queries WHERE "userId" = '$USERID_REMOVE';
-    DELETE FROM shares WHERE "userId" = '$USERID_REMOVE';
-    DELETE FROM notification_settings WHERE "userId" = '$USERID_REMOVE';
-    DELETE FROM events WHERE "actorId" = '$USERID_REMOVE';
-    DELETE FROM events WHERE "userId" = '$USERID_REMOVE';
-    DELETE FROM users WHERE username = '$1';
-SQL
+  delete_user_by_id "$USERID_REMOVE" "$USERID_REPLACE"
+}
+
+delete_duplicate_users() {
+  if [ -z "$1" ]; then
+    echo "Usage: ... delete_duplicate_users <username>"
+    exit 1
+  fi
+
+  USERIDS=$(echo "SELECT id FROM users WHERE username = '$1' ORDER BY users.\"createdAt\" DESC" | psql -t -A -U outline outline)
+
+  if [ ! "$(echo "$USERIDS" | wc -l)" -gt 1 ]; then
+    echo "Only one user exists, bailing"
+    exit 1
+  fi
+
+  USERID_NEW=$(echo "$USERIDS" | head -n1)
+
+  for USERID_OLD in $(echo "$USERIDS" | tail -n+2); do
+    delete_user_by_id "$USERID_OLD" "$USERID_NEW"
+  done
 }
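
Like the existing `migrate` helper, the new abra.sh functions are meant to be run inside the deployed containers via `abra app cmd`, as the README above shows for `create_email_user`. A hedged sketch of invoking the clean-up helpers, assuming they target the `db` service since they talk to postgres directly (the service names here are assumptions, not stated in this diff):

```
abra app cmd YOURAPPNAME app create_email_user admin@example.com
abra app cmd YOURAPPNAME db delete_duplicate_users someuser
abra app cmd YOURAPPNAME db delete_user username-to-delete username-to-replace
```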

alaconnect.yml (new file, 15 lines)
@@ -0,0 +1,15 @@
+authentik:
+  env:
+    OIDC_CLIENT_ID: outline
+    OIDC_AUTH_URI: https://authentik.example.com/application/o/authorize/
+    OIDC_TOKEN_URI: https://authentik.example.com/application/o/token/
+    OIDC_USERINFO_URI: https://authentik.example.com/application/o/userinfo/
+    OIDC_DISPLAY_NAME: "Authentik"
+  uncomment:
+    - compose.oidc.yml
+    - OIDC_ENABLED
+    - OIDC_USERNAME_CLAIM
+    - OIDC_SCOPES
+    - SECRET_OIDC_CLIENT_SECRET_VERSION
+  shared_secrets:
+    outline_secret: oidc_client_secret

compose.aws.yml (new file, 22 lines)
@@ -0,0 +1,22 @@
+---
+version: "3.8"
+
+services:
+  app:
+    secrets:
+      - aws_secret_key
+    environment:
+      - AWS_ACCESS_KEY_ID
+      - AWS_REGION
+      - AWS_S3_ACL
+      - AWS_S3_FORCE_PATH_STYLE
+      - AWS_S3_UPLOAD_BUCKET_NAME
+      - AWS_S3_UPLOAD_BUCKET_URL
+      - AWS_S3_UPLOAD_MAX_SIZE
+      - AWS_SDK_LOAD_CONFIG=0
+      - AWS_SECRET_KEY_FILE=/run/secrets/aws_secret_key
+
+secrets:
+  aws_secret_key:
+    name: ${STACK_NAME}_aws_secret_key_${SECRET_AWS_SECRET_KEY_VERSION}
+    external: true

compose.local.yml (new file, 13 lines)
@@ -0,0 +1,13 @@
+---
+version: "3.8"
+
+services:
+  app:
+    volumes:
+      - storage-data:/var/lib/outline/data
+    environment:
+      - FILE_STORAGE
+      - FILE_STORAGE_UPLOAD_MAX_SIZE
+
+volumes:
+  storage-data:

compose.smtp.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
+version: "3.8"
+services:
+  app:
+    secrets:
+      - smtp_password
+    environment:
+      - SMTP_HOST
+      - SMTP_PORT
+      - SMTP_USERNAME
+      - SMTP_FROM_EMAIL
+      - SMTP_REPLY_EMAIL
+      - SMTP_TLS_CIPHERS
+      - SMTP_SECURE
+
+secrets:
+  smtp_password:
+    external: true
+    name: ${STACK_NAME}_smtp_password_${SECRET_SMTP_PASSWORD_VERSION}

compose.yml (52 changed lines)
@@ -6,9 +6,8 @@ services:
     networks:
       - backend
       - proxy
-    image: outlinewiki/outline:0.64.4
+    image: outlinewiki/outline:0.82.0
     secrets:
-      - aws_secret_key
      - db_password
      - secret_key
      - utils_secret
@@ -17,15 +16,7 @@ services:
        target: /docker-entrypoint.sh
        mode: 0555
    environment:
-      - AWS_ACCESS_KEY_ID
-      - AWS_REGION
-      - AWS_S3_ACL
-      - AWS_S3_FORCE_PATH_STYLE
-      - AWS_S3_UPLOAD_BUCKET_NAME
-      - AWS_S3_UPLOAD_BUCKET_URL
-      - AWS_S3_UPLOAD_MAX_SIZE
-      - AWS_SDK_LOAD_CONFIG=0
-      - AWS_SECRET_KEY_FILE=/run/secrets/aws_secret_key
+      - FILE_STORAGE
      - DATABASE_PASSWORD_FILE=/run/secrets/db_password
      - FORCE_HTTPS=true
      - PGSSLMODE=disable
@@ -43,29 +34,44 @@ services:
        - "traefik.http.routers.${STACK_NAME}.rule=Host(`${DOMAIN}`${EXTRA_DOMAINS})"
        - "traefik.http.routers.${STACK_NAME}.entrypoints=web-secure"
        - "traefik.http.routers.${STACK_NAME}.tls.certresolver=${LETS_ENCRYPT_ENV}"
-        - "coop-cloud.${STACK_NAME}.version=0.4.1+0.64.4"
-        ## Redirect from EXTRA_DOMAINS to DOMAIN
-        #- "traefik.http.routers.${STACK_NAME}.middlewares=${STACK_NAME}-redirect"
-        #- "traefik.http.middlewares.${STACK_NAME}-redirect.headers.SSLForceHost=true"
-        #- "traefik.http.middlewares.${STACK_NAME}-redirect.headers.SSLHost=${DOMAIN}"
+        - "coop-cloud.${STACK_NAME}.version=2.9.0+0.82.0"
+        # Redirect from EXTRA_DOMAINS to DOMAIN
+        - "traefik.http.routers.${STACK_NAME}.middlewares=${STACK_NAME}-redirect"
+        - "traefik.http.middlewares.${STACK_NAME}-redirect.headers.SSLForceHost=true"
+        - "traefik.http.middlewares.${STACK_NAME}-redirect.headers.SSLHost=${DOMAIN}"
+        - "coop-cloud.${STACK_NAME}.timeout=${TIMEOUT:-80}"
 
   cache:
-    image: redis:6.2.6
+    image: redis:7.4.2
    networks:
      - backend
 
   db:
-    image: postgres:11
+    image: postgres:17.3
    networks:
      - backend
    secrets:
      - db_password
+    configs:
+      - source: db_entrypoint
+        target: /docker-entrypoint.sh
+        mode: 0555
+      - source: pg_backup
+        target: /pg_backup.sh
+        mode: 0555
    environment:
      POSTGRES_DB: outline
      POSTGRES_PASSWORD_FILE: /run/secrets/db_password
      POSTGRES_USER: outline
    volumes:
      - "postgres_data:/var/lib/postgresql/data"
+    entrypoint: /docker-entrypoint.sh
+    deploy:
+      labels:
+        backupbot.backup: "${ENABLE_BACKUPS:-true}"
+        backupbot.backup.pre-hook: "/pg_backup.sh backup"
+        backupbot.backup.volumes.postgres_data.path: "backup.sql"
+        backupbot.restore.post-hook: '/pg_backup.sh restore'
 
 secrets:
   secret_key:
@@ -74,9 +80,6 @@ secrets:
   utils_secret:
     name: ${STACK_NAME}_utils_secret_${SECRET_UTILS_SECRET_VERSION}
     external: true
-  aws_secret_key:
-    name: ${STACK_NAME}_aws_secret_key_${SECRET_AWS_SECRET_KEY_VERSION}
-    external: true
   db_password:
     name: ${STACK_NAME}_db_password_${SECRET_DB_PASSWORD_VERSION}
     external: true
@@ -94,3 +97,10 @@ configs:
     name: ${STACK_NAME}_app_entrypoint_${APP_ENTRYPOINT_VERSION}
     file: entrypoint.sh.tmpl
     template_driver: golang
+  db_entrypoint:
+    name: ${STACK_NAME}_db_entrypoint_${DB_ENTRYPOINT_VERSION}
+    file: entrypoint.postgres.sh.tmpl
+    template_driver: golang
+  pg_backup:
+    name: ${STACK_NAME}_pg_backup_${PG_BACKUP_VERSION}
+    file: pg_backup.sh

entrypoint.postgres.sh.tmpl (new file, 44 lines)
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+set -e
+
+MIGRATION_MARKER=$PGDATA/migration_in_progress
+OLDDATA=$PGDATA/old_data
+NEWDATA=$PGDATA/new_data
+
+if [ -e $MIGRATION_MARKER ]; then
+  echo "FATAL: migration was started but did not complete in a previous run. manual recovery necessary"
+  exit 1
+fi
+
+if [ -f $PGDATA/PG_VERSION ]; then
+  DATA_VERSION=$(cat $PGDATA/PG_VERSION)
+
+  if [ -n "$DATA_VERSION" -a "$PG_MAJOR" != "$DATA_VERSION" ]; then
+    echo "postgres data version $DATA_VERSION found, but need $PG_MAJOR. Starting migration"
+    echo "Installing postgres $DATA_VERSION"
+    sed -i "s/$/ $DATA_VERSION/" /etc/apt/sources.list.d/pgdg.list
+    apt-get update && apt-get install -y --no-install-recommends \
+      postgresql-$DATA_VERSION \
+      && rm -rf /var/lib/apt/lists/*
+    echo "shuffling around"
+    gosu postgres mkdir $OLDDATA $NEWDATA
+    chmod 700 $OLDDATA $NEWDATA
+    mv $PGDATA/* $OLDDATA/ || true
+    touch $MIGRATION_MARKER
+    echo "running initdb"
+    # abuse entrypoint script for initdb by making server error out
+    gosu postgres bash -c "export PGDATA=$NEWDATA ; /usr/local/bin/docker-entrypoint.sh --invalid-arg || true"
+    echo "running pg_upgrade"
+    cd /tmp
+    gosu postgres pg_upgrade --link -b /usr/lib/postgresql/$DATA_VERSION/bin -d $OLDDATA -D $NEWDATA -U $POSTGRES_USER
+    cp $OLDDATA/pg_hba.conf $NEWDATA/
+    mv $NEWDATA/* $PGDATA
+    rm -rf $OLDDATA
+    rmdir $NEWDATA
+    rm $MIGRATION_MARKER
+    echo "migration complete"
+  fi
+fi
+
+/usr/local/bin/docker-entrypoint.sh postgres

entrypoint.sh.tmpl
@@ -1,6 +1,12 @@
 #!/bin/sh
 
+{{ if eq (env "FILE_STORAGE") "s3" }}
 export AWS_SECRET_ACCESS_KEY=$(cat /run/secrets/aws_secret_key)
+{{ end }}
+
+{{ if eq (env "SMTP_ENABLED") "1" }}
+export SMTP_PASSWORD=$(cat /run/secrets/smtp_password)
+{{ end }}
 
 {{ if eq (env "OIDC_ENABLED") "1" }}
 export OIDC_CLIENT_SECRET=$(cat /run/secrets/oidc_client_secret)
@@ -15,5 +21,7 @@ export SECRET_KEY=$(cat /run/secrets/secret_key)
 export DATABASE_PASSWORD=$(cat /run/secrets/db_password)
 export DATABASE_URL="postgres://outline:${DATABASE_PASSWORD}@${STACK_NAME}_db:5432/outline"
 
-/usr/local/bin/yarn db:migrate --env=production-ssl-disabled
-/usr/local/bin/yarn start "$@"
+if [ ! "$1" = "-e" ]; then
+  /usr/local/bin/yarn db:migrate --env=production-ssl-disabled
+  /usr/local/bin/yarn start "$@"
+fi

pg_backup.sh (new file, 34 lines)
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+set -e
+
+BACKUP_FILE='/var/lib/postgresql/data/backup.sql'
+
+function backup {
+  export PGPASSWORD=$(cat $POSTGRES_PASSWORD_FILE)
+  pg_dump -U ${POSTGRES_USER} ${POSTGRES_DB} > $BACKUP_FILE
+}
+
+function restore {
+  cd /var/lib/postgresql/data/
+  restore_config(){
+    # Restore allowed connections
+    cat pg_hba.conf.bak > pg_hba.conf
+    su postgres -c 'pg_ctl reload'
+  }
+  # Don't allow any other connections than local
+  cp pg_hba.conf pg_hba.conf.bak
+  echo "local all all trust" > pg_hba.conf
+  su postgres -c 'pg_ctl reload'
+  trap restore_config EXIT INT TERM
+
+  # Recreate Database
+  psql -U ${POSTGRES_USER} -d postgres -c "DROP DATABASE ${POSTGRES_DB} WITH (FORCE);"
+  createdb -U ${POSTGRES_USER} ${POSTGRES_DB}
+  psql -U ${POSTGRES_USER} -d ${POSTGRES_DB} -1 -f $BACKUP_FILE
+
+  trap - EXIT INT TERM
+  restore_config
+}
+
+$@
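
pg_backup.sh is wired to backupbot through the labels added to the db service in compose.yml: the pre-hook runs `backup` before the volume is captured and the post-hook runs `restore` afterwards. If you ever need to trigger it by hand, one option is to exec into the running db container on the swarm node, for example (the container lookup below is illustrative, not part of this diff):

```
docker exec $(docker ps -q --filter name=<stack_name>_db) /pg_backup.sh backup
```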

release/2.0.0+0.74.0 (new file, 4 lines)
@@ -0,0 +1,4 @@
+Due to the introduction of local storage, you need to adapt your config to continue using S3 storage. Just add the following lines to your config:
+
+FILE_STORAGE=s3
+COMPOSE_FILE="$COMPOSE_FILE:compose.aws.yml"

release/2.9.1+0.82.0 (new file, 1 line)
@@ -0,0 +1 @@
+Fixes a problem where deployments were consistently giving a timeout response even though they were successful