Environments
============
We provide a few ways to get you up and running:
use a Python virtual environment (venv), Docker, or a mix of the two.
Local environment
-----------------
packages
```
# install Python 3 with your local package manager, or grab it from python.org
command -v apt && sudo apt install python3
command -v yum && sudo yum install python3
```
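`init.sh` is expected to set up the virtualenv for you; if you prefer to create it by hand, here is a minimal sketch (assuming you want it at `.local/venv`, the `VIRTUAL_ENV` default in the env template):
```
python3 -m venv .local/venv
source .local/venv/bin/activate
pip install -r requirements.txt  # assumes a requirements.txt at the repo root
```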
config
```
# copy the templates, then modify as needed (especially the env file)
mkdir .local
cp .devops/template/env_local_venv env
cp .devops/template/start_runserver.sh start.sh
openssl req -x509 -newkey rsa:4096 -nodes -out .local/server.crt -keyout .local/server.key -days 36500 -subj "/C=CA/ST=TEST/L=TEST/O=TEST/CN=localhost"
```
run
```
./init.sh
./start.sh
```
run extra (optional)
```
source ./env.sh
```
clean
```
sudo git clean -Xdf
```
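Note: `git clean -Xdf` deletes every git-ignored file, including the venv, certs, and local config. A dry run first shows what would be removed:
```
sudo git clean -Xdn
```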
Docker
------
requirements
```
install docker...
```
config
```
mkdir -p .local/nginx
openssl req -x509 -newkey rsa:4096 -nodes -out .local/nginx/server.crt -keyout .local/nginx/server.key -days 36500 -subj "/C=CA/ST=TEST/L=TEST/O=TEST/CN=localhost"
```
start
```
docker-compose up
# find the port the web proxy is bound to:
docker-compose port nginx 443
echo "https://127.0.0.1:$(docker-compose port nginx 443 | cut -d: -f2)"
```
packages
```
sudo apt install docker-compose
sudo usermod -a -G docker $USER
# reboot or use `su $USER` so `id` shows you in the docker group
```
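To confirm the group change took effect, docker should now run without sudo:
```
docker run --rm hello-world
```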
config
```
# copy the templates, then modify as needed (especially the env file)
mkdir .local
cp .devops/template/env_docker env
cp .devops/template/requirements_docker.sh requirements.sh
cp .devops/template/requirements_docker.txt requirements.txt
cp .devops/template/settings_docker.py settings.py
cp .devops/template/initdb_remote_postgresql.sh initdb.sh
cp .devops/template/start_gunicorn.sh start.sh
openssl req -x509 -newkey rsa:4096 -nodes -out .local/server.crt -keyout .local/server.key -days 36500 -subj "/C=CA/ST=TEST/L=TEST/O=TEST/CN=localhost"
```
run
```
# start dockers and run in background
docker-compose up --detach
# tail logs in another window (optional)
docker-compose logs -f -t
# find the port the web proxy is bound to:
docker-compose port nginx 443
echo "https://127.0.0.1:$(docker-compose port nginx 443 | cut -d: -f2)"
```
run extra (optional)
```
# if you want a shell in a container
docker-compose run app /bin/bash
# this shell is a new container; it shares storage volumes but not system files
# from here you can use manage.py to update the database and static files
source ./env.sh
python manage.py makemigrations
python manage.py migrate
python manage.py createadmin
python manage.py collectstatic --noinput
```
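If you want a shell in the already-running app container instead of a fresh one, `docker-compose exec` attaches to it (assuming the service is named `app` as above):
```
docker-compose exec app /bin/bash
```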
rebuild
```
docker-compose up --build
```
clean
```
docker-compose down
sudo git clean -Xdf
```
FAQ
===
How do I make a user an admin?
```
python manage.py shell
...
from django.contrib.auth.models import User
user = User.objects.get(username="<the_user_id>")
user.is_staff = True
user.is_superuser = True
user.save()
```
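If the account does not exist yet, Django's built-in `createsuperuser` command creates an admin in one step:
```
python manage.py createsuperuser
```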
email cronjobs
```
* * * * * (/path/to/your/python /path/to/your/manage.py send_mail >> ~/cron_mail.log 2>&1)
0,20,40 * * * * (/path/to/your/python /path/to/your/manage.py retry_deferred >> ~/cron_mail_deferred.log 2>&1)
```
To prevent the database from filling up with the message log, clean it up every once in a while.
To remove successful log entries older than a week, add this to a cron job file or equivalent:
```
0 0 * * * (/path/to/your/python /path/to/your/manage.py purge_mail_log 7 >> ~/cron_mail_purge.log 2>&1)
```
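These entries go in the crontab of whichever user runs the app. Edit it interactively, or append a line without an editor (this re-installs the existing table plus the new entry):
```
crontab -e
# or:
(crontab -l; echo '* * * * * /path/to/your/python /path/to/your/manage.py send_mail >> ~/cron_mail.log 2>&1') | crontab -
```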
#!/bin/bash
while :
do
  sleep 6h & wait ${!}
  # do something with config....
  nginx -s reload
  # ....
  # wait until new cert
done &
nginx -g 'daemon off;'
variables:
  POSTGRES_DB: test
  POSTGRES_USER: pguser
  POSTGRES_PASSWORD: pgpasswd
  POSTGRES_ENCODING: UTF8

before_script:
  - sed -i 's|http://mirrors.us.kernel.org|http://mirror.csclub.uwaterloo.ca|g' /etc/apt/sources.list
  - sed -i 's|http://ca.archive.ubuntu.com|http://mirror.csclub.uwaterloo.ca|g' /etc/apt/sources.list
  - sed -i 's|http://security.ubuntu.com|http://mirror.csclub.uwaterloo.ca|g' /etc/apt/sources.list
  - apt update > /dev/null
  - apt install -y sudo
  - bash init.sh
  - source .env/bin/activate
  - cp .gitlab-ci_settings_local.py core/settings_local.py
  - echo "AUTH_LDAP_BIND_DN = 'CN=Math Faculty SALT (mfsaltad),OU=Users,OU=Math,OU=Academic,OU=People,DC=NEXUS,DC=UWATERLOO,DC=CA'" >> core/settings_local.py
  - echo "AUTH_LDAP_BIND_PASSWORD = '$AUTH_LDAP_BIND_PASSWORD'" >> core/settings_local.py
  - ./manage.py migrate
  - ./manage.py collectstatic --noinput

test_all:
  image: ubuntu:18.04
  services:
    - postgres:11.2
  script:
    - ./manage.py test core.tests
from .settings import * # pylint: disable=unused-wildcard-import
import ldap
from django_auth_ldap.config import LDAPSearch
from django_auth_ldap.config import GroupOfNamesType, ActiveDirectoryGroupType
DEBUG = True
ALLOWED_HOSTS = ['localhost', '127.0.0.1']
INSTALLED_APPS += [
    'django_summernote',
    'django_auth_ldap',
    'import_export',
    'core',
]
SUMMERNOTE_CONFIG = {
    'disable_attachment': True,
}
DATABASES['default'] = {
    'ENGINE': 'django.db.backends.sqlite3',
    'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
STATIC_ROOT = os.path.join(BASE_DIR, 'static_collected')
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'core.context_processors.session',
            ],
        },
    },
]
# LDAP
# ========================
# Keep ModelBackend around for backdoor
AUTHENTICATION_BACKENDS = (
    'django_auth_ldap.backend.LDAPBackend',
    'django.contrib.auth.backends.ModelBackend',
)
# Baseline configuration.
# if only doing auth, you can use: uwldap.uwaterloo.ca
AUTH_LDAP_SERVER_URI = "ldap://nexus.uwaterloo.ca"
AUTH_LDAP_START_TLS = True
AUTH_LDAP_CONNECTION_OPTIONS = {
    ldap.OPT_REFERRALS: 0,  # pylint: disable=no-member
}
AUTH_LDAP_BIND_DN = "CN=Math Faculty SALT (mfsaltad),OU=Users,OU=Math,OU=Academic,OU=People,DC=NEXUS,DC=UWATERLOO,DC=CA"
AUTH_LDAP_BIND_PASSWORD = 'xxxxx'
AUTH_LDAP_USER_SEARCH = LDAPSearch("dc=nexus,dc=uwaterloo,dc=ca", ldap.SCOPE_SUBTREE, "(&(objectclass=user)(sAMAccountName=%(user)s))")
#AUTH_LDAP_USER_DN_TEMPLATE = "NEXUS\\%(user)s"
#AUTH_LDAP_BIND_AS_AUTHENTICATING_USER = True
AUTH_LDAP_USER_ATTR_MAP = {"first_name": "givenName", "last_name": "sn", 'email': 'mail', 'department': 'department'}
#AUTH_LDAP_GROUP_SEARCH = LDAPSearch("dc=nexus,dc=uwaterloo,dc=ca", ldap.SCOPE_SUBTREE, "(objectClass=group)")
AUTH_LDAP_GROUP_SEARCH = LDAPSearch("ou=grouper,ou=watiam,ou=security groups,dc=nexus,dc=uwaterloo,dc=ca", ldap.SCOPE_SUBTREE, "(objectClass=group)")
AUTH_LDAP_GROUP_TYPE = ActiveDirectoryGroupType()
# populate django user groups with what is found in ldap
# it would be best if we could just whitelist groups, because this list is large
#AUTH_LDAP_MIRROR_GROUPS = True
AUTH_LDAP_MIRROR_GROUPS = ['Math_GG_Courses_Admin']
# sadly the group needs to be spelled out in full
# self._user.ldap_user.group_names
AUTH_LDAP_USER_FLAGS_BY_GROUP = {
    #'is_active': 'cn=active,ou=django,ou=groups,dc=example,dc=com',
    #'is_staff': 'Math_G_Grouper_Group_s8weber',
    #'is_active': 'cn=Math_GG_Mathprereg_Staff,ou=grouper,ou=watiam,ou=security groups,dc=nexus,dc=uwaterloo,dc=ca',
    'is_staff': 'cn=Math_GG_Courses_Admin,ou=grouper,ou=watiam,ou=security groups,dc=nexus,dc=uwaterloo,dc=ca',
    #'is_active': 'cn=math_g_netapp_oum,ou=applications,ou=groups,ou=math,ou=academic,ou=security groups,dc=nexus,dc=uwaterloo,dc=ca',
    #'is_staff': 'cn=math_g_netapp_oum,ou=applications,ou=groups,ou=math,ou=academic,ou=security groups,dc=nexus,dc=uwaterloo,dc=ca'
    'is_superuser': 'cn=Math_GG_Courses_Admin,ou=grouper,ou=watiam,ou=security groups,dc=nexus,dc=uwaterloo,dc=ca',
}
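# To sanity-check the LDAP setup above without going through the login page,
# something like this can be run from the project root (a sketch; the
# username/password are placeholders, not real credentials):
#   ./manage.py shell -c "from django.contrib.auth import authenticate; \
#     u = authenticate(username='some_userid', password='...'); \
#     print(u, u and u.ldap_user.group_names)"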
## ADFS
# ========================
## idp?
## https://idp.uwaterloo.ca
'''
AUTHENTICATION_BACKENDS = (
    'django_auth_adfs.backend.AdfsAuthCodeBackend',
)
INSTALLED_APPS.extend([
    'core',
    'django_auth_adfs',
])
AUTH_ADFS = {
    'SERVER': 'adfs.uwaterloo.ca',
    'CLIENT_ID': 'your-configured-client-id',
    'RELYING_PARTY_ID': 'your-adfs-RPT-name',
    # Make sure to read the documentation about the AUDIENCE setting
    # when you configured the identifier as a URL!
    'AUDIENCE': 'microsoft:identityserver:your-RelyingPartyTrust-identifier',
    # https://django-auth-adfs.readthedocs.io/en/latest/settings_ref.html#ca-bundle
    # default is True to use the os bundle
    # False to disable the check
    # or path to custom bundle
    #'CA_BUNDLE': '/path/to/ca-bundle.pem',
    'CA_BUNDLE': False,
    'CLAIM_MAPPING': {
        'first_name': 'given_name',
        'last_name': 'family_name',
        'email': 'email',
    },
}
# path('oauth2/', include('django_auth_adfs.urls')),
LOGIN_URL = 'django_auth_adfs:login'
LOGIN_REDIRECT_URL = '/'
MIDDLEWARE.extend([
    # With this you can force a user to login without using
    # the LoginRequiredMixin on every view class
    #
    # You can specify URLs for which login is not enforced by
    # specifying them in the LOGIN_EXEMPT_URLS setting.
    'django_auth_adfs.middleware.LoginRequiredMiddleware',
])
'''
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'simple': {
            'format': '%(levelname)s %(message)s'
        },
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'simple'
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['console'],
            'propagate': False,
            'level': 'DEBUG',
        },
        'core.handlers': {
            'level': 'DEBUG',
            'handlers': ['console']
        },
        'django_auth_ldap': {
            'level': 'DEBUG',
            'handlers': ['console'],
        },
    },
}
# gitlab custom
# ----------------------
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'test',
        'USER': 'pguser',
        'PASSWORD': 'pgpasswd',
        'HOST': 'postgres',
        'PORT': '5432',
    }
}
#!/usr/bin/env bash
# To activate the env run this helper using:
# source ./env.sh
#DEBUG set -x
set -a
#my_caller_file=${BASH_SOURCE[${#BASH_SOURCE[@]} - 1]}
#ENV_CALLER_DIR=$(cd "$(dirname ".")" && pwd)
# get dir of this file (should work if file was sourced)
ENV_ROOT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source "$ENV_ROOT_DIR/env_base"
test -e "$ENV_ROOT_DIR/env" && source "$ENV_ROOT_DIR/env"
set +a
#!/bin/bash
source ./env.sh
set -exu
# dump
# mysqldump --all-databases -uroot -p"$MYSQL_ROOT_PASSWORD" > /some/path/on/your/host/all-databases.sql
INIT_DB_SOURCE_FILE=.devops/setup.sql
# file:///home/s8weber/scm/mathsoclibrary-frontend/backend/models/database/setup.sql
# alpine
if command -v apk; then
    apk update
    apk add mysql-client
fi
# debian
if command -v apt; then
    apt update
    apt install -y mysql-client
fi
# redhat
if command -v yum; then
    yum install -y mysql-client
fi
# set mysql_native_password
mysql -h"$INIT_DB_HOST" -uroot -p"$MYSQL_ROOT_PASSWORD" <<EOF
ALTER USER 'root' IDENTIFIED WITH mysql_native_password BY '$MYSQL_ROOT_PASSWORD';
commit;
EOF
#mysql -uroot -p"$MYSQL_ROOT_PASSWORD" < $INIT_DB_SOURCE_FILE
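# quick check that the auth plugin change took effect (a sketch):
#   mysql -h"$INIT_DB_HOST" -uroot -p"$MYSQL_ROOT_PASSWORD" \
#     -e "SELECT user, host, plugin FROM mysql.user;"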
POSTGRES_DB=pgdb
POSTGRES_USER=pguser
POSTGRES_PASSWORD=pgpasswd
# INITDB_SOURCE_USER=_courses_t01
# INITDB_SOURCE_PASSWORD=xxxxxxxx
# INITDB_SOURCE_HOST=postgresql-t01-ha.math.private.uwaterloo.ca
# INITDB_SOURCE_NAME=courses_t01
DJANGO_DEBUG=1
DJANGO_SECRET_KEY=xxxxxxxx
DJANGO_DB_HOST=db
DJANGO_EMAIL_HOST_USER=email
DJANGO_EMAIL_HOST_PASSWORD=xxxxxxxx
TLS_CERTFILE=.local/server.crt
TLS_KEYFILE=.local/server.key
DJANGO_DEBUG=1
SECRET_KEY=xxSOME_RANDOM_TEXTxxx
VIRTUAL_ENV=.local/venv
#INITDB_SSH_KEY=~/.ssh/id_ed25519
#INITDB_SSH_HOST=root@courses.math.uwaterloo.ca
#INITDB_SSH_CMD=/srv/webapp/env/bin/python /srv/webapp/src/manage.py dumpdata --exclude=auth.Permission --exclude=contenttypes --all
#!/usr/bin/env bash
python3 ./manage.py loaddata initdb.json
rm initdb.json
#!/usr/bin/env bash
source ./env.sh
set -exu
#INITDB_SOURCE_SSH_KEY='~/.ssh/id_ed25519'
#INITDB_SOURCE_SSH_HOST='root@courses.math.uwaterloo.ca'
#INITDB_SOURCE_SSH_CMD='/srv/webapp/env/bin/python /srv/webapp/src/manage.py dumpdata --exclude=auth.Permission --exclude=contenttypes --all'
# Problem installing fixture '-': Could not load contenttypes.ContentType(pk=1): no such table: django_content_type
# --exclude=auth.Permission --exclude=contenttypes
# --natural-foreign --natural-primary
ssh -i "$INITDB_SOURCE_SSH_KEY" "$INITDB_SOURCE_SSH_HOST" '/srv/webapp/env/bin/python /srv/webapp/src/manage.py dumpdata --exclude=auth.Permission --exclude=contenttypes --all' \
| python ./manage.py loaddata --format=json -
echo "DONE."
#!/bin/bash
source ./env.sh
set -exu
# database to sync from
# INITDB_SOURCE_USER
# INITDB_SOURCE_PASSWORD
# INITDB_SOURCE_HOST
# INITDB_SOURCE_NAME
# database to sync to
# INITDB_USER
# INITDB_PASSWORD
# INITDB_HOST
# INITDB_NAME
# alpine
if command -v apk; then
    apk update
    apk add postgresql-client
fi
# debian
if command -v apt; then
    apt update
    apt install -y postgresql-client
fi
# redhat
if command -v yum; then
    yum install -y postgresql-client
fi
# Help prevent the production database from being wiped!
# test "${DJANGO_DEBUG:-0}" != "1" \
# && echo 'Database sync only allowed when DEBUG=1' \
# && echo '**** TAKE CARE NOT TO WIPE PRODUCTION DATABASE' \
# && exit 1
# Drop all objects in database without dropping the database
# TODO: there has got to be a nicer way to do this, no?
# -c "select 'drop table \"' || tablename || '\" cascade;' from pg_tables where schemaname='public'" \
# -c 'DROP SCHEMA public CASCADE; CREATE SCHEMA public; GRANT ALL ON SCHEMA public TO postgres; GRANT ALL ON SCHEMA public TO public;' \
echo ""
echo "**** WIPE DATABASE $INITDB_HOST/$INITDB_NAME in 5sec****"
sleep 5 # give user 5seconds to ctr+c
PGPASSWORD="$INITDB_PASSWORD" psql --host="$INITDB_HOST" --username="$INITDB_USER" --dbname="$INITDB_NAME" << "EOF"
DO $$
DECLARE
    r RECORD;
BEGIN
    -- triggers
    FOR r IN (SELECT pns.nspname, pc.relname, pt.tgname
                FROM pg_trigger pt, pg_class pc, pg_namespace pns
               WHERE pns.oid=pc.relnamespace AND pc.oid=pt.tgrelid
                 AND pns.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast')
                 AND pt.tgisinternal=false
    ) LOOP
        EXECUTE format('DROP TRIGGER %I ON %I.%I;',
                       r.tgname, r.nspname, r.relname);
    END LOOP;
    -- constraints #1: foreign key
    FOR r IN (SELECT pns.nspname, pc.relname, pcon.conname
                FROM pg_constraint pcon, pg_class pc, pg_namespace pns
               WHERE pns.oid=pc.relnamespace AND pc.oid=pcon.conrelid
                 AND pns.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast')
                 AND pcon.contype='f'
    ) LOOP
        EXECUTE format('ALTER TABLE ONLY %I.%I DROP CONSTRAINT %I;',
                       r.nspname, r.relname, r.conname);
    END LOOP;
    -- constraints #2: the rest
    FOR r IN (SELECT pns.nspname, pc.relname, pcon.conname
                FROM pg_constraint pcon, pg_class pc, pg_namespace pns
               WHERE pns.oid=pc.relnamespace AND pc.oid=pcon.conrelid
                 AND pns.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast')
                 AND pcon.contype<>'f'
    ) LOOP
        EXECUTE format('ALTER TABLE ONLY %I.%I DROP CONSTRAINT %I;',
                       r.nspname, r.relname, r.conname);
    END LOOP;
    -- indexes
    FOR r IN (SELECT pns.nspname, pc.relname
                FROM pg_class pc, pg_namespace pns
               WHERE pns.oid=pc.relnamespace
                 AND pns.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast')
                 AND pc.relkind='i'
    ) LOOP
        EXECUTE format('DROP INDEX %I.%I;',
                       r.nspname, r.relname);
    END LOOP;
    -- normal and materialised views
    FOR r IN (SELECT pns.nspname, pc.relname
                FROM pg_class pc, pg_namespace pns
               WHERE pns.oid=pc.relnamespace
                 AND pns.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast')
                 AND pc.relkind IN ('v', 'm')
    ) LOOP
        EXECUTE format('DROP VIEW %I.%I;',
                       r.nspname, r.relname);
    END LOOP;
    -- tables
    FOR r IN (SELECT pns.nspname, pc.relname
                FROM pg_class pc, pg_namespace pns
               WHERE pns.oid=pc.relnamespace
                 AND pns.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast')
                 AND pc.relkind='r'
    ) LOOP
        EXECUTE format('DROP TABLE %I.%I;',
                       r.nspname, r.relname);
    END LOOP;
    -- sequences
    FOR r IN (SELECT pns.nspname, pc.relname
                FROM pg_class pc, pg_namespace pns
               WHERE pns.oid=pc.relnamespace
                 AND pns.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast')
                 AND pc.relkind='S'
    ) LOOP
        EXECUTE format('DROP SEQUENCE %I.%I;',
                       r.nspname, r.relname);
    END LOOP;
    -- functions / procedures
    FOR r IN (SELECT pns.nspname, pp.proname, pp.oid
                FROM pg_proc pp, pg_namespace pns
               WHERE pns.oid=pp.pronamespace
                 AND pns.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast')
    ) LOOP
        EXECUTE format('DROP FUNCTION %I.%I(%s);',
                       r.nspname, r.proname,
                       pg_get_function_identity_arguments(r.oid));
    END LOOP;
    -- non-default schemata we own; assumed to be run by a non-superuser
    FOR r IN (SELECT pns.nspname
                FROM pg_namespace pns, pg_roles pr
               WHERE pr.oid=pns.nspowner
                 AND pns.nspname NOT IN ('information_schema', 'pg_catalog', 'pg_toast', 'public')
                 AND pr.rolname=current_user
    ) LOOP
        EXECUTE format('DROP SCHEMA %I;', r.nspname);
    END LOOP;
    -- voila
    RAISE NOTICE 'Database cleared!';
END; $$;
EOF
echo ""
echo "**** SYNC DATABASE FROM $INITDB_SOURCE_HOST/$INITDB_SOURCE_NAME****"
PGPASSWORD="$INITDB_SOURCE_PASSWORD" pg_dump \
--host="$INITDB_SOURCE_HOST" --username="$INITDB_SOURCE_USER" --dbname="$INITDB_SOURCE_NAME" \
--no-owner --clean \
| PGPASSWORD="$INITDB_PASSWORD" psql --host="$INITDB_HOST" --username="$INITDB_USER" --dbname="$INITDB_NAME"
echo "DONE."
#!/usr/bin/env bash
# alpine
if command -v apk; then
    apk update
    apk add build-base gcc openldap-dev libsasl postgresql-dev libressl-dev
fi
# debian
if command -v apt; then
    apt update
    apt install -y build-essential libldap2-dev libsasl2-dev libpq-dev libssl-dev python3-dev
fi
# redhat
if command -v yum; then
    yum install -y gcc gcc-c++ make
fi
-r requirements_base.txt
django-auth-ldap==1.7.*
python-ldap==3.2.*
psycopg2-binary==2.*
gunicorn
#uvicorn
# needed for debugging
ptvsd==4.3.2