diff --git a/.env b/.env index 901ba2af..08d441e2 100644 --- a/.env +++ b/.env @@ -1 +1,9 @@ CI_PROJECT_DIR=. +SCRAM_PEERING_IFACE=eth1 +SCRAM_V4_SUBNET=10.124.0.0/24 +SCRAM_V4_GATEWAY=10.124.0.1 +SCRAM_V4_ADDRESS=10.124.0.4 +SCRAM_V6_SUBNET=2004:600:202:9::8/126 +SCRAM_V6_GATEWAY=2004:600:202:9::9 +SCRAM_V6_ADDRESS=2004:600:202:9::a +HOSTNAME=$(hostname) diff --git a/.envs/.production/.django b/.envs/.production/.django new file mode 100644 index 00000000..45f255c9 --- /dev/null +++ b/.envs/.production/.django @@ -0,0 +1,35 @@ +# General +# ------------------------------------------------------------------------------ +#DJANGO_READ_DOT_ENV_FILE=True +#DJANGO_SETTINGS_MODULE=config.settings.production +#DJANGO_SECRET_KEY= +#DJANGO_ADMIN_URL=admin/ +#DJANGO_ALLOWED_HOSTS= +# Security +# ------------------------------------------------------------------------------ +# TIP: better off using DNS, however, redirect is OK too +#DJANGO_SECURE_SSL_REDIRECT=False + +# Email +# ------------------------------------------------------------------------------ +#DJANGO_SERVER_EMAIL=test@test.com + +# Gunicorn +# ------------------------------------------------------------------------------ +#WEB_CONCURRENCY=4 + +# Redis +# ------------------------------------------------------------------------------ +#REDIS_URL=redis://redis:6379/0 + +#SCRAM_AUTH_METHOD=oidc +#SCRAM_AUTH_METHOD=local +#OIDC_OP_JWKS_ENDPOINT= +#OIDC_OP_AUTHORIZATION_ENDPOINT= +#OIDC_OP_TOKEN_ENDPOINT= +#OIDC_OP_USER_ENDPOINT= +#OIDC_RP_SIGN_ALGO=RS256 + + + +#OIDC_RP_CLIENT_ID= diff --git a/.envs/.production/.postgres b/.envs/.production/.postgres new file mode 100644 index 00000000..78e1b46b --- /dev/null +++ b/.envs/.production/.postgres @@ -0,0 +1,7 @@ +# PostgreSQL +# ------------------------------------------------------------------------------ +#POSTGRES_HOST=postgres +#POSTGRES_PORT=5432 +#POSTGRES_DB=scram +#POSTGRES_USER=scram +#POSTGRES_PASSWORD=scram diff --git a/.envs/.production/.translator b/.envs/.production/.translator new file mode 100644 index 00000000..7294fa2e --- /dev/null +++ b/.envs/.production/.translator @@ -0,0 +1,4 @@ +# Translator +# ------------------------------------------------------------------------------ +#TRANSLATOR_HOSTNAME=localhost +#TRANSLATOR_URL="ws://django:5000/ws/route_manager/translator_block/" diff --git a/.gitignore b/.gitignore index ed35d0c4..a9af3522 100644 --- a/.gitignore +++ b/.gitignore @@ -7,11 +7,6 @@ __pycache__/ # Ignore prod nginx files so git doesnt think we've locally added new files when deploying templates with ansible compose/production/nginx -.idea/ -.idea/* -.idea/scram.iml -*.iml - # C extensions *.so @@ -63,6 +58,9 @@ staticfiles/ # Sphinx documentation docs/_build/ +docs/api +docs/*.db + # PyBuilder target/ @@ -162,6 +160,7 @@ typings/ !.vscode/tasks.json !.vscode/launch.json !.vscode/extensions.json +*.code-workspace # Provided default Pycharm Run/Debug Configurations should be tracked by git @@ -172,7 +171,13 @@ typings/ # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 -.idea/* +# Allow Debugger Files +/.idea/runConfigurations/*.xml +/.idea/inspectionProfiles +!.idea/runConfigurations/*_Debugger.xml + +.idea/scram.iml +*.iml # User-specific stuff: .idea/**/workspace.xml @@ -334,6 +339,7 @@ scram/media/ .ipython/ +# envs are now stored in compose file .envs/* !.envs/.local/ compose.override.yml diff --git a/.gitlab-ci.yml 
b/.gitlab-ci.yml index ef651524..435a4812 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -7,7 +7,6 @@ include: stages: - lint - test -- cleanup variables: POSTGRES_USER: scram @@ -28,6 +27,8 @@ pytest: image: docker:24.0.6-dind services: - docker:dind + variables: + POSTGRES_ENABLED: 1 before_script: - apk add make - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY @@ -38,12 +39,18 @@ pytest: - make migrate - make run script: + - export COMPOSE_PROJECT_NAME=$CI_PIPELINE_ID - make coverage.xml artifacts: reports: coverage_report: coverage_format: cobertura path: coverage.xml + after_script: + - export COMPOSE_PROJECT_NAME=$CI_PIPELINE_ID + - make stop + - make clean + gemnasium-dependency_scanning: variables: @@ -59,17 +66,3 @@ code_quality_html: sast: stage: test - -final_clean: - image: docker:24.0.6-dind - services: - - docker:dind - before_script: - - apk add make - - export COMPOSE_PROJECT_NAME=$CI_PIPELINE_ID - stage: cleanup - rules: - - when: always # run even if something failed - script: - - make stop - - make clean diff --git a/.idea/runConfigurations/Django_Debugger.xml b/.idea/runConfigurations/Django_Debugger.xml new file mode 100644 index 00000000..83810494 --- /dev/null +++ b/.idea/runConfigurations/Django_Debugger.xml @@ -0,0 +1,17 @@ + + + + + diff --git a/.idea/runConfigurations/Translator_Debugger.xml b/.idea/runConfigurations/Translator_Debugger.xml new file mode 100644 index 00000000..91e42d24 --- /dev/null +++ b/.idea/runConfigurations/Translator_Debugger.xml @@ -0,0 +1,17 @@ + + + + + diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..0f7145d2 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,38 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. 
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Django Debugger", + "type": "debugpy", + "request": "attach", + "connect": { + "host": "0.0.0.0", + "port": 56780 + }, + "pathMappings": [ + { + "localRoot": "${workspaceFolder}", + "remoteRoot": "/app" + } + ] + }, + { + "name": "Translator Debugger", + "type": "debugpy", + "request": "attach", + "connect": { + "host": "0.0.0.0", + "port": 56781 + }, + "pathMappings": [ + { + "localRoot": "${workspaceFolder}/translator", + "remoteRoot": "/app" + } + ] + } + ] +} diff --git a/Makefile b/Makefile index 4a83c796..082c6f91 100644 --- a/Makefile +++ b/Makefile @@ -22,28 +22,28 @@ compose.override.yml: ## behave-all: runs behave inside the containers against all of your features .Phony: behave-all behave-all: compose.override.yml - @docker compose run django coverage run -a manage.py behave --no-input --simple + @podman-compose run django coverage run -a manage.py behave --no-input --simple ## behave: runs behave inside the containers against a specific feature (append FEATURE=feature_name_here) .Phony: behave behave: compose.override.yml - @docker compose run django python manage.py behave --no-input --simple -i $(FEATURE) + @podman-compose run django python manage.py behave --no-input --simple -i $(FEATURE) ## behave-translator .Phony: behave-translator behave-translator: compose.override.yml - @docker compose exec -T translator /usr/local/bin/behave /app/acceptance/features + @podman-compose exec -T translator /usr/local/bin/behave /app/acceptance/features ## build: rebuilds all your containers or a single one if CONTAINER is specified .Phony: build build: compose.override.yml - @docker compose up -d --no-deps --build $(CONTAINER) - @docker compose restart $(CONTAINER) + @podman-compose up -d --no-deps --build $(CONTAINER) + @podman-compose restart $(CONTAINER) ## coverage.xml: generate coverage from test runs coverage.xml: pytest behave-all behave-translator - @docker compose run django coverage report - @docker compose run django coverage xml + @podman-compose run django coverage report + @podman-compose run django coverage xml ## ci-test: runs all tests just like Gitlab CI does .Phony: ci-test @@ -52,18 +52,18 @@ ci-test: | toggle-local build migrate run coverage.xml ## clean: remove local containers and volumes .Phony: clean clean: compose.override.yml - @docker compose rm -f -s - @docker volume prune -f + @podman-compose rm -f -s + @podman volume prune -f ## collect-static: run collect static admin command .Phony: collectstatic collectstatic: compose.override.yml - @docker compose run django python manage.py collectstatic + @podman-compose run django python manage.py collectstatic ## django-addr: get the IP and ephemeral port assigned to docker:8000 .Phony: django-addr django-addr: compose.override.yml - @docker compose port django 8000 + @podman-compose port django 8000 ## django-url: get the URL based on http://$(make django-addr) .Phony: django-url @@ -75,15 +75,15 @@ django-url: compose.override.yml django-open: compose.override.yml @open http://$$(make django-addr) -## down: turn down docker compose stack +## down: turn down podman-compose stack .Phony: down down: compose.override.yml - @docker compose down + @podman-compose down ## exec: executes a given command on a given container (append CONTAINER=container_name_here and COMMAND=command_here) .Phony: exec exec: compose.override.yml - @docker compose exec $(CONTAINER) $(COMMAND) + 
@podman-compose exec $(CONTAINER) $(COMMAND) # This automatically builds the help target based on commands prepended with a double hashbang ## help: print this help output @@ -95,41 +95,41 @@ help: Makefile ## list-routes: list gobgp routes .Phony: list-routes list-routes: compose.override.yml - @docker compose exec gobgp gobgp global rib -a ipv4 - @docker compose exec gobgp gobgp global rib -a ipv6 + @podman-compose exec gobgp gobgp global rib -a ipv4 + @podman-compose exec gobgp gobgp global rib -a ipv6 ## migrate: makemigrations and then migrate .Phony: migrate migrate: compose.override.yml - @docker compose run django python manage.py makemigrations - @docker compose run django python manage.py migrate + @podman-compose run django python manage.py makemigrations + @podman-compose run django python manage.py migrate ## pass-reset: change admin's password .Phony: pass-reset pass-reset: compose.override.yml - @docker compose run django python manage.py changepassword admin + @podman-compose run django python manage.py changepassword admin ## pytest: runs pytest inside the containers .Phony: pytest pytest: compose.override.yml - @docker compose run django coverage run -m pytest + @podman-compose run django coverage run -m pytest ## run: brings up the containers as described in compose.override.yml .Phony: run run: compose.override.yml - @docker compose up -d + @podman-compose up -d ## stop: turns off running containers .Phony: stop stop: compose.override.yml - @docker compose stop + @podman-compose stop ## tail-log: tail a docker container's logs (append CONTAINER=container_name_here) .Phony: tail-log tail-log: compose.override.yml - @docker compose logs -f $(CONTAINER) + @podman-compose logs -f $(CONTAINER) ## type-check: static type checking .Phony: type-check type-check: compose.override.yml - @docker compose run django mypy scram + @podman-compose run django mypy scram diff --git a/Makefile.docker b/Makefile.docker new file mode 100644 index 00000000..4a83c796 --- /dev/null +++ b/Makefile.docker @@ -0,0 +1,135 @@ +.DEFAULT_GOAL := help + +## toggle-prod: configure make to use the production stack +.Phony: toggle-prod +toggle-prod: + @ln -sf compose.override.production.yml compose.override.yml + +## toggle-local: configure make to use the local stack +.Phony: toggle-local +toggle-local: + @ln -sf compose.override.local.yml compose.override.yml + +# Since toggle-(local|prod) are phony targets, this file is not +# tracked to compare if its "newer" so running another target with +# this as a prereq will not run this target again. That would +# overwrite compose.override.yml back to compose.override.local.yml no +# matter what, which is bad. 
Phony targets prevents this +## compose.override.yml: creates file compose.override.yml on first run (as a prereq) +compose.override.yml: + @ln -sf compose.override.local.yml compose.override.yml + +## behave-all: runs behave inside the containers against all of your features +.Phony: behave-all +behave-all: compose.override.yml + @docker compose run django coverage run -a manage.py behave --no-input --simple + +## behave: runs behave inside the containers against a specific feature (append FEATURE=feature_name_here) +.Phony: behave +behave: compose.override.yml + @docker compose run django python manage.py behave --no-input --simple -i $(FEATURE) + +## behave-translator +.Phony: behave-translator +behave-translator: compose.override.yml + @docker compose exec -T translator /usr/local/bin/behave /app/acceptance/features + +## build: rebuilds all your containers or a single one if CONTAINER is specified +.Phony: build +build: compose.override.yml + @docker compose up -d --no-deps --build $(CONTAINER) + @docker compose restart $(CONTAINER) + +## coverage.xml: generate coverage from test runs +coverage.xml: pytest behave-all behave-translator + @docker compose run django coverage report + @docker compose run django coverage xml + +## ci-test: runs all tests just like Gitlab CI does +.Phony: ci-test +ci-test: | toggle-local build migrate run coverage.xml + +## clean: remove local containers and volumes +.Phony: clean +clean: compose.override.yml + @docker compose rm -f -s + @docker volume prune -f + +## collect-static: run collect static admin command +.Phony: collectstatic +collectstatic: compose.override.yml + @docker compose run django python manage.py collectstatic + +## django-addr: get the IP and ephemeral port assigned to docker:8000 +.Phony: django-addr +django-addr: compose.override.yml + @docker compose port django 8000 + +## django-url: get the URL based on http://$(make django-addr) +.Phony: django-url +django-url: compose.override.yml + @echo http://$$(make django-addr) + +## django-open: open a browser for http://$(make django-addr) +.Phony: django-open +django-open: compose.override.yml + @open http://$$(make django-addr) + +## down: turn down docker compose stack +.Phony: down +down: compose.override.yml + @docker compose down + +## exec: executes a given command on a given container (append CONTAINER=container_name_here and COMMAND=command_here) +.Phony: exec +exec: compose.override.yml + @docker compose exec $(CONTAINER) $(COMMAND) + +# This automatically builds the help target based on commands prepended with a double hashbang +## help: print this help output +.Phony: help +help: Makefile + @sed -n 's/^##//p' $< + +# TODO: When we move to flowspec this -a flag with change +## list-routes: list gobgp routes +.Phony: list-routes +list-routes: compose.override.yml + @docker compose exec gobgp gobgp global rib -a ipv4 + @docker compose exec gobgp gobgp global rib -a ipv6 + +## migrate: makemigrations and then migrate +.Phony: migrate +migrate: compose.override.yml + @docker compose run django python manage.py makemigrations + @docker compose run django python manage.py migrate + +## pass-reset: change admin's password +.Phony: pass-reset +pass-reset: compose.override.yml + @docker compose run django python manage.py changepassword admin + +## pytest: runs pytest inside the containers +.Phony: pytest +pytest: compose.override.yml + @docker compose run django coverage run -m pytest + +## run: brings up the containers as described in compose.override.yml +.Phony: run +run: 
compose.override.yml + @docker compose up -d + +## stop: turns off running containers +.Phony: stop +stop: compose.override.yml + @docker compose stop + +## tail-log: tail a docker container's logs (append CONTAINER=container_name_here) +.Phony: tail-log +tail-log: compose.override.yml + @docker compose logs -f $(CONTAINER) + +## type-check: static type checking +.Phony: type-check +type-check: compose.override.yml + @docker compose run django mypy scram diff --git a/PodmanNotes b/PodmanNotes new file mode 100644 index 00000000..0b065a3b --- /dev/null +++ b/PodmanNotes @@ -0,0 +1,161 @@ +Some general notes on using Podman and some shortcuts + +To completely clean the environment + + # Containers + podman rm -af + # Images + podman rmi -af + # Volumes + podman volume rm $(podman volume ls -q) + # Networks + podman network rm $(podman network ls -q) + # Pods + podman pod rm $(podman pod ls -q) + +To correct the postgres error: + + go to the postgres container: + podman exec -it b9a66c459fdf /bin/bash + + will be root. Now: + su - postgres + + postgres@b9a66c459fdf:~$ psql --dbname=scram --username=debug + + SELECT rolname FROM pg_roles; + CREATE ROLE postgres; + ALTER ROLE postgres WITH LOGIN; + + +General install directions with vanilla rRocky8 + + - yum update + - yum -y install podman podman-plugins containernetworking-plugins python3.12.x86_64 python3.12-pip.noarch git make netavark.x86_64 net-tools.x86_64 + - pip3.12 install podman-compose + - pip3.12 install django-anymail + - mkdir -p /root/.config/containers + - cp /etc/containers/registries.conf /root/.config/containers/registries.conf + + - edit /root/.config/containers/registries.conf with 'unqualified-search-registries = ["docker.io"]' + + - git clone https://github.com/esnet-security/SCRAM.git + + - podman-compose up : loads images and containers + - podman-compose down : there will be an error, this gets read for repair + - ./load_database : installs database + - podman-compose down : some containers are running now, close them down + - podman-compose up : final start + + +Container infrastructure mods: + +(1) edit the /etc/containers/containers.conf + +[network] +# Explicitly force "netavark" as to not use the outdated CNI networking, which it would not apply otherwise as long as old stuff is there. +# This may be removed once all containers were upgraded? +# see https://discussion.fedoraproject.org/t/how-to-get-podman-dns-plugin-container-name-resolution-to-work-in-fedora-coreos-36-podman-plugins-podman-dnsname/39493/5?u=rugk + +# official doc: +# Network backend determines what network driver will be used to set up and tear down container networks. +# Valid values are "cni" and "netavark". +# The default value is empty which means that it will automatically choose CNI or netavark. If there are +# already containers/images or CNI networks preset it will choose CNI. +# +# Before changing this value all containers must be stopped otherwise it is likely that +# iptables rules and network interfaces might leak on the host. A reboot will fix this. +# +network_backend = "netavark" + +# List of default capabilities for containers. If it is empty or commented out, +# the default capabilities defined in the container engine will be added. 
+# +default_capabilities = [ + "CHOWN", + "DAC_OVERRIDE", + "FOWNER", + "FSETID", + "KILL", + "NET_BIND_SERVICE", + "SETFCAP", + "SETGID", + "SETPCAP", + "SETUID", + "SYS_CHROOT", +] + + +(2) registries.conf +Be sure to have the following line in there: +unqualified-search-registries = ["docker.io"] + + + + +----- +If you see the error: + +[nginx] | Error: unable to start container b5756315d00ad99544e38cad2c15c7639275b5e74be5c2e013ce54b5e89ea689: generating dependency graph for container b5756315d00ad99544e38cad2c15c7639275b5e74be5c2e013ce54b5e89ea689: container bedbe4722e03856c0e200e41cee4d6c3d21a61f99c19ba5cf6347c7722f593f5 depends on container f7147489340954021f5a0b15298e2835be2f2d1856588f0ecee240945e430854 not found in input list: no such container + + +[root@fedora-s-1vcpu-2gb-sfo3-01 SCRAM]# podman ps -a +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +d09dbec54b9d localhost/scram_production_postgres:latest postgres 51 seconds ago Up 49 seconds (healthy) 5432/tcp scram_postgres_1 +f71474893409 docker.io/library/redis:5.0 redis-server 51 seconds ago Up 49 seconds (healthy) 6379/tcp scram_redis_1 +021234dbb965 docker.io/jauderho/gobgp:v2.32.0 /usr/local/bin/go... 51 seconds ago Up 46 seconds (starting) 0.0.0.0:41595->50051/tcp, 179/tcp scram_gobgp_1 +bedbe4722e03 localhost/scram_production_django:latest /start 51 seconds ago Up 47 seconds (healthy) scram_django_1 +7ec92daeddbb localhost/scram_translator:latest 50 seconds ago Up 45 seconds scram_translator_1 +b5756315d00a docker.io/library/nginx:1.19 nginx -g daemon o... 50 seconds ago Created 0.0.0.0:80->80/tcp, 0.0.0.0:443->443/tcp, 80/tcp scram_nginx_1 + +This has happened cause it looked for one of the dependent containers and did not find it. You can just restart the container in question. +podman container start b5756315d00a + + +[root@fedora-s-1vcpu-2gb-sfo3-01 SCRAM]# podman-compose systemd -a create-unit + +while in your project type `podman-compose systemd -a register` + +[root@fedora-s-1vcpu-2gb-sfo3-01 SCRAM]# podman-compose systemd -a register + +you can use systemd commands like enable, start, stop, status, cat +all without `sudo` like this: + + systemctl --user enable --now 'podman-compose@scram' + systemctl --user status 'podman-compose@scram' + journalctl --user -xeu 'podman-compose@scram' + +and for that to work outside a session +you might need to run the following command *once* + + sudo loginctl enable-linger 'root' + +you can use podman commands like: + + podman pod ps + podman pod stats 'pod_scram' + podman pod logs --tail=10 -f 'pod_scram' + + + +---------------- + +new diffs +move symlink from system to python 3.12 +use pip3.12 to install pkgs +check out Podman-0.2 +tar xvf envs.tar + +---------------- +nginx container + + nginx config file is located compose/production/nginx/nginx.conf + + The file on the podman server (/etc/scram/ssl/server.crt) will be mounted here /etc/ssl/server.crt + The file on the podman server (/etc/scram/ssl/server.key) will be mounted here /etc/ssl/server.key + + +---------------- + + + diff --git a/SCRAM_PodmanNetworkNotes b/SCRAM_PodmanNetworkNotes new file mode 100644 index 00000000..180a2bf1 --- /dev/null +++ b/SCRAM_PodmanNetworkNotes @@ -0,0 +1,90 @@ +This file contains a number of notes and configuration snippets to help things make more sense. 
+ +In the (installed) SCRAM pod we have the following networks + +NETWORK ID NAME DRIVER +2f259bab93aa podman bridge +399b5841a66a scram_podman_default bridge +e60c71373ed7 scram_podman_peering macvlan + +These networks provide the following functionality: + +scram_podman_default: network for different containers to talk amongst themselves. +scram_podman_peering: network attached to a dedicated interface that will talk to the external router. Only the gobgp container instance will be connnected to this network. + +Notes on each of these follow. + +-------------- scram_podman_default -------------- + +The config for the scram_podman_default looks like: + + "name": "scram_podman_default", + "id": "399b5841a66abf71233de6a1d9b1052f0ed87e9346d2d7eee20c80c3ad890ed7", + "driver": "bridge", + "network_interface": "podman1", + "created": "2024-11-12T06:04:45.494674612Z", + "subnets": [ + { + "subnet": "200:c0:ff:ee::/64", + "gateway": "200:c0:ff:ee::1" + }, + { + "subnet": "10.89.0.0/24", + "gateway": "10.89.0.1" + } + ], + "ipv6_enabled": true, + "internal": false, + "dns_enabled": true, + "labels": { + "com.docker.compose.project": "scram_podman", + "io.podman.compose.project": "scram_podman" + }, + "ipam_options": { + "driver": "host-local" + }, + "containers": {} + + +which will show up as an interface when the pod is running which matches up to the config above: + +podman1: flags=4163 mtu 1500 + inet 10.89.0.1 netmask 255.255.255.0 broadcast 10.89.0.255 + inet6 fe80::e033:77ff:fe99:f09e prefixlen 64 scopeid 0x20 + inet6 200:c0:ff:ee::1 prefixlen 64 scopeid 0x0 + ether e2:33:77:99:f0:9e txqueuelen 1000 (Ethernet) + RX packets 79 bytes 6136 (5.9 KiB) + RX errors 0 dropped 0 overruns 0 frame 0 + TX packets 48 bytes 6958 (6.7 KiB) + TX errors 0 dropped 0 overruns 0 carrier 0 collisions 0 + + +An interesting note is that you can run tcpdump on this interface outside of any of the containers, and see all the inter-container network traffic. I did not figure this out for a very long time. + + + + +-------------- scram_podman_peering -------------- + +The following variables are typically defined in the .env file located at the repo root. 
These + +SCRAM_PEERING_IFACE=eth1 +SCRAM_V4_SUBNET=10.124.0.0/24 +SCRAM_V4_GATEWAY=10.124.0.1 +SCRAM_V4_ADDRESS=10.124.0.4 +SCRAM_V6_SUBNET=2004:600:202:9::8/126 +SCRAM_V6_GATEWAY=2004:600:202:9::9 +SCRAM_V6_ADDRESS=2004:600:202:9::a + + +eth1: flags=4163 mtu 1500 + inet 10.124.0.4 netmask 255.255.240.0 broadcast 10.124.15.255 + inet6 fe80::940b:c2ff:fee8:f6f prefixlen 64 scopeid 0x20 + ether 96:0b:c2:e8:0f:6f txqueuelen 1000 (Ethernet) + RX packets 546 bytes 38136 (37.2 KiB) + RX errors 0 dropped 0 overruns 0 frame 0 + TX packets 1118 bytes 79608 (77.7 KiB) + TX errors 0 dropped 0 overruns 0 carrier 0 collisions 0 + + + diff --git a/compose.override.local.yml b/compose.override.local.yml index c6414a69..fd0e0aea 100644 --- a/compose.override.local.yml +++ b/compose.override.local.yml @@ -14,20 +14,42 @@ services: volumes: - $CI_PROJECT_DIR:/app:z - /tmp/profile_data:/tmp/profile_data - env_file: - - ./.envs/.local/.django - - ./.envs/.local/.postgres + environment: + - POSTGRES_HOST=postgres + - POSTGRES_PORT=5432 + - POSTGRES_DB=scram + - POSTGRES_USER=scram + - POSTGRES_PASSWORD=scram + - DJANGO_READ_DOT_ENV_FILE=True + - DJANGO_SETTINGS_MODULE=config.settings.production + - DJANGO_SECRET_KEY=DJANGO_SECRET_KY + - DJANGO_ADMIN_URL=admin/ + - DJANGO_ALLOWED_HOSTS=django + - DJANGO_SECURE_SSL_REDIRECT=True + - DJANGO_SERVER_EMAIL=test@test.com + - WEB_CONCURRENCY=4 + - SCRAM_AUTH_METHOD=local + - POSTGRES_SSL=False + - REDIS_URL=redis://redis:6379/0 healthcheck: test: ["CMD", "curl", "-f", "http://django:8000/process_expired/"] ports: - "8000" + - 56780:56780 + environment: + # This can be set to either `debugpy` or `pycharm-pydevd` currently. + - DEBUG=${DEBUG:-} postgres: volumes: - local_postgres_data:/var/lib/postgresql/data:Z - local_postgres_data_backups:/backups:z - env_file: - - ./.envs/.local/.postgres + environment: + - POSTGRES_HOST=postgres + - POSTGRES_PORT=5432 + - POSTGRES_DB=scram + - POSTGRES_USER=scram + - POSTGRES_PASSWORD=scram docs: image: scram_local_docs @@ -57,6 +79,15 @@ services: - "179" - "50051" + translator: + volumes: + - ./translator/tests/:/app/tests/ + ports: + - 56781:56781 + environment: + # This can be set to either `debugpy` or `pycharm-pydevd` currently. 
+ - DEBUG=${DEBUG:-} + + networks: default: ipam: diff --git a/compose.override.production.yml b/compose.override.production.yml index 112473c8..775996f5 100644 --- a/compose.override.production.yml +++ b/compose.override.production.yml @@ -1,32 +1,64 @@ --- version: '3.4' +# +# environment variables are defined in the .env file in this repo root +# + volumes: - production_postgres_data: {} - production_postgres_data_backups: {} + local_postgres_data: {} + local_postgres_data_backups: {} production_redis_data: {} production_nginx: {} + staticfiles: services: + # Rework based on known dependencies and podman serial startup + gobgp: + volumes: + - $CI_PROJECT_DIR/gobgp_config:/config:z + cap_add: [ "CAP_NET_BIND_SERVICE" ] + networks: + default: {} + peering: + ipv4_address: ${SCRAM_V4_ADDRESS} + ipv6_address: ${SCRAM_V6_ADDRESS} + privileged: true + ports: + #- "179" + - "50051" + + redis: + volumes: + - production_redis_data:/var/lib/redis:Z + + postgres: + volumes: + - local_postgres_data:/var/lib/postgresql/data:z + - local_postgres_data_backups:/backups:z + configs: + file: .envs/.production/.postgres + #ports: + # - "5432:5432" + django: build: dockerfile: ./compose/production/django/Dockerfile image: scram_production_django - env_file: - - ./.envs/.production/.django - - ./.envs/.production/.postgres - - /etc/vault.d/secrets/kv_root_security.env + configs: + file: .envs/.production/.django volumes: - - ./staticfiles:/staticfiles + - ./staticfiles:/staticfiles:rw,z healthcheck: test: ["CMD", "curl", "-f", "http://django:5000/process_expired/"] + #ports: + # - 5000:5000 - postgres: - volumes: - - production_postgres_data:/var/lib/postgresql/data:Z - - production_postgres_data_backups:/backups:z - env_file: - - ./.envs/.production/.postgres + translator: + configs: + file: .envs/.production/.translator + networks: + default: {} nginx: image: nginx:1.19 @@ -41,31 +73,11 @@ services: - ./compose/production/nginx/nginx.conf:/etc/nginx/conf.d/default.conf - /etc/scram/ssl/server.crt:/etc/ssl/server.crt - /etc/scram/ssl/server.key:/etc/ssl/server.key - - ./staticfiles:/staticfiles + - ./staticfiles:/staticfiles:rw,z ports: - "443:443" - "80:80" - redis: - volumes: - - production_redis_data:/var/lib/redis:Z - - gobgp: - volumes: - - ./gobgp_config:/config:z - networks: - default: {} - peering: - ipv4_address: ${SCRAM_V4_ADDRESS} - ipv6_address: ${SCRAM_V6_ADDRESS} - ports: - - "179:179" - - "50051" - - translator: - env_file: - - ./.envs/.production/.translator - networks: default: enable_ipv6: true diff --git a/compose.yml b/compose.yml index 30a6c624..eb397bff 100644 --- a/compose.yml +++ b/compose.yml @@ -1,5 +1,4 @@ --- -version: '3.4' services: django: @@ -32,6 +31,8 @@ services: - net.ipv6.conf.all.disable_ipv6=0 healthcheck: test: ["CMD-SHELL", "pg_isready -U postgres"] + deploy: + replicas: ${POSTGRES_ENABLED:-0} redis: image: redis:5.0 @@ -60,8 +61,9 @@ services: condition: service_healthy gobgp: condition: service_healthy + django: + condition: service_healthy networks: default: {} sysctls: - net.ipv6.conf.all.disable_ipv6=0 - diff --git a/compose/local/translator/Dockerfile b/compose/local/translator/Dockerfile index 6f84a7b3..21ec884a 100644 --- a/compose/local/translator/Dockerfile +++ b/compose/local/translator/Dockerfile @@ -11,18 +11,20 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* # Requirements are installed here to ensure they will be cached.
-COPY ./translator/requirements.txt /requirements.txt -RUN pip install -r /requirements.txt +COPY ./translator/requirements /requirements +RUN pip install -r /requirements/base.txt RUN mkdir /app \ - && cd /app \ - && git clone -b v2.32.0 https://github.com/osrg/gobgp.git \ - && cd gobgp/api \ - && python3 -m grpc_tools.protoc -I./ --python_out=/app/ --grpc_python_out=/app/ *.proto + && cd /app \ + && git clone -b v2.32.0 https://github.com/osrg/gobgp.git \ + && cd gobgp/api \ + && python3 -m grpc_tools.protoc -I./ --python_out=/app/ --grpc_python_out=/app/ *.proto COPY ./translator/translator.py /app COPY ./translator/gobgp.py /app -COPY ./translator/tests /app +COPY ./translator/exceptions.py /app +COPY ./translator/shared.py /app + RUN chmod +x /app/translator.py WORKDIR /app diff --git a/compose/production/django/Dockerfile b/compose/production/django/Dockerfile index e4a786a2..04579c49 100644 --- a/compose/production/django/Dockerfile +++ b/compose/production/django/Dockerfile @@ -5,7 +5,7 @@ ENV PYTHONUNBUFFERED 1 RUN apt-get update \ # dependencies for building Python packages and Docker healthcheck - && apt-get install -y build-essential curl git\ + && apt-get install -y build-essential curl git net-tools iputils-ping procps\ # psycopg2 dependencies && apt-get install -y libpq-dev \ # Translations dependencies diff --git a/compose/production/django/entrypoint b/compose/production/django/entrypoint index 3a01683f..91029030 100644 --- a/compose/production/django/entrypoint +++ b/compose/production/django/entrypoint @@ -24,6 +24,7 @@ try: password="${POSTGRES_PASSWORD}", host="${POSTGRES_HOST}", port="${POSTGRES_PORT}", + sslmode="disable", ) except psycopg2.OperationalError: sys.exit(-1) diff --git a/compose/production/nginx/nginx.conf b/compose/production/nginx/nginx.conf new file mode 100644 index 00000000..cd2f48bb --- /dev/null +++ b/compose/production/nginx/nginx.conf @@ -0,0 +1,54 @@ +upstream django { + server django:5000; +} + +#server { +# listen [::]:80; +# listen 80; +# server_name 24.199.100.172; +# +# #return 301 http://$host$request_uri; +# +#} + +server { + #listen [::]:443 ssl; + #listen 443 ssl; + listen 80; + #server_name fedora-s-1vcpu-2gb-sfo3-01; + + server_tokens off; + proxy_read_timeout 600; + proxy_connect_timeout 600; + proxy_send_timeout 600; + + #ssl_certificate /etc/ssl/server.crt; + #ssl_certificate_key /etc/ssl/server.key; + #ssl_protocols TLSv1.2; + #ssl_ciphers 'ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA3 +84:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:!SHA1:!SHA256:!SHA384:!DSS:!aNULL'; + #ssl_prefer_server_ciphers on; + + location / { + proxy_pass http://django; + proxy_redirect off; + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Port $server_port; + proxy_set_header X-Forwarded-Host $host:$server_port; + proxy_set_header X-Forwarded-Server $host; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /staticfiles/ { + root /staticfiles/; + } + + location /ws/ { + proxy_pass http://django/ws/; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "Upgrade"; + proxy_set_header Host $host; + } +} + diff --git a/config/asgi.py b/config/asgi.py index 6916269a..1b3cd6e9 100644 --- a/config/asgi.py +++ b/config/asgi.py @@ -7,6 +7,7 @@ 
https://docs.djangoproject.com/en/dev/howto/deployment/asgi/ """ +import logging import os import sys from pathlib import Path @@ -17,6 +18,29 @@ # TODO: from channels.security.websocket import AllowedHostsOriginValidator from django.core.asgi import get_asgi_application +# Here we setup a debugger if this is desired. This obviously should not be run in production. +debug_mode = os.environ.get("DEBUG") +if debug_mode: + logging.info(f"Django is set to use a debugger. Provided debug mode: {debug_mode}") + if debug_mode == "pycharm-pydevd": + logging.info("Entering debug mode for pycharm, make sure the debug server is running in PyCharm!") + + import pydevd_pycharm + + pydevd_pycharm.settrace("host.docker.internal", port=56783, stdoutToServer=True, stderrToServer=True) + + logging.info("Debugger started.") + elif debug_mode == "debugpy": + logging.info("Entering debug mode for debugpy (VSCode)") + + import debugpy + + debugpy.listen(("0.0.0.0", 56780)) + + logging.info("Debugger listening on port 56780.") + else: + logging.warning(f"Invalid debug mode given: {debug_mode}. Debugger not started") + # This allows easy placement of apps within the interior # scram directory. ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent diff --git a/config/settings/production.py b/config/settings/production.py index bb900668..93acdfee 100644 --- a/config/settings/production.py +++ b/config/settings/production.py @@ -15,8 +15,10 @@ DATABASES["default"] = env.db("DATABASE_URL") # noqa F405 DATABASES["default"]["ATOMIC_REQUESTS"] = True # noqa F405 DATABASES["default"]["CONN_MAX_AGE"] = env.int("CONN_MAX_AGE", default=60) # noqa F405 -if env("POSTGRES_SSL"): - DATABASES["default"]["OPTIONS"] = {"sslmode": "require"} # noqa F405 +#if env("POSTGRES_SSL"): +# DATABASES["default"]["OPTIONS"] = {"sslmode": "require"} # noqa F405 +#else: +#DATABASES["default"]["OPTIONS"] = {"sslmode": "disable"} # noqa F405 # CACHES # ------------------------------------------------------------------------------ @@ -149,6 +151,7 @@ # https://docs.djangoproject.com/en/dev/ref/settings/#login-url LOGIN_URL = "oidc_authentication_init" +#LOGIN_URL = "/" # https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url LOGIN_REDIRECT_URL = "/" diff --git a/docs/development_guide.md b/docs/development_guide.md index e751e140..38b6d5a4 100644 --- a/docs/development_guide.md +++ b/docs/development_guide.md @@ -1,8 +1,9 @@ +# Development Guide + ## Technology Explainer This project is based off of [this cookiecutter](https://github.com/cookiecutter/cookiecutter-django), which is based off of the book [Two Scoops of Django](https://www.feldroy.com/books/two-scoops-of-django-3-x). This book provides an opinionated take on Django. If you are working on this project, and stuck on the "how" for a given situation, your first step should be to see if the book has a suggestion. - The last two could theoretically be pulled out and run separately from the rest of this stack, but for the time being, we are running them all on the same host(s). ### Styling @@ -18,6 +19,7 @@ We use git as our VCS and the main repo is stored on ESnet's [gitlab instance](h Changes are expected to be created and tested fully locally using docker. This should give a higher level of confidence in changes, as well as speed up the development cycle as you can immediately test things locally. Theoretically we can run either the local or production version (no SSL) on our development workstations. 
Production will mirror that of a true production instance (minus SSL), whereas local runs with more debugging options and no web server since the dev version of django can serve all we need. Accepted branch naming examples: + * `topic/soehlert/add_docs-sec-123` (the sec-123 represents a jira ticket number) * `topic/soehlert/update_docs` (if there is no related jira ticket) * `hotfix/broken_thing` @@ -36,7 +38,7 @@ Accepted branch naming examples: ## Testing -We should be testing as much as we reasonably can. Currently, there is a mix of [behave-django](https://behave-django.readthedocs.io/en/stable/) and pytest. +We should be testing as much as we reasonably can. Currently, there is a mix of [behave-django](https://behave-django.readthedocs.io/en/stable/) and pytest. If you are unsure which of the two to use, please feel free to ask. * To run all behave tests `make behave-all` @@ -47,16 +49,18 @@ If you are unsure which of the two to use, please feel free to ask. Ideally we would never have failing tests getting merged, so clearing up the `make ci-test` before a merge request is suggested. -#### Debugging Tests +### Debugging Tests If you are seeing failed API calls, you can try to browse the API directly in a web browser. You can also try to run some curl commands. You will need to create and authorize a client before you can make any API calls to affect change. * To create a UUID for use with a client you can use python in an interactive terminal: - 1. ```python - import uuid - print(str(uuid.uuid4())) - ``` + + ```python + import uuid + print(str(uuid.uuid4())) + ``` + * To create a client 1. Make sure you know the admin user password for basic auth `make pass-reset` 2. `make django-open` @@ -65,7 +69,7 @@ You will need to create and authorize a client before you can make any API calls * Use curl with basic auth and json content type header 1. Get the correct port `make django-url` - 2. Replace with the correct port `curl -XPOST http://0.0.0.0:62410/api/v1/entrys/ -H 'Content-Type: application/json' + 2. Replace with the correct port `curl -XPOST http://0.0.0.0:62410/api/v1/entrys/ -H 'Content-Type: application/json' --data '{"route": "1.1.1.1/32", "actiontype": "block", "comment": "testing", "uuid": "UUID GOES HERE"}'; echo` Trying to write test output to a file is a challenge due to running in ephemeral docker containers, but you should be able to print to stderr to get some debug info as needed as well. @@ -75,9 +79,90 @@ Trying to write test output to a file is a challenge due to running in ephemeral There are a few troubleshooting tricks available to you. * Run with `make toggle-local` as this will turn on debug mode in django -* To see if your blocks are making it into gobgp you can run `make list-routes` +* To see if your blocks are making it into GoBGP you can run `make list-routes` * If you want container logs `make tail-log CONTAINER=$service-name` +## Interactive Debugger + +All of the python applications that are part of SCRAM are setup to work with a variety of interactive debuggers. This can prove quite useful when developing a new feature, tracing down a bug, or just trying to learn how data flows through the application. When running SCRAM in `local` mode, this is enabled by setting up the `DEBUG` environment variable on your system. Because various IDEs have their own approaches to debugging an application, below are the instructions on how to set things up so you can debug. 
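+
+For quick reference, here is a minimal sketch of enabling this from a shell (assuming a zsh/bash shell; `debugpy` and `pycharm-pydevd` are the only values the startup code recognizes, anything else just logs a warning and no debugger is started):
+
+```shell
+# compose.override.local.yml passes whatever is in your shell through as DEBUG=${DEBUG:-},
+# so export the value and recreate the containers for it to take effect.
+export DEBUG=debugpy   # or: export DEBUG=pycharm-pydevd
+make clean && make run
+```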
+ +### VSCode and debugpy + +To debug SCRAM with VSCode, you simply need to set an environment variable on your machine that sets `DEBUG` to `debugpy` and then you'll need to recreate your SCRAM containers. For example, on zsh, you would run `export DEBUG=debugpy && make clean && make run` and the application stack will start up ready to be connected to with your debugger. + +Now that the debugpy server is running, using VSCode, navigate to the debugger tab and start both the django and translator debugging sessions as shown in the image below: + +![Screenshot showing VSCode and saying to open the debugger panel first, then start both provided debuggers.](img/development_guide/debug_vscode/debug_start_debugger.png) + +> _**Note:**_ VSCode might prompt you to install the `Python Debugger` Extension if you don't have this installed already. Additionally, make sure you are opening the remote debugger session that connects to the containers running the SCRAM code. VSCode can also run a debugger against the file you have open, however, this won't work in an application like SCRAM. + +Once both debuggers are connected, you should see them listed in the call stack portion of the debugger tray. Now you're ready to actually debug the application. To do this, you'll typically want to start setting breakpoints where you want to inspect the code as it runs. In this example, we'll debug entering a block into SCRAM and we'll debug the stack by setting breakpoints on the first lines within `scram/route_manager_api/views.py:perform_create` and `translator/gobgp.py:add_path`. You can see the breakpoints represented by the red dots in the below screenshot: + +![Screenshot showing the VSCode debugger with two breakpoints set.](img/development_guide/debug_vscode/debug_breakpoints.png) + +Using this example, you can add a block for the address `2001:db8::/128` to the web UI and see it as it hits the breakpoint in `views.py` and you step over to the line that extracts the route. From here you can inspect the route variable and even make changes to it! Here is what you should see in VSCode: + +![Screenshot of VSCode showing the route entry provided to the API `EntryViewSet`](img/development_guide/debug_vscode/debug_route_variable.png) + +When we continue to the next breakpoint, we are brought to the breakpoint added in the translator's `add_path()` function for GoBGP, and we can see that it's the same value that went through django. Again, we could change the values here if we wanted to, and we can also interact with the python REPL for further debugging. Since the application is currently paused and waiting for us to continue, you can see that the block entry shows up in the web UI, but its status is unknown.
By running `make list-routes`, we can also see that GoBGP has not yet received the instructions to announce the prefix: + +![Image of web UI showing that 2001:db8::/128 is unknown and that the Network is not in table for GoBGP](img/development_guide/debug_vscode/debug_route_unknown.png) + +Once we hit continue on the debugger, since we have no more breakpoints, the block will be completed, and we'll see that it's being announced properly: + +![Image of web UI showing that 2001:db8::/128 is announced and that the Network is in the table for GoBGP](img/development_guide/debug_vscode/debug_route_announced.png) + +For more detailed information on how to use the VSCode debugger, read [this article.](https://code.visualstudio.com/docs/editor/debugging) + +### PyCharm and pycharm-pydevd + +To debug SCRAM with PyCharm, you first need to ensure that the debugger servers are running in PyCharm. PyCharm uses `pydevd` which works the opposite of `debugpy`, such that the server lives in your IDE, and your code connects out to the IDE for the debugger connection. To start the debugger server in PyCharm, you simply need to choose the two debugger profiles that are already checked in to this repository and start them, as shown here: + +![An image showing a screenshot of PyCharm with an arrow pointing towards the button that you click to start the debugger.](img/development_guide/debug_pycharm/start_debugger_server.png) + +> **NOTE:** Ensure that you start both the django and the translator debugger servers! + +Once both servers are started in PyCharm, you simply need to set an environment variable on your machine that sets `DEBUG` to `pycharm-pydevd` and then you'll need to recreate your SCRAM containers. For example, on zsh, you would run `export DEBUG=pycharm-pydevd && make clean && make run` and the application stack will start up and connect to your debugger. The PyCharm debugger by default pauses the application once the debugger connects, so you'll need to manually resume the code by clicking the `Resume` button for both the django and the translator containers, like so: + +![Image showing how to resume the debugger once it initially connects](img/development_guide/debug_pycharm/resume_programs.png) + +> **NOTE:** Once the application connects to the debugger, you will see the following warning in the debugger console: +>```text +>Warning: wrong debugger version. Use pycharm-debugger.egg from PyCharm installation folder +>Or execute: 'pip install pydevd-pycharm~=%242.23339.19' +>``` +> This is because we use an unpinned version of `pycharm-pydevd` for flexibility. If you run into issues, pin the dependency to the version you need in both `translator/requirements/local.txt` and `requirements/local.txt` and rebuild the entire stack. + +Now that the debugger is connected, you're ready to actually debug the application. To do this, you'll typically want to start setting breakpoints where you want to inspect the code as it runs. In this example, we'll debug entering a block into SCRAM and we'll debug the stack by setting breakpoints on the first lines within `scram/route_manager_api/views.py:perform_create` and `translator/gobgp.py:add_path`.
You can see the breakpoints represented by the red dots in the below screenshot: + +![Screenshot showing the PyCharm debugger with two breakpoints set.](img/development_guide/debug_pycharm/breakpoints.png) + +Using this example, you can add a block for the address `2001:db8::/128` to the web UI and see it as it hits the breakpoint in `views.py` and you step over to the line that extracts the route. From here you can inspect the route variable in the `Threads & Variables` tab and even make changes to it! Here is what you should see in PyCharm: + +![Screenshot of PyCharm showing the route entry provided to the API `EntryViewSet`](img/development_guide/debug_pycharm/debug_route_variable.png) + +When we continue to the next breakpoint, we are brought to the breakpoint added in the translator's `add_path()` function for GoBGP, and we can see that it's the same value that went through django. Again, we could change the values here if we wanted to, and we can also interact with the python REPL for further debugging. Since the application is currently paused and waiting for us to continue, you can see that the block entry shows up in the web UI, but its status is unknown. By running `make list-routes`, we can also see that GoBGP has not yet received the instructions to announce the prefix: + +![Image of web UI showing that 2001:db8::/128 is unknown and that the Network is not in table for GoBGP](img/development_guide/debug_pycharm/debug_route_unknown.png) + +Once we hit continue on the debugger, since we have no more breakpoints, the block will be completed, and we'll see that it's being announced properly: + +![Image of web UI showing that 2001:db8::/128 is announced and that the Network is in the table for GoBGP](img/development_guide/debug_pycharm/debug_route_announced.png) + +For more detailed information on how to use the PyCharm debugger, read [this article.](https://www.jetbrains.com/guide/python/tutorials/getting-started-pycharm/basic-code-debugging/) + +### Turning off Debugging + +To turn off debugging for either application, you simply need to remove the variable `DEBUG` and relaunch SCRAM. Using `zsh` as an example, you would simply run `unset DEBUG && make clean && make run`. +### Debugging Debugging +Like all complicated things, running a remote debugger in this manner can sometimes be finicky. Some common scenarios you might run into are listed below: +* Starting the wrong debugger for the application you're trying to debug with + * Make sure that you are using `debugpy` with VSCode and `pycharm-pydevd` with PyCharm! + * If you provide an invalid debugger, you'll see something like `f"Invalid debug mode given: {debug_mode}. Debugger not started"` as a warning level log in syslog. +* Not being able to connect from VSCode to the applications with the debugger + * Make sure that you can see that the debugger has started by looking at the container logs with `make tail-log`. Here you should see a message along the lines of `Translator is set to use a debugger. Provided debug mode: debugpy` or `Django is set to use a debugger. Provided debug mode: debugpy`. This will of course vary depending on which app you're looking at and which debug mode you're using. +* Not being able to connect with PyCharm. + * Because the PyCharm debugger requires a connection from the application to PyCharm, we have to rely on the existence of the `host.docker.internal` DNS entry being available to the application.
This is provided by default with colima and Docker Desktop, however, using docker-engine on linux does not provide this, so you might need to manually add this to your container using the `extra_hosts` option in docker compose. diff --git a/docs/img/development_guide/debug_pycharm/breakpoints.png b/docs/img/development_guide/debug_pycharm/breakpoints.png new file mode 100644 index 00000000..a3b20c24 Binary files /dev/null and b/docs/img/development_guide/debug_pycharm/breakpoints.png differ diff --git a/docs/img/development_guide/debug_pycharm/debug_route_announced.png b/docs/img/development_guide/debug_pycharm/debug_route_announced.png new file mode 100644 index 00000000..5ac20702 Binary files /dev/null and b/docs/img/development_guide/debug_pycharm/debug_route_announced.png differ diff --git a/docs/img/development_guide/debug_pycharm/debug_route_unknown.png b/docs/img/development_guide/debug_pycharm/debug_route_unknown.png new file mode 100644 index 00000000..8bb4adae Binary files /dev/null and b/docs/img/development_guide/debug_pycharm/debug_route_unknown.png differ diff --git a/docs/img/development_guide/debug_pycharm/debug_route_variable.png b/docs/img/development_guide/debug_pycharm/debug_route_variable.png new file mode 100644 index 00000000..e58342cd Binary files /dev/null and b/docs/img/development_guide/debug_pycharm/debug_route_variable.png differ diff --git a/docs/img/development_guide/debug_pycharm/resume_programs.png b/docs/img/development_guide/debug_pycharm/resume_programs.png new file mode 100644 index 00000000..8bf9b9e1 Binary files /dev/null and b/docs/img/development_guide/debug_pycharm/resume_programs.png differ diff --git a/docs/img/development_guide/debug_pycharm/start_debugger_server.png b/docs/img/development_guide/debug_pycharm/start_debugger_server.png new file mode 100644 index 00000000..721d3187 Binary files /dev/null and b/docs/img/development_guide/debug_pycharm/start_debugger_server.png differ diff --git a/docs/img/development_guide/debug_vscode/debug_breakpoints.png b/docs/img/development_guide/debug_vscode/debug_breakpoints.png new file mode 100644 index 00000000..826db0ce Binary files /dev/null and b/docs/img/development_guide/debug_vscode/debug_breakpoints.png differ diff --git a/docs/img/development_guide/debug_vscode/debug_route_announced.png b/docs/img/development_guide/debug_vscode/debug_route_announced.png new file mode 100644 index 00000000..d4ce098a Binary files /dev/null and b/docs/img/development_guide/debug_vscode/debug_route_announced.png differ diff --git a/docs/img/development_guide/debug_vscode/debug_route_unknown.png b/docs/img/development_guide/debug_vscode/debug_route_unknown.png new file mode 100644 index 00000000..ff4b5664 Binary files /dev/null and b/docs/img/development_guide/debug_vscode/debug_route_unknown.png differ diff --git a/docs/img/development_guide/debug_vscode/debug_route_variable.png b/docs/img/development_guide/debug_vscode/debug_route_variable.png new file mode 100644 index 00000000..4d8f4db6 Binary files /dev/null and b/docs/img/development_guide/debug_vscode/debug_route_variable.png differ diff --git a/docs/img/development_guide/debug_vscode/debug_start_debugger.png b/docs/img/development_guide/debug_vscode/debug_start_debugger.png new file mode 100644 index 00000000..ee4b0984 Binary files /dev/null and b/docs/img/development_guide/debug_vscode/debug_start_debugger.png differ diff --git a/envs.tar b/envs.tar new file mode 100644 index 00000000..9ce6d5c2 Binary files /dev/null and b/envs.tar differ diff --git 
a/load_database b/load_database new file mode 100644 index 00000000..a3e64152 --- /dev/null +++ b/load_database @@ -0,0 +1,4 @@ +# Installs the SCRAM database into a shiny new install +# +podman-compose run django python manage.py makemigrations +podman-compose run django python manage.py migrate diff --git a/requirements/base.txt b/requirements/base.txt index cd489161..fa58d0b6 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -23,6 +23,7 @@ django-model-utils==4.2.0 # https://github.com/jazzband/django-model-utils django-allauth==0.51.0 # https://github.com/pennersr/django-allauth django-crispy-forms==1.14.0 # https://github.com/django-crispy-forms/django-crispy-forms crispy-bootstrap5==0.6 # https://github.com/django-crispy-forms/crispy-bootstrap5 +django-anymail==8.6 # https://github.com/anymail/django-anymail # Django REST Framework djangorestframework==3.13.1 # https://github.com/encode/django-rest-framework diff --git a/requirements/local.txt b/requirements/local.txt index 09955c51..edc15a30 100644 --- a/requirements/local.txt +++ b/requirements/local.txt @@ -39,3 +39,9 @@ django-coverage-plugin==2.0.3 # https://github.com/nedbat/django_coverage_plugi pytest-django==4.5.2 # https://github.com/pytest-dev/pytest-django pytest~=7.1.2 behave~=1.2.6 # https://behave.readthedocs.io/en/stable/ + +# Debugging +# ------------------------------------------------------------------------------ +debugpy +# Pycharm might force you to be on the same version IDE as library, use caution. +pydevd-pycharm diff --git a/scram/route_manager/tests/test_websockets.py b/scram/route_manager/tests/test_websockets.py index dcda2f53..367359b8 100644 --- a/scram/route_manager/tests/test_websockets.py +++ b/scram/route_manager/tests/test_websockets.py @@ -183,5 +183,9 @@ def local_setUp(self): lambda ip, mask: { "type": "translator_add", "message": {"asn": 65550, "community": 100, "route": f"{ip}/{mask}"}, - } + }, + lambda ip, mask: { + "type": "translator_add", + "message": {"asn": 64496, "community": 4294967295, "route": f"{ip}/{mask}"}, + }, ] diff --git a/scram/users/apps.py b/scram/users/apps.py index 2ce72578..4d3bdd69 100644 --- a/scram/users/apps.py +++ b/scram/users/apps.py @@ -10,8 +10,8 @@ class UsersConfig(AppConfig): name = "scram.users" verbose_name = _("Users") - def ready(self): - try: - import scram.users.signals # noqa F401 - except ImportError: - logger.warning("SCRAM user signals not found") +# def ready(self): +# try: +# import scram.users.signals # noqa F401 +# except ImportError: +# logger.warning("SCRAM user signals not found") diff --git a/translator/exceptions.py b/translator/exceptions.py new file mode 100644 index 00000000..f1a54299 --- /dev/null +++ b/translator/exceptions.py @@ -0,0 +1,9 @@ +""" +This module holds all of the exceptions we want to raise in our translators. +""" + + +class ASNError(TypeError): + """ + ASNError provides an error class to use when there is an issue with an Autonomous System Number. + """ diff --git a/translator/gobgp.py b/translator/gobgp.py index fe0b679b..23ae2572 100644 --- a/translator/gobgp.py +++ b/translator/gobgp.py @@ -4,7 +4,9 @@ import gobgp_pb2 import gobgp_pb2_grpc import grpc +from exceptions import ASNError from google.protobuf.any_pb2 import Any +from shared import asn_is_valid _TIMEOUT_SECONDS = 1000 DEFAULT_ASN = 65400 @@ -76,8 +78,8 @@ def _build_path(self, ip, event_data={}): as_path = Any() as_segment = None - # Make sure our asn is an acceptable number. 
This is the max as stated in rfc6996 - assert 0 < asn < 4294967295 + # Make sure our asn is an acceptable value. + asn_is_valid(asn) as_segment = [attribute_pb2.AsSegment(numbers=[asn])] as_segments = attribute_pb2.AsPathAttribute(segments=as_segment) as_path.Pack(as_segments) @@ -88,6 +90,8 @@ def _build_path(self, ip, event_data={}): # Standard community # Since we pack both into the community string we need to make sure they will both fit if asn < 65536 and community < 65536: + # We bitshift ASN left by 16 so that there is room to add the community on the end of it. This is because + # GoBGP wants the community sent as a single integer. comm_id = (asn << 16) + community communities.Pack(attribute_pb2.CommunitiesAttribute(communities=[comm_id])) else: @@ -120,8 +124,8 @@ def add_path(self, ip, event_data): gobgp_pb2.AddPathRequest(table_type=gobgp_pb2.GLOBAL, path=path), _TIMEOUT_SECONDS, ) - except AssertionError: - logging.warning("ASN assertion failed") + except ASNError as e: + logging.warning(f"ASN assertion failed with error: {e}") def del_all_paths(self): logging.warning("Withdrawing ALL routes") @@ -136,8 +140,8 @@ def del_path(self, ip, event_data): gobgp_pb2.DeletePathRequest(table_type=gobgp_pb2.GLOBAL, path=path), _TIMEOUT_SECONDS, ) - except AssertionError: - logging.warning("ASN assertion failed") + except ASNError as e: + logging.warning(f"ASN assertion failed with error: {e}") def get_prefixes(self, ip): prefixes = [gobgp_pb2.TableLookupPrefix(prefix=str(ip.ip))] diff --git a/translator/requirements.txt b/translator/requirements/base.txt similarity index 100% rename from translator/requirements.txt rename to translator/requirements/base.txt diff --git a/translator/requirements/local.txt b/translator/requirements/local.txt new file mode 100644 index 00000000..74985ed8 --- /dev/null +++ b/translator/requirements/local.txt @@ -0,0 +1,5 @@ +-r base.txt + +debugpy + # Pycharm might force you to be on the same version IDE as library, use caution. +pydevd-pycharm diff --git a/translator/shared.py b/translator/shared.py new file mode 100644 index 00000000..1fe048bd --- /dev/null +++ b/translator/shared.py @@ -0,0 +1,27 @@ +""" +This module provides a location for code that we want to share between all translators. +""" + +from exceptions import ASNError + + +def asn_is_valid(asn: int) -> bool: + """ + asn_is_valid makes sure that an ASN passed in is a valid 2- or 4-byte ASN. + + Args: + asn (int): The Autonomous System Number that we want to validate + + Raises: + ASNError: If the ASN is not between 0 and 4294967295 or is not an integer. + + Returns: + bool: True if the ASN is valid. + """ + if not isinstance(asn, int): + raise ASNError(f"ASN {asn} is not an Integer, has type {type(asn)}") + if not 0 < asn < 4294967295: + # This is the max as stated in rfc6996 + raise ASNError(f"ASN {asn} is out of range.
diff --git a/translator/tests/acceptance/environment.py b/translator/tests/acceptance/environment.py
index f6b95746..1f06b08d 100644
--- a/translator/tests/acceptance/environment.py
+++ b/translator/tests/acceptance/environment.py
@@ -3,3 +3,4 @@

 def before_all(context):
     context.gobgp = GoBGP("gobgp:50051")
+    context.config.setup_logging()
diff --git a/translator/tests/acceptance/features/bgp.feature b/translator/tests/acceptance/features/bgp.feature
index 2779e55f..8a27651d 100644
--- a/translator/tests/acceptance/features/bgp.feature
+++ b/translator/tests/acceptance/features/bgp.feature
@@ -4,23 +4,24 @@ Feature: block with BGP
     Scenario Outline: We can block an IP
         When we add <route> with <asn> and <community> to the block list
         Then <route> is blocked
+        Then we delete <route> with <asn> and <community> from the block list
         And <unblock_ip> is unblocked

     Examples: data
-      | route           | asn        | community | unblock_ip  |
-      | 192.0.2.4/32    | 54321      | 444       | 192.0.2.5   |
-      | 192.0.2.10/32   | 4200000000 | 321       | 192.0.2.11  |
-      | 2001:DB8:A::/64 | 54321      | 444       | baba::      |
-      | 2001:DB8:B::/64 | 4200000000 | 321       | 2001:DB8::4 |
+      | route            | asn        | community  | unblock_ip  |
+      | 192.0.2.4/32     | 54321      | 444        | 192.0.2.5   |
+      | 192.0.2.10/32    | 4200000000 | 321        | 192.0.2.11  |
+      | 2001:DB8:A::/64  | 54321      | 444        | baba::      |
+      | 2001:DB8:B::/64  | 4200000000 | 321        | 2001:DB8::4 |
+      | 192.0.2.20/32    | 4200000000 | 4200000000 | 192.0.2.11  |
+      | 2001:DB8:C::/64  | 4200000000 | 4200000000 | 2001:DB8::4 |
+      | 2001:DB8:C::1/64 | 4200000000 | 4200000000 | 2001:DB8::5 |

-    Scenario Outline: We can block an IP
-        When we add <route> with <asn> and <community> to the block list
-        And we delete <route> with <asn> and <community> from the block list
-        Then <unblock_ip> is unblocked
+    Scenario Outline: Invalid ASNs fail
+        When <route> and <community> with invalid <asn> is sent

     Examples: data
-      | route           | asn        | community | unblock_ip  |
-      | 192.0.2.4/32    | 54321      | 444       | 192.0.2.4   |
-      | 192.0.2.10/32   | 4200000000 | 321       | 192.0.2.11  |
-      | 2001:DB8:A::/64 | 54321      | 444       | 2001:DB8::1 |
-      | 2001:DB8:B::/64 | 4200000000 | 321       | 2001:DB8::4 |
+      | route            | asn                    | community |
+      | 2001:DB8:C::2/64 | 4242424242424242424242 | 100       |
+      | 2001:DB8:C::2/64 | -1                     | 100       |
+      | 2001:DB8:C::2/64 | 0                      | 100       |
diff --git a/translator/tests/acceptance/steps/actions.py b/translator/tests/acceptance/steps/actions.py
index e3e7f7c7..ce340de1 100644
--- a/translator/tests/acceptance/steps/actions.py
+++ b/translator/tests/acceptance/steps/actions.py
@@ -1,7 +1,11 @@
 import ipaddress
+import logging
 import time

 from behave import then, when
+from behave.log_capture import capture
+
+logging.basicConfig(level=logging.DEBUG)


 @when("we add {route} with {asn} and {community} to the block list")
@@ -11,7 +15,7 @@ def add_block(context, route, asn, community):
     ip = ipaddress.ip_interface(route)
     event_data = {"asn": int(asn), "community": int(community)}
     context.gobgp.add_path(ip, event_data)
-@when("we delete {route} with {asn} and {community} from the block list")
+@then("we delete {route} with {asn} and {community} from the block list")
 def del_block(context, route, asn, community):
     ip = ipaddress.ip_interface(route)
     event_data = {"asn": int(asn), "community": int(community)}
@@ -31,6 +35,13 @@ def get_block_status(context, ip):
     return False


+@capture
+@when("{route} and {community} with invalid {asn} is sent")
+def asn_validation_fails(context, route, asn, community):
+    add_block(context, route, asn, community)
+    assert context.log_capture.find_event("ASN assertion failed")
+
+
 @then("{ip} is blocked")
 def check_block(context, ip):
     assert get_block_status(context, ip)
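The "Invalid ASNs fail" scenario depends on log capture rather than exceptions: add_path and del_path swallow the ASNError and log a warning, and the @capture step asserts that the warning was recorded (which is why environment.py now calls context.config.setup_logging()). A minimal sketch (not part of the patch) of the behaviour the step exercises:

import logging

from exceptions import ASNError
from shared import asn_is_valid

try:
    asn_is_valid(4242424242424242424242)  # far larger than any 4-byte ASN
except ASNError as e:
    # This is the message that context.log_capture.find_event() looks for.
    logging.warning(f"ASN assertion failed with error: {e}")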
diff --git a/translator/translator.py b/translator/translator.py
index c9367fae..d0fe2dc9 100644
--- a/translator/translator.py
+++ b/translator/translator.py
@@ -9,9 +9,52 @@
 import websockets
 from gobgp import GoBGP

+# Here we set up a debugger if this is desired. This obviously should not be run in production.
+debug_mode = os.environ.get("DEBUG")
+if debug_mode:
+
+    def install_deps():
+        # Because of how we build translator currently, we don't have a great way to selectively install things at
+        # build, so we just do it here! Right now this also includes base.txt, which is unnecessary, but in the
+        # future when we build a little better, it'll already be set up.
+        logging.info("Installing dependencies for debuggers")
+
+        import subprocess
+        import sys
+
+        subprocess.check_call([sys.executable, "-m", "pip", "install", "-r", "/requirements/local.txt"])
+
+        logging.info("Done installing dependencies for debuggers")
+
+    logging.info(f"Translator is set to use a debugger. Provided debug mode: {debug_mode}")
+    # We have to set up the debugger appropriately for various IDEs. It'd be nice if they all used the same thing but
+    # sadly, we live in a fallen world.
+    if debug_mode == "pycharm-pydevd":
+        logging.info("Entering debug mode for pycharm, make sure the debug server is running in PyCharm!")
+
+        install_deps()
+
+        import pydevd_pycharm
+
+        pydevd_pycharm.settrace("host.docker.internal", port=56782, stdoutToServer=True, stderrToServer=True)
+
+        logging.info("Debugger started.")
+    elif debug_mode == "debugpy":
+        logging.info("Entering debug mode for debugpy (VSCode)")
+
+        install_deps()
+
+        import debugpy
+
+        debugpy.listen(("0.0.0.0", 56781))
+
+        logging.info("Debugger listening on port 56781.")
+    else:
+        logging.warning(f"Invalid debug mode given: {debug_mode}. Debugger not started")
+
 # Must match the URL in asgi.py, and needs a trailing slash
 hostname = os.environ.get("SCRAM_HOSTNAME", "scram_hostname_not_set")
-url = os.environ.get("SCRAM_EVENTS_URL", "ws://django:8000/ws/route_manager/translator_block/")
+url = os.environ.get("SCRAM_EVENTS_URL", "ws://django:5000/ws/route_manager/translator_block/")


 async def main():
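The debugger hooks above are gated on the DEBUG environment variable of the translator container; any other value logs a warning and the translator starts normally. For example, in an env file or compose environment block (where exactly to set this is deployment-specific and not part of the patch):

# Listen for a debugpy (VS Code) client on port 56781:
DEBUG=debugpy
# Or connect out to a PyCharm debug server on host.docker.internal:56782:
# DEBUG=pycharm-pydevd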