Add PEP support via NGINX + JS.

Author: Andrea Dell'Amico, 2021-11-02 19:47:36 +01:00
Parent: 10b13ff505
Commit: eb2c0d0d0d
Signed by: andrea.dellamico (GPG Key ID: 147ABE6CEB9E20FF)
10 changed files with 651 additions and 53 deletions

@@ -1,8 +1,9 @@
---
inception_project_version: 21.1
inception_project_compose_dir: '/srv/inception_project_stack'
inception_project_docker_stack_name: 'inception-project'
inception_project_docker_service_name: 'inception'
inception_project_docker_image: 'inceptionproject/inception:0.17.3'
inception_project_docker_image: 'inceptionproject/inception:{{ inception_project_version }}'
inception_project_docker_network: 'inception_project_net'
inception_project_docker_data_node: 'localhost'
# We need a directory because the config file goes inside it
@@ -10,7 +11,7 @@ inception_project_service_volume: '/srv/inception_project_data'
inception_project_service_port: 8080
# IMPORTANT. Set it to True for the server that is going to host the DB
inception_project_service_constraints: 'node.labels.service_data==inception_project'
inception_project_behind_haproxy: True
#inception_project_behind_haproxy: False
inception_project_haproxy_public_net: 'haproxy-public'
# Settings
inception_project_server_port: 8080
@@ -24,6 +25,11 @@ inception_project_debug_enabled: 'false'
inception_project_disable_crsf: True
# In seconds
inception_project_backup_keep_time: 0
inception_pep: True
inception_authz_audience_name: inception
keycloak_auth_server: https://localhost
#keycloak_auth_credentials_prod: 'use a vault file'
# Choices: preauth, database
inception_project_server_auth_mode: 'database'
inception_project_server_preauth_header: 'remote_user'
inception_project_server_preauth_default_roles: ''
@@ -41,6 +47,9 @@ inception_project_db_user: 'inception_user'
inception_project_db_volume: 'inception_db_data'
inception_project_db_constraints: 'node.labels.mysql_data==inception_project'
inception_project_websockets_enabled: 'false'
inception_project_websockets_loggedevent_enabled: 'false'
inception_project_versioning_enabled: 'false'
inception_project_settings:
- 'debug.showExceptionPage={{ inception_project_debug_enabled }}'
- 'user.profile.accessible=true'
@@ -55,3 +64,12 @@ inception_project_settings:
- 'auth.preauth.header.principal={{ inception_project_server_preauth_header }}'
- 'auth.preauth.newuser.roles={{ inception_project_server_preauth_default_roles }}'
- 'warnings.embeddedDatabase=true'
- 'versioning.enabled={{ inception_project_versioning_enabled }}'
- 'websocket.enabled={{ inception_project_websockets_enabled }}'
- 'websocket.loggedevent.enabled={{ inception_project_websockets_loggedevent_enabled }}'
pep_port: 80
pep_replicas: 1
# hostnames to be used as vhosts
#pep_credentials: in vault
nginx_pep_debug_enabled: False
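
The two commented credential variables are expected to come from an Ansible vault file rather than from these defaults. A minimal sketch of such a vault file, with placeholder values only (keycloak_auth_credentials_prod ends up as the "pep-credentials" entry of config.js and is sent by pep.js as HTTP Basic credentials, so it has to be the base64-encoded client_id:client_secret of the PEP client; pep_credentials is only hinted at by the comment above):

# vault-protected variables, placeholder values only
keycloak_auth_credentials_prod: 'BASE64_CLIENT_ID_COLON_SECRET'
pep_credentials: 'BASE64_CLIENT_ID_COLON_SECRET'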

tasks/inception.yml (new file, +48)

@@ -0,0 +1,48 @@
---
- name: Manage the installation of the Inception configuration of the swarm service
block:
- name: Create the data directory used by the inception service, and its subdirectories
file: dest={{ inception_project_service_volume }}/{{ item }} state=directory
with_items:
- 'repository/kb'
- 'plugins'
- name: Install the inception properties file
template: src=settings.properties.j2 dest={{ inception_project_service_volume }}/settings.properties owner=root group=root mode='0444'
when: inception_project_docker_data_node == ansible_fqdn
tags: [ 'inception_project', 'inception_project_swarm' ]
- name: Manage the installation of the Inception project Docker stack
block:
- name: Create the directory where the compose file will be installed
file: dest={{ inception_project_compose_dir }} state=directory mode='0750' owner=root group=root
- name: Install the docker compose file
template: src=inception-project-docker-compose.yml.j2 dest={{ inception_project_compose_dir }}/docker-inception-project-stack.yml owner=root group=root mode='0400'
- name: Add the label that will be used as a constraint for the inception data volume
docker_node:
hostname: '{{ inception_project_docker_data_node }}'
labels:
service_data: 'inception_project'
labels_state: 'merge'
- name: Add the label that will be used as a constraint for the MySQL DB
docker_node:
hostname: '{{ inception_project_docker_db_node }}'
labels:
mysql_data: 'inception_project'
labels_state: 'merge'
when: inception_project_db_as_container
- name: Start the Inception project stack
docker_stack:
name: inception-project
state: present
compose:
- '{{ inception_project_compose_dir }}/docker-inception-project-stack.yml'
run_once: True
when: docker_swarm_manager_main_node is defined and docker_swarm_manager_main_node | bool
tags: [ 'inception_project', 'inception_project_swarm' ]

@@ -1,48 +1,4 @@
---
- name: Manage the installation of the Inception configuration of the swarm service
block:
- name: Create the data directory used by the inception service, and its subdirectories
file: dest={{ inception_project_service_volume }}/{{ item }} state=directory
with_items:
- 'repository/kb'
- 'plugins'
- name: Install the inception properties file
template: src=settings.properties.j2 dest={{ inception_project_service_volume }}/settings.properties owner=root group=root mode='0444'
when: inception_project_docker_data_node == ansible_fqdn
tags: [ 'inception_project', 'inception_project_swarm' ]
- name: Manage the installation of the Inception project Docker stack
block:
- name: Create the directory where the compose file will be installed
file: dest={{ inception_project_compose_dir }} state=directory mode='0750' owner=root group=root
- name: Install the docker compose file
template: src=inception-project-docker-compose.yml.j2 dest={{ inception_project_compose_dir }}/docker-inception-project-stack.yml owner=root group=root mode='0400'
- name: Add the label that will be used as a constraint for the inception data volume
docker_node:
hostname: '{{ inception_project_docker_data_node }}'
labels:
service_data: 'inception_project'
labels_state: 'merge'
- name: Add the label that will be used as a constraint for the MySQL DB
docker_node:
hostname: '{{ inception_project_docker_db_node }}'
labels:
mysql_data: 'inception_project'
labels_state: 'merge'
when: inception_project_db_as_container
- name: Start the Inception project stack
docker_stack:
name: inception-project
state: present
compose:
- '{{ inception_project_compose_dir }}/docker-inception-project-stack.yml'
run_once: True
when: docker_swarm_manager_main_node is defined and docker_swarm_manager_main_node | bool
tags: [ 'inception_project', 'inception_project_swarm' ]
- include_tasks: inception.yml
- include_tasks: pep.yml
when: inception_pep
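
With main.yml reduced to the two includes above, the PEP tasks only run when inception_pep is truthy. A minimal sketch of a play applying the role with the PEP explicitly enabled; the role name, host group and Keycloak URL below are illustrative assumptions, not part of this commit:

- hosts: docker_swarm_managers
  become: True
  roles:
    - role: inception_project   # assumed role name
      vars:
        inception_pep: True
        keycloak_auth_server: 'https://accounts.example.org'   # assumption: your IAM endpoint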

tasks/pep.yml (new file, +45)

@@ -0,0 +1,45 @@
---
- name: Manage the PEP configuration
block:
- name: Generate PEP config
template:
src: templates/nginx.conf.j2
dest: "{{ inception_project_compose_dir }}/nginx.conf"
- name: Generate PEP default config
template:
src: templates/nginx.default.conf.j2
dest: "{{ inception_project_compose_dir }}/nginx.default.conf"
- name: Generate config.js
template:
src: templates/config.js.j2
dest: "{{ inception_project_compose_dir }}/config.js"
- name: Generate pep.js
template:
src: templates/pep.js.j2
dest: "{{ inception_project_compose_dir }}/pep.js"
- name: Generate pep-docker-swarm
template:
src: templates/pep-swarm.yml.j2
dest: "{{ inception_project_compose_dir }}/inception-pep-stack.yml"
run_once: True
when: docker_swarm_manager_main_node is defined and docker_swarm_manager_main_node | bool
tags: [ 'inception_project', 'inception_project_swarm', 'inception_project_pep' ]
- name: Run the PEP stack
block:
- name: Start the Inception project PEP stack
docker_stack:
name: inception-project
state: present
compose:
- '{{ inception_project_compose_dir }}/inception-pep-stack.yml'
run_once: True
when: docker_swarm_manager_main_node is defined and docker_swarm_manager_main_node | bool
tags: [ 'inception_project', 'inception_project_swarm', 'inception_project_pep' ]

templates/config.js.j2 (new file, +21)

@@ -0,0 +1,21 @@
export default { config, exportBackendHeaders };
function exportBackendHeaders(context){
exportVariable("remote_user", context.authn.verified_token.preferred_username)
return context
}
var config = {
"pep-credentials" : "{{ keycloak_auth_credentials_prod }}",
"hosts" : [
{
"host": "{{ inception_project_docker_service_name }}",
"allow-basic-auth" : true,
"audience" : '{{ inception_authz_audience_name }}',
"paths" : [
{
"name" : "inception",
"path" : "^/?.*$",
}
]
}
]
}

@@ -1,7 +1,7 @@
version: '3.6'
networks:
{% if inception_project_behind_haproxy %}
{% if not inception_pep %}
haproxy-public:
external: true
{% endif %}
@@ -16,14 +16,12 @@ services:
image: {{ inception_project_docker_image }}
networks:
- {{ inception_project_docker_network }}
{% if inception_project_behind_haproxy %}
{% if not inception_pep %}
- haproxy-public
{% endif %}
environment:
- INCEPTION_DB_DIALECT=org.hibernate.dialect.MySQL5InnoDBDialect
# - INCEPTION_DB_DRIVER=com.mysql.jdbc.Driver
- INCEPTION_DB_DRIVER=com.mysql.cj.jdbc.Driver
# - INCEPTION_DB_URL=jdbc:mysql://{{ inception_project_db_host }}:{{ inception_project_db_port }}/{{ inception_project_db_name }}?useSSL=false&useUnicode=true&characterEncoding=UTF-8
- INCEPTION_DB_URL=jdbc:mysql://{{ inception_project_db_host }}:{{ inception_project_db_port }}/{{ inception_project_db_name }}?useUnicode=true&characterEncoding=UTF-8
- INCEPTION_DB_USERNAME={{ inception_project_db_user }}
- INCEPTION_DB_PASSWORD={{ inception_project_db_pwd }}

templates/nginx.conf.j2 (new file, +17)

@@ -0,0 +1,17 @@
load_module modules/ngx_http_js_module.so;
worker_processes 1;
events {
worker_connections 1024;
}
http {
js_import pep.js;
js_set $authorization pep.enforce;
underscores_in_headers on;
proxy_cache_path /var/cache/nginx/pep keys_zone=token_responses:1m max_size=2m;
include /etc/nginx/conf.d/*.conf;
include /etc/nginx/sites-enabled/*;
}

@@ -0,0 +1,94 @@
upstream _inception-server {
ip_hash;
server {{ inception_project_docker_service_name }}:8080;
}
map $http_authorization $source_auth {
default "";
}
js_var $auth_token;
js_var $pep_credentials;
server {
listen *:{{ pep_port }};
server_name {{ inception_project_docker_service_name }};
{% if inception_project_websockets_enabled == 'true' %}
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
{% endif %}
location / {
js_content pep.enforce;
proxy_request_buffering off;
proxy_buffering off; # Required for HTTP-based CLI to work over SSL
proxy_set_header Connection ""; # Clear for keepalive
proxy_set_header Host $host;
proxy_pass http://_inception-server;
}
location @backend {
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
# Required for new HTTP-based CLI
proxy_request_buffering off;
proxy_buffering off; # Required for HTTP-based CLI to work over SSL
proxy_set_header Connection ""; # Clear for keepalive
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Server $host;
proxy_set_header X-Forwarded-Port $server_port;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Original-URI $request_uri;
proxy_set_header Authorization "Bearer $auth_token";
proxy_set_header remote_user "$remote_user";
proxy_pass http://_inception-server;
}
location /gcube_user_info {
internal;
gunzip on;
proxy_method GET;
proxy_http_version 1.1;
resolver 146.48.122.10;
proxy_pass https://api.d4science.org/rest/2/people/profile?gcube-token=$auth_token;
}
location /jwt_verify_request {
internal;
proxy_method POST;
proxy_http_version 1.1;
proxy_set_header Authorization $pep_credentials;
proxy_set_header Content-Type "application/x-www-form-urlencoded";
proxy_pass "{{ iam_host }}/auth/realms/d4science/protocol/openid-connect/token/introspect";
proxy_ignore_headers Cache-Control Expires Set-Cookie;
gunzip on;
proxy_cache token_responses; # Enable caching
proxy_cache_key $source_auth; # Cache for each source authentication
proxy_cache_lock on; # Duplicate tokens must wait
proxy_cache_valid 200 10s; # How long to use each response
}
location /jwt_request {
internal;
proxy_method POST;
proxy_http_version 1.1;
proxy_set_header Authorization $pep_credentials;
proxy_set_header Content-Type "application/x-www-form-urlencoded";
proxy_pass "{{ keycloak_auth_server }}/auth/realms/d4science/protocol/openid-connect/token";
gunzip on;
}
location /permission_request {
internal;
proxy_method POST;
proxy_http_version 1.1;
proxy_set_header Content-Type "application/x-www-form-urlencoded";
proxy_set_header Authorization "Bearer $auth_token";
proxy_pass "{{ keycloak_auth_server }}/auth/realms/d4science/protocol/openid-connect/token";
gunzip on;
}
}

@@ -0,0 +1,40 @@
version: '3.6'
services:
pep:
image: nginx:stable-alpine
networks:
- {{ inception_project_docker_network }}
- haproxy-public
deploy:
replicas: {{ pep_replicas }}
placement:
constraints: [node.role == worker]
endpoint_mode: dnsrr
restart_policy:
condition: on-failure
delay: 10s
window: 120s
configs:
- source: nginxconf
target: /etc/nginx/templates/default.conf.template
- source: nginxbaseconf
target: /etc/nginx/nginx.conf
- source: pep
target: /etc/nginx/pep.js
- source: pepconfig
target: /etc/nginx/config.js
networks:
{{ inception_project_docker_network }}:
haproxy-public:
external: true
configs:
nginxconf:
file: ./nginx.default.conf
nginxbaseconf:
file: ./nginx.conf
pep:
file: ./pep.js
pepconfig:
file: ./config.js

templates/pep.js.j2 (new file, +361)

@@ -0,0 +1,361 @@
export default { enforce };
import defaultExport from './config.js';
function log(c, s){
c.request.error(s)
}
function enforce(r) {
var context = {
request: r ,
config : defaultExport["config"],
backend : (defaultExport.backend ? defaultExport.backend : "@backend"),
export_backend_headers : (defaultExport.backendHeaders ? defaultExport.backendHeaders : wkf.export_backend_headers)
}
{% if nginx_pep_debug_enabled %}
log(context, "Inside NJS enforce for " + r.method + " @ " + r.headersIn.host + "/" + r.uri)
{% endif %}
if(context.request.args.token){
var token = context.request.args.token
{% if nginx_pep_debug_enabled %}
log(context, "token is " + token)
{% endif %}
exportVariable(context, "auth_token", token)
context.request.subrequest("/gcube_user_info")
.then(reply=>{
if (reply.status === 200) {
var response = JSON.parse(reply.responseBody);
{% if nginx_pep_debug_enabled %}
log(context, "got response " + reply.responseBody)
{% endif %}
return response
} else {
log(context, reply.status + " got response " + reply.responseBody)
throw new Error("Unauthorized")
}
}).then(userinfo => {
exportVariable(context, "remote_user", userinfo.result.username)
{% if nginx_pep_debug_enabled %}
log(context, "username is " + userinfo.result.username)
{% endif %}
context.request.internalRedirect(context.backend)
{% if nginx_pep_debug_enabled %}
log(context, "context after setting the username:" + njs.dump(context))
{% endif %}
return context
}).catch(e => { context.request.error("error .... " + njs.dump(e)); context.request.return(401)} )
return
}
context = computeProtection(context)
wkf.run(wkf.build(context), context)
}
// ######## WORKFLOW FUNCTIONS ###############
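// wkf.build() returns the ordered list of step names below; wkf.run() chains the matching
// functions as promises, passing the same context object from step to step and answering 401
// if any step throws.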
var wkf = {
build : (context)=>{
var actions = [
"export_pep_credentials",
"parse_authentication",
"check_authentication",
"export_authn_token",
"pip",
"pdp",
"export_backend_headers",
"pass"
]
return actions
},
run : (actions, context) => {
context.request.error("Starting workflow with " + njs.dump(actions))
var w = actions.reduce(
(acc, f) => acc.then(typeof(f) === "function" ? f : wkf[f]),
Promise.resolve().then(()=>context)
)
w.catch(e => { context.request.error(njs.dump(e)); context.request.return(401)} )
},
export_pep_credentials : exportPepCredentials,
export_authn_token : exportAuthToken,
export_backend_headers : c=>c,
parse_authentication : parseAuthentication,
check_authentication : checkAuthentication,
verify_token : verifyToken,
request_token : requestToken,
pip : pipExecutor,
pdp : pdpExecutor,
pass : pass,
//PIP utilities
"get-path-component" : (c, i) => c.request.uri.split("/")[i],
"get-token-field" : getTokenField,
"get-contexts" : (c) => {
var ra = c.authn.verified_token["resource_access"]
if(ra){
var out = [];
for(var k in ra){
if(ra[k].roles && ra[k].roles.length !== 0) out.push(k)
}
}
return out;
}
}
function getTokenField(context, f){
return context.authn.verified_token[f]
}
function exportVariable(context, name, value){
context.request.variables[name] = value
{% if nginx_pep_debug_enabled %}
log(context, "Exported variables:" + njs.dump(context.request.variables))
{% endif %}
return context
}
function exportPepCredentials(context){
if(!context.config["pep-credentials"]){
throw new Error("Need PEP credentials")
}
return exportVariable(context, "pep_credentials", "Basic " + context.config["pep-credentials"])
}
function exportAuthToken(context){
return exportVariable(context, "auth_token", context.authn.token)
}
function checkAuthentication(context){
return context.authn.type === "bearer" ? wkf.verify_token(context) : wkf.request_token(context)
}
function parseAuthentication(context){
{% if nginx_pep_debug_enabled %}
context.request.log("Inside parseAuthentication")
{% endif %}
var incomingauth = context.request.headersIn["Authorization"]
if(!incomingauth) throw new Error("Authentication required");
var arr = incomingauth.trim().replace(/\s\s+/g, " ").split(" ")
if(arr.length != 2) throw new Error("Unknown authentication scheme");
var type = arr[0].toLowerCase()
if(type === "basic" && context.authz.host && context.authz.host["allow-basic-auth"]){
var unamepass = Buffer.from(arr[1], 'base64').toString().split(":")
if(unamepass.length != 2) return null;
context.authn = { type : type, raw : arr[1], user : unamepass[0], password : unamepass[1]}
return context
}else if(type === "bearer"){
context.authn = { type : type, raw : arr[1], token : arr[1]}
return context
}
throw new Error("Unknown authentication scheme");
}
function verifyToken(context){
{% if nginx_pep_debug_enabled %}
log(context, "Inside verifyToken")
{% endif %}
var options = {
"body" : "token=" + context.authn.token + "&token_type_hint=access_token"
}
return context.request.subrequest("/jwt_verify_request", options)
.then(reply=>{
if (reply.status === 200) {
{% if nginx_pep_debug_enabled %}
log(context, reply.responseBody)
{% endif %}
var response = JSON.parse(reply.responseBody);
if (response.active === true) {
return response
} else {
throw new Error("Unauthorized")
}
} else {
throw new Error("Unauthorized")
}
}).then(verified_token => {
context.authn.verified_token =
JSON.parse(Buffer.from(context.authn.token.split('.')[1], 'base64url').toString())
return context
})
}
function requestToken(context){
{% if nginx_pep_debug_enabled %}
log(context, "Inside requestToken")
{% endif %}
var options = {
"body" : "grant_type=password&username="+context.authn.user+"&password="+context.authn.password
}
return context.request.subrequest("/jwt_request", options)
.then(reply=>{
if (reply.status === 200) {
var response = JSON.parse(reply.responseBody);
context.authn.token = response.access_token
context.authn.verified_token =
JSON.parse(Buffer.from(context.authn.token.split('.')[1], 'base64url').toString())
return context
} else {
throw new Error("Unauthorized")
}
})
}
function pipExecutor(context){
{% if nginx_pep_debug_enabled %}
log(context, "Inside extra claims PIP")
{% endif %}
context.authz.pip.forEach(extra =>{
//call extra claim pip function
try{
var operator = extra.operator
var result = wkf[operator](context, extra.args)
//ensure array and add to extra_claims
if(!(result instanceof Array)) result = [result]
if(!context.extra_claims) context.extra_claims = {};
context.extra_claims[extra.claim] = result
} catch (error){
{% if nginx_pep_debug_enabled %}
log(context, "Skipping invalid extra claim " + njs.dump(error))
{% endif %}
}
})
{% if nginx_pep_debug_enabled %}
log(context, "Extra claims are " + njs.dump(context.extra_claims))
{% endif %}
return context
}
function pdpExecutor(context){
{% if nginx_pep_debug_enabled %}
log(context, "Inside PDP")
{% endif %}
return context.authz.pdp(context)
}
function umaCall(context){
{% if nginx_pep_debug_enabled %}
log(context, "Inside UMA call")
{% endif %}
var options = { "body" : computePermissionRequestBody(context) };
return context.request.subrequest("/permission_request", options)
.then(reply =>{
if(reply.status === 200){
return context
}else{
throw new Error("Response for authorization request is not ok " + reply.status + " " + njs.dump(reply.responseBody))
}
})
}
function pass(context){
{% if nginx_pep_debug_enabled %}
log(context, "Inside pass");
{% endif %}
if(typeof(context.backend) === "string") context.request.internalRedirect(context.backend);
else if (typeof(context.backend) === "function") context.request.internalRedirect(context.backend(context))
return context;
}
// ######## AUTHORIZATION PART ###############
function computePermissionRequestBody(context){
if(!context.authz.host || !context.authz.path ){
throw new Error("Enforcemnt mode is always enforcing. Host or path not found...")
}
var audience = computeAudience(context)
var grant = "grant_type=urn:ietf:params:oauth:grant-type:uma-ticket"
var mode = "response_mode=decision"
var permissions = computePermissions(context)
var extra = ""
if(context.extra_claims){
extra =
"claim_token_format=urn:ietf:params:oauth:token-type:jwt&claim_token=" +
Buffer.from(JSON.stringify(context.extra_claims)).toString("base64url")
}
var body = audience + "&" + grant + "&" + permissions + "&" + mode + "&" + extra
context.request.error("Computed permission request body is " + body)
return body
}
function computeAudience(context){
var aud = context.request.headersIn.host
if(context.authz.host){
aud = context.authz.host.audience||context.authz.host.host
}
return "audience=" + aud
}
function computePermissions(context){
var resource = context.request.uri
if(context.authz.path){
resource = context.authz.path.name||context.authz.path.path
}
var scopes = []
if(context.authz.method && context.authz.method.scopes){
scopes = context.authz.method.scopes
}
if(scopes.length > 0){
return scopes.map(s=>"permission=" + resource + "#" + s).join("&")
}
return "permission=" + resource
}
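// Matching precedence: paths whose regex matches the URI are split into "strong" matches
// (an explicit method entry also matches the request method) and "weak" ones (no methods
// declared); the first strong match wins, otherwise the first weak one.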
function getPath(hostconfig, incomingpath, incomingmethod){
var paths = hostconfig.paths || []
var matchingpaths = paths
.filter(p => {return incomingpath.match(p.path) != null})
.reduce((acc, p) => {
if (!p.methods || p.methods.length === 0) acc.weak.push({ path: p});
else{
var matchingmethods = p.methods.filter(m=>m.method.toUpperCase() === incomingmethod)
if(matchingmethods.length > 0) acc.strong.push({ method : matchingmethods[0], path: p});
}
return acc;
}, { strong: [], weak: []})
return matchingpaths.strong.concat(matchingpaths.weak)[0]
}
function getHost(config, host){
var matching = config.hosts.filter(h=>{
return h.host === host
})
return matching.length > 0 ? matching[0] : null
}
function computeProtection(context){
{% if nginx_pep_debug_enabled %}
log(context, "Getting by host " + context.request.headersIn.host)
{% endif %}
context.authz = {}
context.authz.host = getHost(context.config, context.request.headersIn.host)
if(context.authz.host !== null){
context.authz.pip = context.authz.host.pip ? context.authz.host.pip : [];
context.authz.pdp = context.authz.host.pdp ? context.authz.host.pdp : umaCall;
var pathandmethod = getPath(context.authz.host, context.request.uri, context.request.method);
if(pathandmethod){
context.authz.path = pathandmethod.path;
context.authz.pip = context.authz.path.pip ? context.authz.pip.concat(context.authz.path.pip) : context.authz.pip;
context.authz.pdp = context.authz.path.pdp ? context.authz.path.pdp : context.authz.pdp;
context.authz.method = pathandmethod.method;
if(context.authz.method){
context.authz.pip = context.authz.method.pip ? context.authz.pip.concat(context.authz.method.pip) : context.authz.pip;
context.authz.pdp = context.authz.method.pdp ? context.authz.method.pdp : context.authz.pdp;
}
}
}
{% if nginx_pep_debug_enabled %}
log(context, "Leaving protection computation: ")
{% endif %}
return context
}