diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..e4ca97d --- /dev/null +++ b/Dockerfile @@ -0,0 +1,22 @@ +FROM nginx:stable-alpine + +# The following variables are used to keep the ansible modifications to a minumum. + +ENV DOCKER_FULL_STACK_SERVICE_NAME="localhost" +ENV DOCKER_SERVICE_PORT=8080 +ENV SERVICE_HOST="localhost" +ENV KEYCLOACK_SERVER="localhost" +ENV ACCOUNTING_SERVICE_BASEURL="https://localhost" +ENV SUBREQUEST_OUTPUT_BUFFER_SIZE="8192k" +ENV SHINYPROXY_PROMETHEUS_PORT="9090" +ENV D4SCIENCE_API_ENDPOINT="https://localhost" + + +# config.js and nginx.default.conf.template must be overwritten at deployment time +# with the correct files for the scenario +# +COPY src/nginx.conf /etc/nginx/nginx.conf +COPY src/config.js /etc/nginx/config.js +COPY src/nginx.default.conf.template /etc/nginx/templates/default.conf.template +COPY src/pep.js /etc/nginx/pep.js + diff --git a/Jenkinsfile b/Jenkinsfile index 4ab75cb..2b6993b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -9,18 +9,19 @@ pipeline { label 'docker' } environment { - imagename = "" //TO FILL WITH THE RIGHT VALUE (RepositoryName) e.g. d4science/RepositoryName - registryCredential = 'e348bfab-5580-4db6-b0e0-d854966bde08' - dockerImage = '' - git_url='' // SET HERE THE URL OF YOUR NEW GIT PROJECT + imagename = "pep/nginx-pep" + hubname = "hub.dev.d4science.org" + registryUrl = "https://hub.dev.d4science.org" + registryCredential = 'c4ece6a2-3488-43c4-b74d-714b057845d1' + dockerImage = '' + git_url='https://code-repo.d4science.org/D4Science/pep-container-image.git' } stages { - stage('Cloning Git') { - steps { - git([url: git_url, branch: 'master', credentialsId: '88b54962-1c0e-49cb-8155-22276860f346']) - - } - } + // stage('Cloning Git') { + // steps { + // git([url: git_url, branch: 'main', credentialsId: '']) + // } + // } stage('Building image') { steps{ script { @@ -31,18 +32,17 @@ pipeline { stage('Deploy Image') { steps{ script { - docker.withRegistry( '', registryCredential ) { + docker.withRegistry( registryUrl, registryCredential ) { dockerImage.push("$BUILD_NUMBER") - dockerImage.push('latest') - + dockerImage.push('latest') } } } } stage('Remove Unused docker image') { steps{ - sh "docker rmi $imagename:$BUILD_NUMBER" - sh "docker rmi $imagename:latest" + sh "docker rmi $hubname/$imagename:$BUILD_NUMBER" + sh "docker rmi $hubname/$imagename:latest" } } diff --git a/README.md b/README.md index 3332270..09e71bf 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,10 @@ -# Pipeline-Docker-Template +# PEP image, nginx based -This is a template useful for building docker images and push to dockerhub. -There is a jenkinsjob template on our jenkins with the same name. +This is a nginx image with some bits of a javascript based PEP. +Two files: +* nginx.default.conf.template +* config.js -## USAGE TIPS: +must be overwritten (via ansible): `config.js` as *secret*, `nginx.default.conf.template` as *config*. -* REMEMBER TO FILL the jenkinsfile environment section with your values. 
The following field should be properly filled: imagename, git_url -* REMEMBER TO PUT your Dockerfile in the root folder of your new project - -* You can find the related jenkinsjob template here: https://jenkins.d4science.org/job/Pipeline-Docker-Template/ diff --git a/src/config.js b/src/config.js new file mode 100644 index 0000000..d9509cc --- /dev/null +++ b/src/config.js @@ -0,0 +1,33 @@ +export default { config }; + +var config = { + "pep_credentials" : "pep_credentials", + "debug": true, + "accounting": { + "scope": "authorized_scope", + "service_name": "docker_stack_name", + "host": "service_hostname" + }, + "hosts": [ + { + "host": ["service_hostname"], + "audience": "oidc_client_id", + "allow-basic-auth": "true/false", + "paths": [ + { + "name": "oidc_client_resource_name", + "path": "^/?.*$", + "methods": [ + { + "method": "GET" + }, + { + "method": "POST" + } + ] + } + ] + } + ] +} + diff --git a/src/nginx.conf b/src/nginx.conf new file mode 100644 index 0000000..75e2bcb --- /dev/null +++ b/src/nginx.conf @@ -0,0 +1,43 @@ +# Added to load njs module +load_module modules/ngx_http_js_module.so; + +user nginx; +worker_processes auto; + +error_log /var/log/nginx/error.log info; +pid /var/run/nginx.pid; + + +events { + worker_connections 1024; +} + +# declare environmental variable to get credentials requiredd to access keycloak +env PEP_CREDENTIALS; + +http { + + # added to import pep script + js_import pep.js; + + # added to bind enforce function + js_set $authorization pep.enforce_legacy; + + include /etc/nginx/mime.types; + default_type application/octet-stream; + + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_referer" ' + '"$http_user_agent" "$http_x_forwarded_for"'; + + access_log /var/log/nginx/access.log main; + + sendfile on; + #tcp_nopush on; + + keepalive_timeout 65; + + #gzip on; + + include /etc/nginx/conf.d/*.conf; +} diff --git a/src/nginx.default.conf.template b/src/nginx.default.conf.template new file mode 100644 index 0000000..619aefd --- /dev/null +++ b/src/nginx.default.conf.template @@ -0,0 +1,170 @@ +# Websockets +map $http_upgrade $connection_upgrade { + default upgrade; + '' close; +} + +# Prometheus metrics +upstream prometheus { + ip_hash; + server {{ shinyproxy_as_docker_stack_name }}_{{ shinyproxy_as_docker_service_name }}:{{ shinyproxy_prometheus_port }}; +} + +# backend service +upstream service { + ip_hash; + server ${DOCKER_FULL_STACK_SERVICE_NAME}:${DOCKER_SERVICE_PORT}; +} + + +# variables computed by njs and which may possibly be passed among locations +js_var $auth_token; +js_var $account_record; +js_var $pep_credentials; + +proxy_cache_path /tmp levels=1:2 keys_zone=social_cache:10m max_size=10g inactive=60m use_temp_path=off; + + +server { + + listen *:80; + listen [::]:80; + + server_name ${SERVICE_HOST}; + + subrequest_output_buffer_size ${SUBREQUEST_OUTPUT_BUFFER_SIZE}; + + proxy_hide_header X-Frame-Options; + add_header X-Frame-Options ""; + proxy_hide_header Content-Security-Policy; + add_header Content-Security-Policy ""; + + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + location /health { + add_header Content-Length 0; + add_header Content-Type "text/plain"; + return 200; + } + + proxy_http_version 1.1; + proxy_set_header Connection ""; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-Host "$remote_addr"; + proxy_set_header X-Forwarded-Server $host; + proxy_set_header nginx-request-uri $request_uri; + 
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_buffering on; + proxy_buffer_size 8k; + proxy_buffers 4 8k; + proxy_busy_buffers_size 16k; + proxy_temp_file_write_size 16k; + proxy_redirect off; + proxy_connect_timeout 30s; + proxy_read_timeout 2400s; + proxy_send_timeout 120s; + + + location ~ /app/ { + proxy_read_timeout 300; + proxy_send_timeout 300; + js_content pep.enforce_legacy; + } + + + + location /gcube_user_info { + internal; + gunzip on; + proxy_method GET; + proxy_http_version 1.1; + proxy_set_header gcube-token "$auth_token"; + proxy_pass https://api.d4science.org/rest/2/people/profile; + + proxy_cache social_cache; + proxy_cache_key $auth_token; + } + + + location / { + access_log /var/log/nginx/proxy.access.log; + proxy_set_header Host $http_host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-NginX-Proxy true; + proxy_cache_bypass $http_upgrade; + # resolver 127.0.0.11; + proxy_pass http://service$request_uri; + } + + +# internal location that redirects to backend will only be called from PEP JS code when all checks are passed + location /_backend { + internal; + proxy_set_header Host $http_host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-NginX-Proxy true; + proxy_cache_bypass $http_upgrade; + # proxy_redirect off; + proxy_pass http://service$request_uri; + } + + + # internal location that redirects to Keycloak in order to verify a token's validity. This will be called only from PEP JS code + location /jwt_verify_request { + internal; + proxy_method POST; + proxy_http_version 1.1; + proxy_set_header Authorization $pep_credentials; + proxy_set_header Content-Type "application/x-www-form-urlencoded"; + proxy_pass "https://${KEYCLOACK_SERVER}/auth/realms/d4science/protocol/openid-connect/token/introspect"; + + proxy_ignore_headers Cache-Control Expires Set-Cookie; + gunzip on; + } + + # internal location that redirects to Keycloak in order to exchange Basic auth credentials with token. This will be called only from PEP JS code + location /jwt_request { + internal; + proxy_method POST; + proxy_http_version 1.1; + proxy_set_header Authorization $pep_credentials; + proxy_set_header Content-Type "application/x-www-form-urlencoded"; + proxy_pass "https://${KEYCLOACK_SERVER}/auth/realms/d4science/protocol/openid-connect/token"; + gunzip on; + } + + # internal location that redirects to Keycloak in order to perform a specific authorization request. This will be called only from PEP JS code + location /permission_request { + internal; + proxy_method POST; + proxy_http_version 1.1; + proxy_set_header Content-Type "application/x-www-form-urlencoded"; + proxy_set_header Authorization "Bearer $auth_token"; + proxy_pass "https://${KEYCLOACK_SERVER}/auth/realms/d4science/protocol/openid-connect/token"; + gunzip on; + } + + # internal location that sends a record to accounting service. This will be called only from PEP JS code if requested. 
+    location /accounting {
+        internal;
+        proxy_method POST;
+        proxy_http_version 1.1;
+        proxy_set_header Authorization "Bearer $auth_token";
+        proxy_set_header Content-Type "application/json";
+        proxy_pass "${ACCOUNTING_SERVICE_BASEURL}/record";
+    }
+
+    location /_accounting_legacy {
+        internal;
+        proxy_method POST;
+        proxy_http_version 1.1;
+        proxy_set_header gcube-token "$auth_token";
+        proxy_set_header Content-Type "application/json";
+        proxy_pass ${ACCOUNTING_SERVICE_BASEURL}/record;
+    }
+
+
+}
diff --git a/src/pep.js b/src/pep.js
new file mode 100644
index 0000000..8b9a042
--- /dev/null
+++ b/src/pep.js
@@ -0,0 +1,563 @@
+export default { enforce_legacy };
+
+import defaultExport from './config.js';
+
+function log(c, s) {
+    c.request.log(s)
+}
+
+var _debug = defaultExport["config"]["debug"]
+// var _debug = true   // uncomment to force debug logging regardless of config.js
+
+njs.dump(_debug);
+
+function debug(c, s) {
+    if (_debug === true) {
+        log(c, s)
+    }
+}
+
+function enforce(r) {
+
+    var context = {
+        request: r,
+        config: defaultExport["config"],
+        backend: (defaultExport.backend ? defaultExport.backend : "/_backend")
+    }
+
+    log(context, "Inside NJS enforce for " + r.method + " @ " + r.headersIn.host + r.uri)
+
+    context = computeProtection(context)
+
+    wkf.run(wkf.build(context), context)
+}
+
+function enforce_legacy(r) {
+    var context = {
+        request: r,
+        config: defaultExport["config"]
+    }
+
+    debug(context, JSON.stringify(context.config["accounting"], null, 2));
+    var allowedcontexts = [context.config["accounting"]["scope"]]
+    log(context, "Inside NJS enforce for " + r.method + " @ " + r.headersIn.host + r.uri)
+    debug(context, "debug is " + JSON.stringify(defaultExport["config"]))
+    const token = getGCubeToken(context)
+    debug(context, JSON.stringify(context, null, 2))
+    debug(context, "gcube token is " + token)
+    if (token != null) {
+        debug(context, "[PEP] token is " + token)
+        exportVariable(context, "auth_token", token)
+        context.request.subrequest("/_gcube_user_info")
+            .then(reply => {
+                if (reply.status === 200) {
+                    debug(context, "[Social Service] got response " + reply.responseText)
+                    // var response = JSON.parse(reply.responseBody);
+                    var response = JSON.parse(reply.responseText);
+                    if (allowedcontexts.indexOf(response.result.context) === -1) {
+                        debug(context, "[PEP] Unauthorized context " + response.result.context)
+                        throw new Error("Unauthorized")
+                    }
+                    return response
+                } else {
+                    log(context, "[Social Service] failed " + reply.status + ":" + reply.responseText)
+                    throw new Error("Unauthorized")
+                }
+            }).then(userinfo => {
+                debug(context, "[Social Service] username is " + userinfo.result.username)
+                debug(context, njs.dump(context));
+                context.userinfo = userinfo
+                context.record = buildAccountingRecord_legacy(context)
+                return context.request.subrequest("/_backend", { method: context.request.method, args: JSON.stringify(context.request.args), headers: context.request.headersIn })
+            }).then(reply => {
+                debug(context, reply.responseText);
+                debug(context, "[SHINYPROXY] response status: " + reply.status)
+                copyHeaders(context, reply.headersOut, r.headersOut)
+                closeAccountingRecord_legacy(context.record, (reply.status === 200 || reply.status === 201 || reply.status === 204))
+                context.request.subrequest("/_accounting_legacy", { detached: true, body: JSON.stringify([context.record]) })
+                // r.return(reply.status, reply.responseBody)
+                debug(context, "Redirect URI: " + reply.headersOut["Location"])
+                if(reply.status === 301 || reply.status === 302){
+                    debug(context, "about to return a redirect");
+                    r.return(reply.status, 
reply.headersOut["Location"]) + }else{ + r.return(reply.status, reply.responseText) + } + }).catch(e => { log(context, "Error .... " + njs.dump(e)); context.request.return(e.message === "Unauthorized" ? 403 : 500) }) + return + } + r.return(401, "Authorization required") +} + +function copyHeaders(context, hin, hout){ + for (var h in hin) { + if(h !== "Location") hout[h] = hin[h]; + } +} + +// ######## WORKFLOW FUNCTIONS ############### +var wkf = { + + build: (context) => { + var actions = [ + "export_pep_credentials", + "parse_authentication", + "check_authentication", + "export_authn_token", + // "pip", + "pdp", + // "export_backend_headers", + // "pass" + "start_accounting", + "pass_and_wait", + "close_accounting", + "send_accounting", + "respond_to_client" + ] + /*var actions = [ + "export_pep_credentials", + "parse_authentication", + "check_authentication", + "export_authn_token", + "pip", + "pdp", + "export_backend_headers", + "start_accounting", + "pass_and_wait", + "close_accounting", + "respond_to_client" + ]*/ + return actions + }, + + run: (actions, context) => { + context.request.log("Starting workflow with " + njs.dump(actions)) + var w = actions.reduce( + (acc, f) => { return acc.then(typeof (f) === "function" ? f : wkf[f]) }, + Promise.resolve().then(() => context) + ) + w.catch(e => { context.request.error(njs.dump(e)); context.request.return(401) }) + }, + + export_pep_credentials: exportPepCredentials, + export_authn_token: exportAuthToken, + export_backend_headers: exportBackendHeaders, + parse_authentication: parseAuthentication, + check_authentication: checkAuthentication, + verify_token: verifyToken, + request_token: requestToken, + pip: pipExecutor, + pdp: pdpExecutor, + pass: pass, + pass_and_wait: pass_and_wait, + respond_to_client: respondToClient, + start_accounting: buildAccountingRecord, + close_accounting: closeAccountingRecord, + send_accounting: sendAccountingRecord +} + +function getGCubeToken(context) { + if (context.request.args["gcube-token"]) { + return context.request.args["gcube-token"]; + } else if (context.request.headersIn['gcube-token']) { + return context.request.headersIn['gcube-token']; + } + return null; +} + + +function getTokenField(context, f) { + return context.authn.verified_token[f] +} + +function exportVariable(context, name, value) { + context.request.variables[name] = value + return context +} + +function exportBackendHeaders(context) { + return context +} + +function exportPepCredentials(context) { + if (process.env["pep_credentials"] || process.env["PEP_CREDENTIALS"]) { + return exportVariable(context, "pep_credentials", "Basic " + process.env["PEP_CREDENTIALS"]) + } else if (context.config["pep_credentials"]) { + return exportVariable(context, "pep_credentials", "Basic " + context.config["pep_credentials"]) + } else { + throw new Error("Need PEP credentials") + } +} + +function exportAuthToken(context) { + return exportVariable(context, "auth_token", context.authn.token) +} + +function checkAuthentication(context) { + return context.authn.type === "bearer" ? 
wkf.verify_token(context) : wkf.request_token(context) +} + +function parseAuthentication(context) { + context.request.log("Inside parseAuthentication") + context.request.log(JSON.stringify(context.request.args, null, 2)) + var incomingauth = context.request.headersIn["Authorization"] + + if (!incomingauth) throw new Error("Authentication required"); + + var arr = incomingauth.trim().replace(/\s\s+/g, " ").split(" ") + if (arr.length != 2) throw new Error("Unknown authentication scheme"); + + var type = arr[0].toLowerCase() + if (type === "basic" && context.authz.host && (context.authz.host["allow-basic-auth"] || context.authz.host["allow_basic_auth"])) { + var unamepass = Buffer.from(arr[1], 'base64').toString().split(":") + if (unamepass.length != 2) return null; + context.authn = { type: type, raw: arr[1], user: unamepass[0], password: unamepass[1] } + return context + } else if (type === "bearer") { + context.authn = { type: type, raw: arr[1], token: arr[1] } + return context + } + throw new Error("Unknown authentication scheme"); +} + +function verifyToken(context) { + log(context, "Inside verifyToken") + debug(context, "Token is " + context.authn.token) + var options = { + "body": "token=" + context.authn.token + "&token_type_hint=access_token" + } + return context.request.subrequest("/_jwt_verify_request", options) + .then(reply => { + if (reply.status === 200) { + var response = JSON.parse(reply.responseText); + if (response.active === true) { + return response + } else { + throw new Error("Unauthorized: " + reply.responseText) + } + } else { + throw new Error("Unauthorized: " + reply.responseText) + } + }).then(verified_token => { + context.authn.verified_token = + JSON.parse(Buffer.from(context.authn.token.split('.')[1], 'base64url').toString()) + return context + }) +} + +function requestToken(context) { + log(context, "Inside requestToken") + var options = { + "body": "grant_type=client_credentials&client_id=" + context.authn.user + "&client_secret=" + context.authn.password + } + return context.request.subrequest("/_jwt_request", options) + .then(reply => { + if (reply.status === 200) { + var response = JSON.parse(reply.responseText); + context.authn.token = response.access_token + context.authn.verified_token = + JSON.parse(Buffer.from(context.authn.token.split('.')[1], 'base64url').toString()) + return context + } else if (reply.status === 400 || reply.status === 401) { + var options = { + "body": "grant_type=password&username=" + context.authn.user + "&password=" + context.authn.password + } + return context.request.subrequest("/_jwt_request", options) + .then(reply => { + if (reply.status === 200) { + var response = JSON.parse(reply.responseText); + context.authn.token = response.access_token + context.authn.verified_token = + JSON.parse(Buffer.from(context.authn.token.split('.')[1], 'base64url').toString()) + return context + } else { + throw new Error("Unauthorized " + reply.status) + } + }) + } else { + throw new Error("Unauthorized " + reply.status) + } + }) +} + +function pipExecutor(context) { + log(context, "Inside extra claims PIP") + context.authz.pip.forEach(extra => { + //call extra claim pip function + try { + var operator = extra.operator + var result = wkf[operator](context, extra.args) + //ensure array and add to extra_claims + if (!(result instanceof Array)) result = [result] + if (!context.extra_claims) context.extra_claims = {}; + context.extra_claims[extra.claim] = result + } catch (error) { + log(context, "Skipping invalid extra claim " + 
njs.dump(error))
+        }
+    })
+    log(context, "Extra claims are " + njs.dump(context.extra_claims))
+    return context
+}
+
+function pdpExecutor(context) {
+    log(context, "Inside PDP")
+    return context.authz.pdp(context)
+}
+
+function umaCall(context) {
+    log(context, "Inside UMA call")
+    var options = { "body": computePermissionRequestBody(context) };
+    return context.request.subrequest("/_permission_request", options)
+        .then(reply => {
+            if (reply.status === 200) {
+                debug(context, "UMA call reply is " + reply.status)
+                return context
+            } else {
+                throw new Error("Response for authorization request is not ok " + reply.status + " " + njs.dump(reply.responseText))
+            }
+        })
+}
+
+// Call the backend and return its reply to the client directly
+async function pass(context) {
+    log(context, "Inside pass");
+    const r = context.request
+    const reply = await r.subrequest(context.backend, { method: r.method, args: r.variables.args, headers: r.headersIn })
+    debug(context, "[BACKEND] response status: " + reply.status)
+    context.backendresponse = reply
+    return respondToClient(context)
+}
+
+// Pass the request to the backend but, instead of returning to the client right away, keep the reply. This makes it possible to perform extra actions afterwards, such as closing and sending the accounting record.
+async function pass_and_wait(context) {
+    log(context, "Inside pass and wait");
+    const r = context.request
+    const reply = await r.subrequest(context.backend, { method: r.method, args: r.variables.args, headers: r.headersIn })
+    debug(context, "[BACKEND] response status: " + reply.status)
+    context.backendresponse = reply
+    return context
+}
+
+// Definitively return the backend response to the client
+function respondToClient(context) {
+    log(context, "Inside respond to client " + njs.dump(context.backendresponse));
+    for (let k in context.backendresponse.headersOut) {
+        context.request.headersOut[k] = context.backendresponse.headersOut[k]
+    }
+    context.request.return(context.backendresponse.status, context.backendresponse.responseText)
+    return context
+}
+
+// ######## AUTHORIZATION PART ###############
+function computePermissionRequestBody(context) {
+
+    if (!context.authz.host || !context.authz.path) {
+        throw new Error("Enforcement mode is always enforcing. 
Host or path not found...") + } + + var audience = computeAudience(context) + var grant = "grant_type=urn:ietf:params:oauth:grant-type:uma-ticket" + var mode = "response_mode=decision" + var permissions = computePermissions(context) + var extra = "" + if (context.extra_claims) { + extra = + "claim_token_format=urn:ietf:params:oauth:token-type:jwt&claim_token=" + + JSON.stringify(context.extra_claims).toString("base64url") + } + var body = audience + "&" + grant + "&" + permissions + "&" + mode + "&" + extra + context.request.error("Computed permission request body is " + body) + return body +} + +function computeAudience(context) { + var aud = context.request.headersIn.host + if (context.authz.host) { + aud = context.authz.host.audience || context.authz.host.host + } + return "audience=" + aud +} + +function computePermissions(context) { + var resource = context.request.uri + if (context.authz.path) { + resource = context.authz.path.name || context.authz.path.path + } + var scopes = [] + if (context.authz.method && context.authz.method.scopes) { + scopes = context.authz.method.scopes + } + if (scopes.length > 0) { + return scopes.map(s => "permission=" + resource + "#" + s).join("&") + } + return "permission=" + resource +} + +function getPath(hostconfig, incomingpath, incomingmethod) { + var paths = hostconfig.paths || [] + var matchingpaths = paths + .filter(p => { return incomingpath.match(p.path) != null }) + .reduce((acc, p) => { + if (!p.methods || p.methods.length === 0) acc.weak.push({ path: p }); + else { + var matchingmethods = p.methods.filter(m => m.method.toUpperCase() === incomingmethod) + if (matchingmethods.length > 0) acc.strong.push({ method: matchingmethods[0], path: p }); + } + return acc; + }, { strong: [], weak: [] }) + return matchingpaths.strong.concat(matchingpaths.weak)[0] +} + +function getHost(config, host) { + var matching = config.hosts.filter(h => { + //compare for both string and array of strings + return ((h.host.filter && h.host.indexOf(host) !== -1) || h.host === host) + }) + return matching.length > 0 ? matching[0] : null +} + +function computeProtection(context) { + debug(context, "Getting by host " + context.request.headersIn.host) + context.authz = {} + context.authz.host = getHost(context.config, context.request.headersIn.host) + if (context.authz.host !== null) { + log(context, "Host found: " + njs.dump(context.authz.host)) + context.authz.pip = context.authz.host.pip ? context.authz.host.pip : []; + context.authz.pdp = context.authz.host.pdp ? context.authz.host.pdp : umaCall; + var pathandmethod = getPath(context.authz.host, context.request.uri, context.request.method); + if (pathandmethod) { + context.authz.path = pathandmethod.path; + context.authz.pip = context.authz.path.pip ? context.authz.pip.concat(context.authz.path.pip) : context.authz.pip; + context.authz.pdp = context.authz.path.pdp ? context.authz.path.pdp : context.authz.pdp; + context.authz.method = pathandmethod.method; + if (context.authz.method) { + context.authz.pip = context.authz.method.pip ? context.authz.pip.concat(context.authz.method.pip) : context.authz.pip; + context.authz.pdp = context.authz.method.pdp ? 
context.authz.method.pdp : context.authz.pdp; + } + } + } + debug(context, "Leaving protection computation: ") + return context +} + +// ####### ACCOUNTING PART ##################### +function buildAccountingRecord(context) { + log(context, "Inside build accounting record"); + const t = (new Date()).getTime() + context.record = { + "recordType": "ServiceUsageRecord", + "operationCount": 1, + "creationTime": t, + "callerHost": context.request.headersIn["x-forwarded-for"], + "serviceClass": "Application", + "callerQualifier": "TOKEN", + "consumerId": getTokenField(context, "preferred_username"), + "aggregated": true, + "serviceName": defaultExport["accounting"]["service_name"], + "duration": 0, + "maxInvocationTime": 0, + "scope": defaultExport["accounting"]["scope"], + "host": defaultExport["accounting"]["host"], + "startTime": t, + "id": uuid(), + "calledMethod": context.request.method + " " + context.request.uri, + "endTime": 0, + "minInvocationTime": 0, + "operationResult": null + } + debug(context, "Record is " + JSON.stringify(context.record)) + return context +} + +function __buildAccountingRecord_legacy(context) { + log(context, "Inside build accounting record"); + const t = (new Date()).getTime() + context.record = { + "recordType": "ServiceUsageRecord", + "operationCount": 1, + "creationTime": t, + "callerHost": context.request.headersIn["x-forwarded-for"], + "serviceClass": "Application", + "callerQualifier": "TOKEN", + "consumerId": context.userinfo.username, + "aggregated": true, + "serviceName": context.config["accounting"]["service_name"], + "duration": 0, + "maxInvocationTime": 0, + "scope": context.config["accounting"]["scope"], + "host": context.config["accounting"]["host"], + "startTime": t, + "id": uuid(), + "calledMethod": context.request.method + " " + context.request.uri, + "endTime": 0, + "minInvocationTime": 0, + "operationResult": null + } + log(context, "Record is " + JSON.stringify(context.record)) + return context +} + +function buildAccountingRecord_legacy(context) { + const t = (new Date()).getTime() + return { + "recordType": "ServiceUsageRecord", + "operationCount": 1, + "creationTime": t, + "callerHost": context.request.remoteAddress, + "serviceClass": "ShinyApp", + "callerQualifier": "TOKEN", + "consumerId": context.userinfo.username, + "aggregated": true, + "serviceName": context.request.uri.split("app/")[1], + "duration": 0, + "maxInvocationTime": 0, + "scope": context.userinfo.context, + "host": context.request.headersIn.host, + "startTime": t, + "id": uuid(), + "calledMethod": context.request.method + " " + context.request.uri, + "endTime": 0, + "minInvocationTime": 0, + "operationResult": null + } +} + +function closeAccountingRecord_legacy(record, success) { + const t = (new Date()).getTime() + record.duration = t - record.startTime + record.endTime = t + record.minInvocationTime = record.duration + record.operationResult = success ? "SUCCESS" : "FAILED"; +} + +function closeAccountingRecord(context, success) { + log(context, "Inside close accounting"); + const t = (new Date()).getTime() + context.record.duration = t - context.record.startTime + context.record.endTime = t + context.record.minInvocationTime = context.record.duration + context.record.operationResult = success ? 
"SUCCESS" : "FAILED"; + log(context, "Record is " + njs.dump(context.record)) + return context +} + +async function sendAccountingRecord(context) { + log(context, "Inside send accounting"); + const detached = !_debug; + const subreq_params = { detached: detached, body: JSON.stringify([context.record]) }; + if (_debug) { + // debug(context, JSON.stringify(subreq_params)); + const acc_request = await context.request.subrequest("/_accounting", subreq_params); + debug(context, "[ACCOUNTING] response status: " + JSON.stringify(acc_request)); + } else { + context.request.subrequest("/_accounting", subreq_params); + } + return context +} + +function uuid() { + return 'xxxxxxxx-xxxx-4xxx-8xxx-xxxxxxxxxxxx'.replace(/[x]/g, function (c) { + const r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8); + return v.toString(16); + }); +} \ No newline at end of file