initial commit
43
dev/tekton/README.md
Executable file
@@ -0,0 +1,43 @@
# Installing tekton-pipelines:

kubectl apply --filename \
https://storage.googleapis.com/tekton-releases/pipeline/latest/release.yaml

# Installing tekton-dashboard:

kubectl apply --filename \
https://storage.googleapis.com/tekton-releases/dashboard/latest/release-full.yaml

# Then:

kubectl apply -f ingressroute-tls.yaml

Tasks you will often need:

kubectl apply -f https://api.hub.tekton.dev/v1/resource/tekton/task/git-clone/0.10/raw
kubectl apply -f https://api.hub.tekton.dev/v1/resource/tekton/task/kaniko/0.7/raw
kubectl apply -f https://api.hub.tekton.dev/v1/resource/tekton/task/buildah/0.9/raw
kubectl apply -f https://api.hub.tekton.dev/v1/resource/tekton/task/maven/0.4/raw
kubectl apply -f https://api.hub.tekton.dev/v1/resource/tekton/task/sonarqube-scanner/0.4/raw
kubectl apply -f https://raw.githubusercontent.com/tektoncd/catalog/main/task/syft/0.1/syft.yaml
kubectl apply -f https://raw.githubusercontent.com/tektoncd/catalog/main/task/grype/0.1/grype.yaml
kubectl apply -f https://raw.githubusercontent.com/tektoncd/catalog/main/task/argocd-task-sync-and-wait/0.2/argocd-task-sync-and-wait.yaml
kubectl apply -f /home/ubuntu/containers/kubernetes/dev/tekton/tasks/push-sbom-task.yaml
kubectl apply -f /home/ubuntu/containers/kubernetes/dev/tekton/tasks/register-change-task.yaml
kubectl apply -f /home/ubuntu/containers/kubernetes/dev/tekton/tasks/curl-task.yaml
kubectl apply -f /home/ubuntu/containers/kubernetes/dev/tekton/tasks/argocd/argocd-task-synt-and-wait.yaml

## For this last task you also need to create a ConfigMap and a Secret!
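
The argocd-task-sync-and-wait catalog task reads the ArgoCD server address from a ConfigMap and the credentials from a Secret. A minimal sketch, assuming the names used by the catalog task (`argocd-env-configmap` and `argocd-env-secret`) and placeholder values; check the task YAML for the exact keys it expects:

```
apiVersion: v1
kind: ConfigMap
metadata:
  name: argocd-env-configmap        # name the catalog task expects (verify against the task YAML)
data:
  ARGOCD_SERVER: argocd-server.argocd.svc.cluster.local   # placeholder: your ArgoCD server address
---
apiVersion: v1
kind: Secret
metadata:
  name: argocd-env-secret           # name the catalog task expects (verify against the task YAML)
stringData:
  ARGOCD_USERNAME: admin            # placeholder credentials
  ARGOCD_PASSWORD: changeme         # placeholder; an ARGOCD_AUTH_TOKEN can be used instead
```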

# To use the tkn command line:

microk8s config > admin.conf
sudo mv admin.conf /etc/kubernetes/
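
With the kubeconfig in place, the `tkn` CLI (installed separately) can talk to the cluster; a few typical commands, as a sketch:

```
tkn pipeline list
tkn pipelinerun list
tkn pipelinerun logs --last -f
```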

# Tekton triggers:

kubectl apply --filename \
https://storage.googleapis.com/tekton-releases/triggers/latest/release.yaml
kubectl apply --filename \
https://storage.googleapis.com/tekton-releases/triggers/latest/interceptors.yaml
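
After installing the triggers release and interceptors, incoming webhooks are handled by an EventListener that refers to a TriggerBinding and a TriggerTemplate. A minimal sketch with hypothetical names (github-listener, github-binding, github-template) and a service account that must be allowed to create PipelineRuns:

```
apiVersion: triggers.tekton.dev/v1beta1
kind: EventListener
metadata:
  name: github-listener                    # hypothetical name
spec:
  serviceAccountName: tekton-triggers-sa   # hypothetical; needs RBAC to create PipelineRuns
  triggers:
    - name: github-push
      bindings:
        - ref: github-binding              # hypothetical TriggerBinding
      template:
        ref: github-template               # hypothetical TriggerTemplate
```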
19
dev/tekton/catalog-info.yaml
Normal file
@@ -0,0 +1,19 @@
apiVersion: backstage.io/v1alpha1
kind: Component
metadata:
  name: dev-tekton
  title: tekton (dev)
  description: tekton instance running in Kubernetes
  annotations:
    backstage.io/kubernetes-label-selector: "app=tekton"
  links:
    - url: https://github.com/AllardKrings/kubernetes/dev/tekton
      title: tekton-configuration
  docs:
    - url: ./README.md
spec:
  type: service
  lifecycle: production
  owner: group:default/allarddcs
  subcomponentOf: component:default/DEV-cluster
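
Because of the backstage.io/kubernetes-label-selector annotation, the Backstage Kubernetes plugin only picks up resources that carry the app=tekton label. As a hedged example (the label can also be set directly in the manifests), the dashboard deployment could be labelled like this:

```
kubectl label deployment tekton-dashboard app=tekton -n tekton-pipelines
```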
52
dev/tekton/clean.yaml
Executable file
@@ -0,0 +1,52 @@
---
apiVersion: v1
kind: ServiceAccount
metadata:
  name: cleaner
---
kind: Role
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: cleaner
rules:
  - apiGroups: ["tekton.dev"]
    resources: ["pipelineruns"]
    verbs: ["delete", "get", "watch", "list"]
---
kind: RoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: cleaner-to-cleaner
roleRef:
  kind: Role
  name: cleaner
  apiGroup: rbac.authorization.k8s.io
subjects:
  - kind: ServiceAccount
    name: cleaner
---
apiVersion: batch/v1
kind: CronJob
metadata:
  name: cleanup-pipelineruns
spec:
  schedule: "*/15 * * * *"
  concurrencyPolicy: Forbid
  jobTemplate:
    spec:
      template:
        spec:
          restartPolicy: OnFailure
          serviceAccount: cleaner
          containers:
            - name: kubectl
              image: bitnami/kubectl:latest
              env:
                - name: NUM_TO_KEEP
                  value: "1"
              command:
                - /bin/bash
                - -c
                - |
                  TO_DELETE="$(kubectl get pipelinerun -o jsonpath='{range .items[?(@.status.completionTime)]}{.status.completionTime}{" "}{.metadata.name}{"\n"}{end}' | sort | head -n -${NUM_TO_KEEP} | awk '{ print $2}')"
                  test -n "$TO_DELETE" && kubectl delete pipelinerun ${TO_DELETE} || true
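
The shell one-liner in this CronJob keeps only the newest completed runs. A commented sketch of the same logic:

```
# List completed PipelineRuns as "<completionTime> <name>", oldest first,
# drop the newest $NUM_TO_KEEP entries from the list (those runs are kept),
# and print only the names; the job then deletes exactly those PipelineRuns.
kubectl get pipelinerun \
  -o jsonpath='{range .items[?(@.status.completionTime)]}{.status.completionTime}{" "}{.metadata.name}{"\n"}{end}' \
  | sort | head -n -"${NUM_TO_KEEP}" | awk '{ print $2 }'
```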
53
dev/tekton/cleanup-pipelineruns.yaml
Executable file
@@ -0,0 +1,53 @@
---
apiVersion: v1
kind: ServiceAccount
metadata:
  name: cleaner
---
kind: Role
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: cleaner
rules:
  - apiGroups: ["tekton.dev"]
    resources: ["pipelineruns"]
    verbs: ["delete", "get", "watch", "list"]
---
kind: RoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: cleaner-to-cleaner
roleRef:
  kind: Role
  name: cleaner
  apiGroup: rbac.authorization.k8s.io
subjects:
  - kind: ServiceAccount
    name: cleaner
---
apiVersion: batch/v1beta1
kind: CronJob
metadata:
  name: cleanup-pipelineruns
spec:
  schedule: "*/15 * * * *"
  concurrencyPolicy: Forbid
  jobTemplate:
    spec:
      template:
        spec:
          restartPolicy: OnFailure
          serviceAccount: cleaner
          containers:
            - name: kubectl
              image: ghcr.io/ctron/kubectl:latest
              env:
                - name: NUM_TO_KEEP
                  value: "3"
              command:
                - /bin/bash
                - -c
                - |
                  TO_DELETE="$(kubectl get pipelinerun -o jsonpath='{range .items[?(@.status.completionTime)]}{.status.completionTime}{" "}{.metadata.name}{"\n"}{end}' | sort | head -n -${NUM_TO_KEEP} | awk '{ print $2}')"
                  test -n "$TO_DELETE" && kubectl delete pipelinerun ${TO_DELETE} || true
31
dev/tekton/dashboard-patch.yaml
Executable file
@@ -0,0 +1,31 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: tekton-dashboard-tutorial
rules:
  - apiGroups:
      - tekton.dev
    resources:
      - tasks
      - taskruns
      - pipelines
      - pipelineruns
    verbs:
      - get
      - create
      - update
      - patch
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: tekton-dashboard-tutorial
  namespace: default
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: tekton-dashboard-tutorial
subjects:
  - kind: ServiceAccount
    name: default
    namespace: tekton-dashboard
337
dev/tekton/dashboard.yaml
Executable file
@@ -0,0 +1,337 @@
|
||||
apiVersion: apiextensions.k8s.io/v1
|
||||
kind: CustomResourceDefinition
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
name: extensions.dashboard.tekton.dev
|
||||
spec:
|
||||
group: dashboard.tekton.dev
|
||||
names:
|
||||
categories:
|
||||
- tekton
|
||||
- tekton-dashboard
|
||||
kind: Extension
|
||||
plural: extensions
|
||||
shortNames:
|
||||
- ext
|
||||
- exts
|
||||
preserveUnknownFields: false
|
||||
scope: Namespaced
|
||||
versions:
|
||||
- additionalPrinterColumns:
|
||||
- jsonPath: .spec.apiVersion
|
||||
name: API version
|
||||
type: string
|
||||
- jsonPath: .spec.name
|
||||
name: Kind
|
||||
type: string
|
||||
- jsonPath: .spec.displayname
|
||||
name: Display name
|
||||
type: string
|
||||
- jsonPath: .metadata.creationTimestamp
|
||||
name: Age
|
||||
type: date
|
||||
name: v1alpha1
|
||||
schema:
|
||||
openAPIV3Schema:
|
||||
type: object
|
||||
x-kubernetes-preserve-unknown-fields: true
|
||||
served: true
|
||||
storage: true
|
||||
subresources:
|
||||
status: {}
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
name: tekton-dashboard
|
||||
namespace: tekton-pipelines
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: Role
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
name: tekton-dashboard-info
|
||||
namespace: tekton-pipelines
|
||||
rules:
|
||||
- apiGroups:
|
||||
- ""
|
||||
resourceNames:
|
||||
- dashboard-info
|
||||
resources:
|
||||
- configmaps
|
||||
verbs:
|
||||
- get
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRole
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
name: tekton-dashboard-backend
|
||||
rules:
|
||||
- apiGroups:
|
||||
- apiextensions.k8s.io
|
||||
resources:
|
||||
- customresourcedefinitions
|
||||
verbs:
|
||||
- get
|
||||
- list
|
||||
- apiGroups:
|
||||
- security.openshift.io
|
||||
resources:
|
||||
- securitycontextconstraints
|
||||
verbs:
|
||||
- use
|
||||
- apiGroups:
|
||||
- tekton.dev
|
||||
resources:
|
||||
- clustertasks
|
||||
verbs:
|
||||
- get
|
||||
- list
|
||||
- watch
|
||||
- update
|
||||
- apiGroups:
|
||||
- triggers.tekton.dev
|
||||
resources:
|
||||
- clusterinterceptors
|
||||
- clustertriggerbindings
|
||||
verbs:
|
||||
- get
|
||||
- list
|
||||
- watch
|
||||
- update
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRole
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
name: tekton-dashboard-tenant
|
||||
rules:
|
||||
- apiGroups:
|
||||
- dashboard.tekton.dev
|
||||
resources:
|
||||
- extensions
|
||||
verbs:
|
||||
- get
|
||||
- list
|
||||
- watch
|
||||
- update
|
||||
- apiGroups:
|
||||
- ""
|
||||
resources:
|
||||
- events
|
||||
- namespaces
|
||||
- pods
|
||||
- pods/log
|
||||
verbs:
|
||||
- get
|
||||
- list
|
||||
- watch
|
||||
- update
|
||||
- apiGroups:
|
||||
- tekton.dev
|
||||
resources:
|
||||
- tasks
|
||||
- taskruns
|
||||
- pipelines
|
||||
- pipelineruns
|
||||
- customruns
|
||||
verbs:
|
||||
- get
|
||||
- list
|
||||
- watch
|
||||
- update
|
||||
- apiGroups:
|
||||
- triggers.tekton.dev
|
||||
resources:
|
||||
- eventlisteners
|
||||
- interceptors
|
||||
- triggerbindings
|
||||
- triggers
|
||||
- triggertemplates
|
||||
verbs:
|
||||
- get
|
||||
- list
|
||||
- watch
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: RoleBinding
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
name: tekton-dashboard-info
|
||||
namespace: tekton-pipelines
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: Role
|
||||
name: tekton-dashboard-info
|
||||
subjects:
|
||||
- apiGroup: rbac.authorization.k8s.io
|
||||
kind: Group
|
||||
name: system:authenticated
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRoleBinding
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
rbac.dashboard.tekton.dev/subject: tekton-dashboard
|
||||
name: tekton-dashboard-backend
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: ClusterRole
|
||||
name: tekton-dashboard-backend
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: tekton-dashboard
|
||||
namespace: tekton-pipelines
|
||||
---
|
||||
apiVersion: v1
|
||||
data:
|
||||
version: v0.36.0
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
name: dashboard-info
|
||||
namespace: tekton-pipelines
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
labels:
|
||||
app: tekton-dashboard
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/name: dashboard
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
app.kubernetes.io/version: v0.36.0
|
||||
dashboard.tekton.dev/release: v0.36.0
|
||||
version: v0.36.0
|
||||
name: tekton-dashboard
|
||||
namespace: tekton-pipelines
|
||||
spec:
|
||||
ports:
|
||||
- name: http
|
||||
port: 9097
|
||||
protocol: TCP
|
||||
targetPort: 9097
|
||||
selector:
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/name: dashboard
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
labels:
|
||||
app: tekton-dashboard
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/name: dashboard
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
app.kubernetes.io/version: v0.36.0
|
||||
dashboard.tekton.dev/release: v0.36.0
|
||||
version: v0.36.0
|
||||
name: tekton-dashboard
|
||||
namespace: tekton-pipelines
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/name: dashboard
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: tekton-dashboard
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/name: dashboard
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
app.kubernetes.io/version: v0.36.0
|
||||
name: tekton-dashboard
|
||||
spec:
|
||||
containers:
|
||||
- args:
|
||||
- --port=9097
|
||||
- --logout-url=
|
||||
- --pipelines-namespace=tekton-pipelines
|
||||
- --triggers-namespace=tekton-pipelines
|
||||
- --read-only=no
|
||||
- --log-level=info
|
||||
- --log-format=json
|
||||
- --namespace=
|
||||
- --stream-logs=true
|
||||
- --external-logs=
|
||||
env:
|
||||
- name: INSTALLED_NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.namespace
|
||||
image: gcr.io/tekton-releases/github.com/tektoncd/dashboard/cmd/dashboard:v0.36.0@sha256:e7058eabec6bc53bfb3505b637ea6208e6e81ff71a29a5f47a32fa0ed03cb5e4
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /health
|
||||
port: 9097
|
||||
name: tekton-dashboard
|
||||
ports:
|
||||
- containerPort: 9097
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /readiness
|
||||
port: 9097
|
||||
securityContext:
|
||||
allowPrivilegeEscalation: false
|
||||
capabilities:
|
||||
drop:
|
||||
- ALL
|
||||
runAsGroup: 65532
|
||||
runAsNonRoot: true
|
||||
runAsUser: 65532
|
||||
seccompProfile:
|
||||
type: RuntimeDefault
|
||||
nodeSelector:
|
||||
kubernetes.io/os: linux
|
||||
serviceAccountName: tekton-dashboard
|
||||
volumes: []
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRoleBinding
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/component: dashboard
|
||||
app.kubernetes.io/instance: default
|
||||
app.kubernetes.io/part-of: tekton-dashboard
|
||||
rbac.dashboard.tekton.dev/subject: tekton-dashboard
|
||||
name: tekton-dashboard-tenant
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: ClusterRole
|
||||
name: tekton-dashboard-tenant
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: tekton-dashboard
|
||||
namespace: tekton-pipelines
|
||||
2
dev/tekton/delete-succeeded.sh
Executable file
@@ -0,0 +1,2 @@
microk8s kubectl delete pod --field-selector=status.phase==Succeeded
microk8s kubectl delete pod --field-selector=status.phase==Failed
2
dev/tekton/examples/buildah/Dockerfile
Executable file
@@ -0,0 +1,2 @@
FROM alpine
ENTRYPOINT echo hallo Allard Krings
15
dev/tekton/examples/buildah/README.md
Executable file
@@ -0,0 +1,15 @@
This is a pipeline for buildah.

Note:

use the buildah task from Tekton itself:

kubectl apply -f \
https://api.hub.tekton.dev/v1/resource/tekton/task/buildah/0.5/raw

This task has a workspace "dockerconfig" that the pipeline binds to the
workspace "dockerconfig-ws", which in turn is bound in the pipelinerun to
a secret "dockerconfig-secret" defined as in dockerconfig-secret.yaml.
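
To run the example end to end, the secret and pipeline are applied and a run is created; because pipeline-run.yaml uses generateName, it must be created rather than applied. A sketch, assuming the files in this directory:

```
kubectl apply -f dockerconfig-secret.yaml
kubectl apply -f pipeline.yaml
kubectl create -f pipeline-run.yaml
```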
19
dev/tekton/examples/buildah/buildah-workspace-pv.yaml
Executable file
@@ -0,0 +1,19 @@
|
||||
apiVersion: v1
|
||||
kind: PersistentVolume
|
||||
metadata:
|
||||
name: buildah-workspace-pv
|
||||
spec:
|
||||
storageClassName: ""
|
||||
capacity:
|
||||
storage: 1Gi
|
||||
accessModes:
|
||||
- ReadWriteMany
|
||||
persistentVolumeReclaimPolicy: Retain
|
||||
mountOptions:
|
||||
- hard
|
||||
- nfsvers=4.1
|
||||
nfs:
|
||||
server: 192.168.40.100
|
||||
path: /mnt/nfs_share/tekton-buildah
|
||||
readOnly: false
|
||||
|
||||
16
dev/tekton/examples/buildah/buildah-workspace-pvc.yaml
Executable file
@@ -0,0 +1,16 @@
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: buildah-workspace-pvc
|
||||
spec:
|
||||
storageClassName: ""
|
||||
volumeName: buildah-workspace-pv
|
||||
accessModes:
|
||||
- ReadWriteMany
|
||||
volumeMode: Filesystem
|
||||
resources:
|
||||
requests:
|
||||
storage: 1Gi
|
||||
|
||||
|
||||
|
||||
2
dev/tekton/examples/buildah/create-secret-userid-password.sh
Executable file
@@ -0,0 +1,2 @@
microk8s kubectl create secret generic harbor-userid-password \
  --from-literal='Username=admin' --from-literal='Password=Harbor01@'
6
dev/tekton/examples/buildah/docker-credentials.yaml
Executable file
@@ -0,0 +1,6 @@
apiVersion: v1
kind: Secret
metadata:
  name: docker-credentials
data:
  config.json: ewoJImF1dGhzIjogewoJCSJodHRwczovL2luZGV4LmRvY2tlci5pby92MS8iOiB7CgkJCSJhdXRoIjogIllXeHNZWEprYTNKcGJtZHpPa3QxWW1WeWJtVjBaWE13TVVBPSIKCQl9Cgl9Cn0=
13
dev/tekton/examples/buildah/dockerconfig-secret.yaml
Executable file
@@ -0,0 +1,13 @@
apiVersion: v1
kind: Secret
metadata:
  name: dockerconfig-secret
stringData:
  config.json: |
    {
      "auths": {
        "harbor.alldcs.nl": {
          "auth": "YWRtaW46SGFyYm9yMDFA"
        }
      }
    }
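
The auth value inside such a config.json is the base64 encoding of username:password for the registry. A hedged sketch with placeholder credentials:

```
echo -n 'myuser:mypassword' | base64
```

Alternatively, `kubectl create secret docker-registry` can generate an equivalent pull secret in one step.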
4
dev/tekton/examples/buildah/medium/helloallard/Dockerfile
Executable file
@@ -0,0 +1,4 @@
FROM alpine

ENTRYPOINT echo hallo Allard Krings
#
32
dev/tekton/examples/buildah/medium/pipeline.yaml
Executable file
@@ -0,0 +1,32 @@
|
||||
apiVersion: tekton.dev/v1beta1
|
||||
kind: Pipeline
|
||||
metadata:
|
||||
name: tutorial-pipeline
|
||||
spec:
|
||||
workspaces:
|
||||
- name: myworkspace
|
||||
tasks:
|
||||
- name: fetch-repository
|
||||
taskRef:
|
||||
name: git-clone
|
||||
workspaces:
|
||||
- name: output
|
||||
workspace: myworkspace
|
||||
params:
|
||||
- name: url
|
||||
value: https://github.com/AllardKrings/helloallard.git
|
||||
- name: deleteExisting
|
||||
value: "true"
|
||||
- name: build
|
||||
taskRef:
|
||||
name: buildah
|
||||
runAfter:
|
||||
- fetch-repository
|
||||
params:
|
||||
- name: IMAGE
|
||||
value: harbor.alldcs.nl/allard/helloallard
|
||||
- name: IMAGE_PUSH_SECRET_NAME
|
||||
value: harbor-credentials
|
||||
workspaces:
|
||||
- name: source
|
||||
workspace: myworkspace
|
||||
16
dev/tekton/examples/buildah/medium/pipelinerun.yaml
Executable file
@@ -0,0 +1,16 @@
|
||||
apiVersion: tekton.dev/v1beta1
|
||||
kind: PipelineRun
|
||||
metadata:
|
||||
generateName: tutorial-pipeline-
|
||||
spec:
|
||||
pipelineRef:
|
||||
name: tutorial-pipeline
|
||||
workspaces:
|
||||
- name: myworkspace
|
||||
volumeClaimTemplate:
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 50Mi
|
||||
24
dev/tekton/examples/buildah/pipeline-run.yaml
Executable file
@@ -0,0 +1,24 @@
apiVersion: tekton.dev/v1beta1
kind: PipelineRun
metadata:
  generateName: buildah-clone-build-push-
spec:
  pipelineRef:
    name: buildah-clone-build-push
  workspaces:
    - name: myworkspace
      volumeClaimTemplate:
        spec:
          accessModes:
            - ReadWriteOnce
          resources:
            requests:
              storage: 50Mi
    - name: dockerconfig-ws
      secret:
        secretName: dockerconfig-secret
  params:
    - name: repo-url
      value: https://github.com/AllardKrings/helloallard.git
    - name: image-reference
      value: harbor.alldcs.nl/allard/hello-allard:1.0
35
dev/tekton/examples/buildah/pipeline.yaml
Executable file
@@ -0,0 +1,35 @@
apiVersion: tekton.dev/v1beta1
kind: Pipeline
metadata:
  name: buildah-clone-build-push
spec:
  workspaces:
    - name: myworkspace
    - name: dockerconfig-ws
  tasks:
    - name: fetch-repository
      taskRef:
        name: git-clone
      workspaces:
        - name: output
          workspace: myworkspace
      params:
        - name: url
          value: https://github.com/AllardKrings/helloallard.git
        - name: deleteExisting
          value: "true"
    - name: build
      taskRef:
        name: buildah
      runAfter:
        - fetch-repository
      params:
        - name: IMAGE
          value: harbor.alldcs.nl/allard/helloallard:1.0
        - name: TLSVERIFY
          value: "false"
      workspaces:
        - name: source
          workspace: myworkspace
        - name: dockerconfig
          workspace: dockerconfig-ws
63
dev/tekton/examples/chains/kaniko-chains.yaml
Executable file
@@ -0,0 +1,63 @@
|
||||
apiVersion: tekton.dev/v1beta1
|
||||
kind: Task
|
||||
metadata:
|
||||
name: kaniko-chains
|
||||
spec:
|
||||
description: >-
|
||||
This Task builds a simple Dockerfile with kaniko and pushes to a registry.
|
||||
This Task stores the image name and digest as results, allowing Tekton Chains to pick up
|
||||
that an image was built & sign it.
|
||||
params:
|
||||
- name: IMAGE
|
||||
description: Name (reference) of the image to build.
|
||||
- name: DOCKERFILE
|
||||
description: Path to the Dockerfile to build.
|
||||
default: ./Dockerfile
|
||||
- name: CONTEXT
|
||||
description: The build context used by Kaniko.
|
||||
default: ./
|
||||
- name: EXTRA_ARGS
|
||||
default: ""
|
||||
- name: BUILDER_IMAGE
|
||||
description: The image on which builds will run (default is v1.5.1)
|
||||
default: gcr.io/kaniko-project/executor:v1.5.1@sha256:c6166717f7fe0b7da44908c986137ecfeab21f31ec3992f6e128fff8a94be8a5
|
||||
workspaces:
|
||||
- name: source
|
||||
description: Holds the context and Dockerfile
|
||||
- name: dockerconfig
|
||||
description: Includes a docker `config.json`
|
||||
optional: true
|
||||
mountPath: /kaniko/.docker
|
||||
results:
|
||||
- name: IMAGE_DIGEST
|
||||
description: Digest of the image just built.
|
||||
- name: IMAGE_URL
|
||||
description: URL of the image just built.
|
||||
steps:
|
||||
- name: add-dockerfile
|
||||
workingDir: $(workspaces.source.path)
|
||||
image: bash
|
||||
script: |
|
||||
set -e
|
||||
echo "FROM alpine@sha256:69e70a79f2d41ab5d637de98c1e0b055206ba40a8145e7bddb55ccc04e13cf8f" | tee $(params.DOCKERFILE)
|
||||
- name: build-and-push
|
||||
workingDir: $(workspaces.source.path)
|
||||
image: $(params.BUILDER_IMAGE)
|
||||
args:
|
||||
- $(params.EXTRA_ARGS)
|
||||
- --dockerfile=$(params.DOCKERFILE)
|
||||
- --context=$(workspaces.source.path)/$(params.CONTEXT) # The user does not need to care the workspace and the source.
|
||||
- --destination=$(params.IMAGE)
|
||||
- --digest-file=$(results.IMAGE_DIGEST.path)
|
||||
# kaniko assumes it is running as root, which means this example fails on platforms
|
||||
# that default to run containers as random uid (like OpenShift). Adding this securityContext
|
||||
# makes it explicit that it needs to run as root.
|
||||
securityContext:
|
||||
runAsUser: 0
|
||||
- name: write-url
|
||||
image: bash
|
||||
script: |
|
||||
set -e
|
||||
echo $(params.IMAGE) | tee $(results.IMAGE_URL.path)
|
||||
securityContext:
|
||||
runAsUser: 0
|
||||
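
When Tekton Chains is installed, it picks up the IMAGE_URL and IMAGE_DIGEST results of this task and signs the TaskRun. A hedged way to check is to look for the chains.tekton.dev/signed annotation on the finished TaskRun:

```
kubectl get taskrun <taskrun-name> -o yaml | grep chains.tekton.dev/signed
# expected when signing succeeded: chains.tekton.dev/signed: "true"
```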
7
dev/tekton/examples/example-bank/.env.template
Executable file
@@ -0,0 +1,7 @@
APP_ID_IAM_APIKEY=
APP_ID_MANAGEMENT_URL=
APP_ID_CLIENT_ID=
APP_ID_CLIENT_SECRET=
APP_ID_TOKEN_URL=
PROXY_USER_MICROSERVICE=user-service:9080
PROXY_TRANSACTION_MICROSERVICE=transaction-service:9080
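
Once the template is filled in and renamed to .env, the same values can also be loaded into the cluster with kubectl's --from-env-file flag; the secret name below is only a hypothetical example, and the README further down describes which secrets the services actually expect:

```
kubectl create secret generic bank-appid-env --from-env-file=.env
```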
5
dev/tekton/examples/example-bank/.gitignore
vendored
Executable file
@@ -0,0 +1,5 @@
node_modules
.env
.~/
.DS_Store
.vscode
14
dev/tekton/examples/example-bank/CONTRIBUTING.md
Executable file
@@ -0,0 +1,14 @@
|
||||
# Contributing
|
||||
|
||||
This is an open source project, and we appreciate your help!
|
||||
|
||||
We use the GitHub issue tracker to discuss new features and non-trivial bugs.
|
||||
|
||||
In addition to the issue tracker, [#journeys on
|
||||
Slack](https://dwopen.slack.com) is the best way to get into contact with the
|
||||
project's maintainers.
|
||||
|
||||
To contribute code, documentation, or tests, please submit a pull request to
|
||||
the GitHub repository. Generally, we expect two maintainers to review your pull
|
||||
request before it is approved for merging. For more details, see the
|
||||
[MAINTAINERS](MAINTAINERS.md) page.
|
||||
15
dev/tekton/examples/example-bank/Dockerfile
Executable file
@@ -0,0 +1,15 @@
FROM docker.io/library/node:12.16.1-alpine

ENV NODE_ENV production
ENV PORT 8080

RUN mkdir /app
COPY public /app/public
COPY app.js /app/
COPY package.json /app/package.json
COPY routes /app/routes
WORKDIR /app
RUN npm install

CMD ["node", "app.js"]
201
dev/tekton/examples/example-bank/LICENSE
Executable file
@@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
69
dev/tekton/examples/example-bank/MAINTAINERS.md
Executable file
@@ -0,0 +1,69 @@
|
||||
# Maintainers Guide
|
||||
|
||||
This guide is intended for maintainers - anybody with commit access to one or
|
||||
more Code Pattern repositories.
|
||||
|
||||
## Methodology
|
||||
|
||||
This repository does not have a traditional release management cycle, but
|
||||
should instead be maintained as a useful, working, and polished reference at
|
||||
all times. While all work can therefore be focused on the master branch, the
|
||||
quality of this branch should never be compromised.
|
||||
|
||||
The remainder of this document details how to merge pull requests to the
|
||||
repositories.
|
||||
|
||||
## Merge approval
|
||||
|
||||
The project maintainers use LGTM (Looks Good To Me) in comments on the pull
|
||||
request to indicate acceptance prior to merging. A change requires LGTMs from
|
||||
two project maintainers. If the code is written by a maintainer, the change
|
||||
only requires one additional LGTM.
|
||||
|
||||
## Reviewing Pull Requests
|
||||
|
||||
We recommend reviewing pull requests directly within GitHub. This allows a
|
||||
public commentary on changes, providing transparency for all users. When
|
||||
providing feedback be civil, courteous, and kind. Disagreement is fine, so long
|
||||
as the discourse is carried out politely. If we see a record of uncivil or
|
||||
abusive comments, we will revoke your commit privileges and invite you to leave
|
||||
the project.
|
||||
|
||||
During your review, consider the following points:
|
||||
|
||||
### Does the change have positive impact?
|
||||
|
||||
Some proposed changes may not represent a positive impact to the project. Ask
|
||||
whether or not the change will make understanding the code easier, or if it
|
||||
could simply be a personal preference on the part of the author (see
|
||||
[bikeshedding](https://en.wiktionary.org/wiki/bikeshedding)).
|
||||
|
||||
Pull requests that do not have a clear positive impact should be closed without
|
||||
merging.
|
||||
|
||||
### Do the changes make sense?
|
||||
|
||||
If you do not understand what the changes are or what they accomplish, ask the
|
||||
author for clarification. Ask the author to add comments and/or clarify test
|
||||
case names to make the intentions clear.
|
||||
|
||||
At times, such clarification will reveal that the author may not be using the
|
||||
code correctly, or is unaware of features that accommodate their needs. If you
|
||||
feel this is the case, work up a code sample that would address the pull
|
||||
request for them, and feel free to close the pull request once they confirm.
|
||||
|
||||
### Does the change introduce a new feature?
|
||||
|
||||
For any given pull request, ask yourself "is this a new feature?" If so, does
|
||||
the pull request (or associated issue) contain narrative indicating the need
|
||||
for the feature? If not, ask them to provide that information.
|
||||
|
||||
Are new unit tests in place that test all new behaviors introduced? If not, do
|
||||
not merge the feature until they are! Is documentation in place for the new
|
||||
feature? (See the documentation guidelines). If not do not merge the feature
|
||||
until it is! Is the feature necessary for general use cases? Try and keep the
|
||||
scope of any given component narrow. If a proposed feature does not fit that
|
||||
scope, recommend to the user that they maintain the feature on their own, and
|
||||
close the request. You may also recommend that they see if the feature gains
|
||||
traction among other users, and suggest they re-submit when they can show such
|
||||
support.
|
||||
495
dev/tekton/examples/example-bank/README.md
Executable file
@@ -0,0 +1,495 @@
|
||||
# Building a data privacy focused mobile back-end
|
||||
|
||||
In this pattern, we show how to deploy a microservice based back-end in OpenShift 4.3. To simulate a series of mobile views, we deploy a Node.js based service.
|
||||
|
||||
## Introduction
|
||||
|
||||
As people become more aware of data and concerned about their online privacy, regulations around the world have started requiring software projects to think about how customers' data is handled. This pattern deploys a set of microservices to act as a back-end for a mobile bank application, such as those often used by businesses that want to better understand how people use their services by collecting data. Although inspired by regulations such as the GDPR (Europe's General Data Protection Regulation), as this is not a real public-facing application, we implement a few data privacy features as a way of demonstrating how one might go about building a privacy-focused back-end in OpenShift 4.
|
||||
|
||||
The GDPR standard defines requirements around what operations need to be available to users ("subjects"). However, GDPR is technology neutral, so it ends up being the responsibility of the implementors to build the architecture that implements the requirements. In addition, with the move toward microservice architectures and containerization, we have technology such as service mesh that may be useful in the context of a data privacy service.
|
||||
|
||||
## Included Components
|
||||
|
||||
- [IBM Managed OpenShift](https://www.ibm.com/cloud/openshift)
|
||||
- [OpenLiberty](https://openliberty.io)
|
||||
- [App ID](https://www.ibm.com/cloud/app-id)
|
||||
- [LogDNA](https://www.ibm.com/cloud/log-analysis)
|
||||
|
||||
# Prerequisites
|
||||
|
||||
1. Log in, or create an account on [IBM Cloud](https://cloud.ibm.com)
|
||||
2. Provision an OpenShift 4.3 cluster on [IBM Cloud](https://cloud.ibm.com/docs/openshift?topic=openshift-openshift_tutorial)
|
||||
3. Create a [project](https://docs.openshift.com/container-platform/4.3/applications/projects/configuring-project-creation.html) called `example-bank`.
|
||||
|
||||
## Why OpenShift?
|
||||
|
||||
OpenShift is Red Hat's customized distribution of Kubernetes. With OpenShift, you get everything that you know and love about Kubernetes, plus a few extra features, such as OperatorHub for finding and installing new in-cluster services and a convenient CLI for navigating between different projects. For a quick look at the new features, see [Intro to OpenShift 4](https://developer.ibm.com/articles/intro-to-openshift-4/).
|
||||
|
||||
## Project Requirements
|
||||
|
||||
In this pattern, we will be looking to build a hypothetical credit card rewards back-end for a financial organization that wants to encourage the use of credit cards by allowing their users to earn points from their transactions.
|
||||
|
||||
Credit card rewards programs are common for businesses that want to incentivize customers to use credit frequently. As regulations come online, users typically have the ability to opt-out of data collection efforts. In addition, users want the ability to delete data.
|
||||
|
||||
We have implemented a few important data privacy features inspired by real data privacy regulations:
|
||||
|
||||
* Authorization verification with IBM App ID
|
||||
* Right to erasure: implemented via a special Kubernetes `CronJob` that checks for deletion requests every 24h.
|
||||
* Consent for data collection - requiring users to 'opt-in' requirement.
|
||||
* Logging: IBM LogDNA is used to aggregate log data from back-end services, making it possible to review user activity as well as monitor usage.
|
||||
|
||||
# Architecture
|
||||
|
||||
The example bank system includes several microservices for handling user authentication and transaction mechanics.
|
||||
|
||||

|
||||
|
||||
|
||||
## Introduction to the Mobile Simulator
|
||||
|
||||
|
||||
|
||||
The JavaScript simulator app presents a Web based view of a mobile app run by a Node service running inside the OpenShift cluster. <br>
|
||||
|
||||
| | | | | |
|
||||
|:-------------------------:|:-------------------------:|:-------------------------:|:-------------------------:|:-------------------------:|
|
||||
|<img width="1000" alt="sim1" src="images/loyalty-phone.png"> 1. Home screen |<img width="1000" alt="sim1" src="images/loyalty-bank.png"> 1. Login screen | <img width="1000" alt="sim2" src="images/loyalty-transactions.png"> 2. Transactions dashboard | <img width="1000" alt="sim3" src="images/loyalty-spending.png"> 3. Analysis | <img width="1000" alt="sim4" src="images/loyalty-profile.png"> 4. Account |
|
||||
|
||||
<strong>Home screen</strong><br>
|
||||
|
||||
The mobile simulator home screen has a fictitious banking application that the user can create accounts on. The other apps generate transactions for the chosen category.
|
||||
|
||||
<strong>Login screen</strong><br>
|
||||
|
||||
From the dropdown menu inside the simulated phone app, pick one of the available accounts, and click **sign in** to see that user's point accumulation.
|
||||
|
||||
<strong>Transactions dashboard</strong><br>
|
||||
|
||||
This section shows transactions data for the logged in user.
|
||||
|
||||
<strong>Analysis</strong><br>
|
||||
|
||||
This section shows how much the user spent on each category, based on the generated transactions.
|
||||
|
||||
<strong>Account</strong><br>
|
||||
|
||||
From this page, the user can delete his data.
|
||||
|
||||
## User authentication
|
||||
|
||||

|
||||
|
||||
1. The user creates an account using the mobile app simulator. This hits an API from the nodejs server. The nodejs server then hits an API from the App ID service that would create the user's account in its own cloud directory.
|
||||
2. The mobile app simulator then logs in the user after account creation. The App ID service then creates valid access tokens and ID tokens for the user. The mobile app stores these tokens for later use in authentication.
|
||||
3. Using the access token from the previous step, the mobile app can now successfully call the protected APIs in the Liberty microservice. The mobile app calls the API with the access token in the authorization header to create the user profile in the database.
|
||||
4. The Liberty service is integrated with the App ID instance. This verifies the access token in the authorization header from the request.
|
||||
5. When the token is valid, the user profile is created in the database. The access token contains the user ID of the user that sent the request.
|
||||
|
||||
|
||||
- Auth token flow with AppId as identity provider and Liberty's use of token to
|
||||
authenticate users:
|
||||
|
||||
The Liberty microservices are protected APIs that require authorization headers. If a request does not carry one, it is not processed and a 401 Unauthorized response is returned. The microservices use a managed identity provider, App ID, for this authentication, which makes it easier to protect the APIs and manage users' identity information.
|
||||
|
||||
The mobile app simulator is integrated with the App ID instance and whenever a user logs in, the app receives access tokens and stores them for later use in requests to the protected APIs. The tokens expire in an hour by default which would require users to authenticate again after expiration.
|
||||
|
||||
Whenever a request with a token in the authorization header is sent, the Liberty microservice uses the App ID integration to make sure that the token is valid. Then it continues to process the request. The liberty microservice also makes use of the subject ID or user ID in the token to identify which user is making the request. For example, when a user asks for his number of points earned, it needs to pull the right profile from the database. This is where the user ID in the token payload can be made use of.
|
||||
|
||||
# Deployment
|
||||
|
||||
There are two options for deployment: an automated deployment process driven by Tekton pipelines, and a manual process driven by CLI. In either case, the following common steps should be completed first:
|
||||
|
||||
1. Create an OpenShift 4.3 cluster.
|
||||
2. Complete the PostgreSQL database deployment process (see below).
|
||||
3. Follow the App ID configuration below.
|
||||
4. Set up the required Kubernetes secrets for each service.
|
||||
|
||||
### Automated deployment
|
||||
|
||||
The steps to use the Tekton pipelines are described [here](https://developer.ibm.com/tutorials/tekton-pipeline-deploy-a-mobile-app-backend-openshift-4/).
|
||||
|
||||
### App ID Configuration
|
||||
|
||||
Create an [App ID](https://cloud.ibm.com/catalog/services/app-id) instance. Once created, do the following to configure the instance for this pattern.
|
||||
|
||||
**Note** The `.env.template` file referred to in the instructions is part of the code available in GitHub after running `git clone https://github.com/IBM/example-bank.git`.
|
||||
|
||||
* Allow Sign-up and Sign-in using username and password by going to the tab `Cloud Directory` > `Settings`
|
||||
|
||||

|
||||
|
||||
* Disable Email Verification by going to the tab `Cloud Directory` > `Email Templates` > `Email verification`
|
||||
|
||||

|
||||
|
||||
* Add an application in the `Applications` tab. Select "Regular web application"
|
||||
|
||||

|
||||
|
||||
|
||||
* Create the `admin` role.
|
||||
|
||||

|
||||
|
||||
* Create Service credentials with the `Writer` Role so that the simulator can create simulated users with the App ID instance. Take note of the `apikey` and `managementUrl` and place them in the `.env.template` file. The values belong in `APP_ID_IAM_APIKEY` and `APP_ID_MANAGEMENT_URL` respectively.
|
||||
|
||||

|
||||
|
||||
|
||||
Take note of the `clientId`, `secret`, `oAuthServerUrl` and place them in the `.env.template` file of this repo. The values belong in `APP_ID_CLIENT_ID`, `APP_ID_CLIENT_SECRET`, `APP_ID_TOKEN_URL` respectively.
|
||||
|
||||

|
||||
|
||||
* Rename `.env.template` file to `.env` file
|
||||
|
||||
## Secrets from App ID
|
||||
|
||||
Open the credentials screen to view the client IDs and keys needed for the back-end to interact with the App ID via its REST API endpoint.
|
||||
|
||||
The service credentials have the following fields - some of these are used in the `bank-oidc-secret` as described below:
|
||||
```
|
||||
{
|
||||
"apikey": "APIKEY",
|
||||
"appidServiceEndpoint": "https://us-south.appid.cloud.ibm.com",
|
||||
"clientId": "CLIENTID",
|
||||
"discoveryEndpoint": "https://us-south.appid.cloud.ibm.com/oauth/v4/3d17f53d-4600-4f32-bb2c-207f4e2f6060/.well-known/openid-configuration",
|
||||
"iam_apikey_description": "Auto-generated for key <uuid>",
|
||||
"iam_apikey_name": "write-credentials-for-creating-users",
|
||||
"iam_role_crn": "crn:v1:bluemix:public:iam::::serviceRole:Writer",
|
||||
"iam_serviceid_crn": "CRN",
|
||||
"managementUrl": "https://us-south.appid.cloud.ibm.com/management/v4/3d17f53d-4600-4f32-bb2c-207f4e2f6060",
|
||||
"oauthServerUrl": "https://us-south.appid.cloud.ibm.com/oauth/v4/3d17f53d-4600-4f32-bb2c-207f4e2f6060",
|
||||
"profilesUrl": "https://us-south.appid.cloud.ibm.com",
|
||||
"secret": "SECRET_STRING",
|
||||
"tenantId": "TENANTID_STRING",
|
||||
"version": 4
|
||||
}
|
||||
```
|
||||
|
||||
Map these fields into the secret as follows:
|
||||
|
||||
OIDC_JWKENDPOINT: oauthServerUrl/publickeys
|
||||
|
||||
OIDC_ISSUERIDENTIFIER: oauthServerUrl
|
||||
|
||||
OIDC_AUDIENCES: client ID of the application - see above.
|
||||
|
||||
|
||||
```
|
||||
kubectl create secret generic bank-oidc-secret --from-literal=OIDC_JWKENDPOINTURL=https://us-south.appid.cloud.ibm.com/oauth/v4/3d17f53d-4600-4f32-bb2c-207f4e2f6060/publickeys --from-literal=OIDC_ISSUERIDENTIFIER=https://us-south.appid.cloud.ibm.com/oauth/v4/3d17f53d-4600-4f32-bb2c-207f4e2f6060 --from-literal=OIDC_AUDIENCES=<client ID>
|
||||
```
|
||||
|
||||
## Database setup
|
||||
|
||||
The data in the example bank app lives in a PostgreSQL database.
|
||||
|
||||
#### Bank Database design
|
||||
|
||||
The database schema allows us to manage user profiles and track their transactions.
|
||||
|
||||

|
||||
|
||||
In this pattern, the database is created in a database instance running inside the OpenShift cluster. See the [operator tutorial](https://developer.ibm.com/tutorials/operator-hub-openshift-4-operators-ibm-cloud/) and the database load steps described below. Take note of these important elements of the database configuration:
|
||||
|
||||
1. Database name
|
||||
2. Username
|
||||
3. Password
|
||||
|
||||
These will be used to create a Kubernetes secret that's used by all the services in the cluster.
|
||||
|
||||
## Loading the Database Schema
|
||||
|
||||
To load the schema, we are going to use a Kubernetes `Job` resource, which runs a task to completion.
|
||||
|
||||
Follow instructions [here](https://developer.ibm.com/tutorials/operator-hub-openshift-4-operators-ibm-cloud/) to create a database in the
|
||||
project where the back-end services are deployed.
|
||||
|
||||
After deploying the PostgreSQL database, create a secret for your database credentials.
|
||||
|
||||
```
|
||||
# kubectl create secret generic bank-db-secret --from-literal=DB_SERVERNAME=<db_name> --from-literal=DB_PORTNUMBER=<db_port> --from-literal=DB_DATABASENAME=example --from-literal=DB_USER=<db_user> --from-literal=DB_PASSWORD=<db_password>
|
||||
secret/bank-db-secret created
|
||||
```
|
||||
|
||||
> Default Port is `5432`. Default username and password is `postgres`
|
||||
|
||||
Verify the new secret appears in your project:
|
||||
|
||||
```
|
||||
oc get secrets
|
||||
bank-db-secret Opaque 5 35s
|
||||
```
|
||||
|
||||
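For reference, here is a minimal sketch of what a schema-loading `Job` can look like. The real definition applied in the next step lives in `data_model/job.yaml`; the image name below is a hypothetical placeholder, so treat this purely as an illustration of the resource shape:

```
# Illustrative only - the actual Job is defined in data_model/job.yaml
oc apply -f - <<'EOF'
apiVersion: batch/v1
kind: Job
metadata:
  name: cc-schema-load
spec:
  backoffLimit: 1
  template:
    spec:
      restartPolicy: Never
      containers:
      - name: schema-load
        image: <your_repo>/bank-schema-load:latest   # hypothetical image name
        envFrom:
        - secretRef:
            name: bank-db-secret                     # database credentials created above
EOF
```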
Build and deploy the image to load the database.

```
oc apply -f data_model/job.yaml
```

You can verify the successful deployment this way:

1. Find the Jobs run:

```
$ oc get jobs
NAME             COMPLETIONS   DURATION   AGE
cc-schema-load   1/1           29s        15m

$ oc get pods
NAME                   READY   STATUS      RESTARTS   AGE
cc-schema-load-xcfrs   0/1     Completed   0          15m
```

2. Then, check the logs for the job. You will see the output of the SQL schema loading steps from the job container.

```
$ oc logs cc-schema-load-xcfrs
CREATE EXTENSION
CREATE DATABASE
You are now connected to database "example" as user "postgres".
CREATE SCHEMA
SET
CREATE TABLE
CREATE TABLE
CREATE TABLE
```
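If you want to double-check the schema directly, you can run a throwaway `psql` client against the database. A sketch - the host, user, and password placeholders are the values behind `bank-db-secret`, and the image tag is an assumption:

```
# List the tables in the example database from a temporary client pod
oc run psql-client --rm -it --restart=Never --image=postgres:13 \
  --env=PGPASSWORD=<db_password> -- \
  psql -h <db_host> -U <db_user> -d example -c '\dt'
```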
## Manual deployment of services

## Check out the code and build images

### User and Transaction services

The user and transaction services manage registered users and transactions, using Open Liberty and JPA to handle database operations.

- Check out the code for all services.

```
git clone https://github.com/IBM/example-bank.git
cd bank-app-backend
```
1. Follow the instructions in the README.md file to build the microservices with Maven.
2. Build the images and push them to an image repository, like Docker Hub, that is accessible to OpenShift.

**Note 1:** All images referred to in the deployment scripts are pre-built and available on Docker Hub. You can use the deployments as-is without rebuilding the images.

**Note 2:** *If you are using the IBM Container Registry (ICR) to store images, IBM OpenShift clusters are provisioned with an image pull secret for ICR images only in the default namespace/project. Deployments to other projects from ICR will require imagePullSecrets to be created.*

Modify the deployment.yaml image path to point to the image and deploy both services:

```
oc apply -f transaction-service/deployment.yaml -f user-service/deployment.yaml
```
Verify the services are running:

```
$ oc get services
NAME                  TYPE        CLUSTER-IP       EXTERNAL-IP   PORT(S)    AGE
transaction-service   ClusterIP   172.21.215.251   <none>        9080/TCP   3d23h
user-service          ClusterIP   172.21.64.7      <none>        9080/TCP   3d23h

$ oc get pods
NAME                                   READY   STATUS    RESTARTS   AGE
transaction-service-55b9bfb4cd-jzkgq   1/1     Running   0          26s
user-service-55b99c5c44-dpd9k          1/1     Running   0          25s
...
```
### Mobile Simulator

- Verify that the `.env` file is correctly set up as described above in the App ID section. It will be used both by the node image at runtime and to create a Kubernetes secret:

```
$ cd .. # if you're not in the root directory of this repo yet
$ cat .env
APP_ID_IAM_APIKEY=<key>
APP_ID_MANAGEMENT_URL=https://us-south.appid.cloud.ibm.com/management/v4/<id>
APP_ID_CLIENT_ID=<client_id>
APP_ID_CLIENT_SECRET=<client_secret>
APP_ID_TOKEN_URL=https://us-south.appid.cloud.ibm.com/oauth/v4/<id>
PROXY_USER_MICROSERVICE=user-service:9080
PROXY_TRANSACTION_MICROSERVICE=transaction-service:9080
```

This uses the `.env` file to create a secret used by the node process at runtime to communicate with the transaction and user services.

```
kubectl create secret generic mobile-simulator-secrets --from-env-file=.env
```
- Build the docker image and push it to your image repository.

```
docker build -t <repository> .
docker push <repository>
```

- Modify the `deployment.yaml` image path to point to the image.

```
oc apply -f deployment.yaml
```
### Process Transaction - Serverless Application (Knative Serving)

This part requires OpenShift Serverless to be installed in your OpenShift cluster. To install it, follow these instructions:

- [Installing the OpenShift Serverless Operator](https://access.redhat.com/documentation/en-us/openshift_container_platform/4.3/html/serverless_applications/installing-openshift-serverless-1#serverless-install-web-console_installing-openshift-serverless)
- [Installing Knative Serving](https://access.redhat.com/documentation/en-us/openshift_container_platform/4.3/html/serverless_applications/installing-openshift-serverless-1#installing-knative-serving)

After installing Knative Serving, you can proceed to deploy the serverless application.

This example serverless application handles the awarding of points for every transaction made. The application only runs when there are new transactions.

- Build the image and push it to your own repository:

```
docker build -t <your-repository/image-name> bank-knative-service
docker push <your-repository/image-name>
```

- Modify the `bank-knative-service/deployment.yaml` file to use the image you just built:

```
# spec:
#   containers:
#   - image: <your-repository/image-name>
```
- Create an admin scoped user

A user with the admin scope is required to access the API that rewards transactions with points from the transactions microservice. Create one from the App ID dashboard:

| | | |
|:-------------------------:|:-------------------------:|:-------------------------:|
|<img width="1000" alt="sim1" src="images/loyalty-user-test.png"> 1. Create a user |<img width="1000" alt="sim1" src="images/loyalty-user-role.png"> 2. Click on `+` sign to add a role | <img width="1000" alt="sim2" src="images/loyalty-user-role-added.png"> 3. Choose `admin` role |
- Create a secret for the username and password you just created (a quick token check is sketched below):

```
kubectl create secret generic bank-oidc-adminuser --from-literal=APP_ID_ADMIN_USER=<your-username> --from-literal=APP_ID_ADMIN_PASSWORD=<your-password>
```
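A quick way to confirm the admin credentials work is to request a token with App ID's resource-owner password flow. This is only a sketch - the client ID/secret and tenant ID come from your service credentials, and all values shown are placeholders:

```
# Request an access token for the admin user from the App ID token endpoint
curl -s -u "<client_id>:<client_secret>" \
  -d "grant_type=password&username=<your-username>&password=<your-password>" \
  "https://us-south.appid.cloud.ibm.com/oauth/v4/<tenant id>/token"
```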
- Deploy the Knative service:

```
oc apply -f bank-knative-service/deployment.yaml
```
- Check the Knative Serving status and make sure the URL matches the environment variable `KNATIVE_SERVICE_URL` defined in `bank-app-backend/transaction-service/deployment.yaml`:

```
oc get kservice # or kn service list - if you have the kn CLI installed
# NAME                  URL                                                          LATEST                      AGE   CONDITIONS   READY   REASON
# process-transaction   http://process-transaction.example-bank.svc.cluster.local    process-transaction-9chv6   34d   3 OK / 3     True
```

> The serverless application can be reached at `http://process-transaction.example-bank.svc.cluster.local` in the example above. If it doesn't match the value you deployed in the [User and transaction services](#user-and-transaction-services) step, fix the `KNATIVE_SERVICE_URL` value in the `bank-app-backend/transaction-service/deployment.yaml` file and redeploy it with `oc apply`.
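To sanity-check that the serverless service scales up on demand, you can hit it from inside the cluster and watch a pod appear. A sketch - the `process` endpoint and its query parameters mirror the `KnativeService` REST client in the back-end code, and the test values are illustrative:

```
# Fire a test request at the Knative service, then check that a pod was spun up for it
oc run curl-test --rm -it --restart=Never --image=curlimages/curl -- \
  curl -s -X POST "http://process-transaction.example-bank.svc.cluster.local/process?transactionId=test-1&category=groceries&amount=10"
oc get pods -l serving.knative.dev/service=process-transaction
```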
### Access the application

Once deployed, you can list the routes. You should see at least one route - for the mobile simulator service - ending in `.cloud`:

```
$ oc get routes
NAME HOST/PORT PATH SERVICES PORT TERMINATION WILDCARD
mobile-simulator-service mobile-simulator-service-pattern.koyfman-feb10-f2c6cdc6801be85fd188b09d006f13e3-0000.us-south.containers.appdomain.cloud transaction-service <all> None
```

The URL of the mobile simulator is `mobile-simulator-service-pattern.koyfman-feb10-f2c6cdc6801be85fd188b09d006f13e3-0000.us-south.containers.appdomain.cloud`.
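If you prefer to pull the hostname from the CLI (for scripting, for instance), the route's host field can be read directly - assuming the route name shown in the listing above:

```
# Print the mobile simulator URL
HOST=$(oc get route mobile-simulator-service -o jsonpath='{.spec.host}')
echo "http://${HOST}"
```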
### Erasure service

The erasure service is a Kubernetes `CronJob` that runs daily to anonymize data for users who have made a deletion request.

- Build and push the image:

```
cd bank-user-cleanup-utility

mvn package
docker build -t <your_repo>/bank-user-cleanup-utility:1.0-SNAPSHOT .
docker push <your_repo>/bank-user-cleanup-utility:1.0-SNAPSHOT
```

- Update the image name in the `job.yaml` file to point at the image in the repository used above.
- Create secrets for the erasure service.

The erasure service requires three secrets to communicate with the PostgreSQL database and App ID. The `bank-db-secret` was defined previously, as it's used by the other services. The other two secrets are:

1. `bank-appid-secret`: This secret defines environment variables for connecting to App ID, and includes the following parameters:
```
kubectl create secret generic bank-appid-secret --from-literal=APPID_TENANTID=<tenant id> --from-literal=APPID_SERVICE_URL=https://us-south.appid.cloud.ibm.com
```

2. `bank-iam-secret`: This secret uses the IAM API key to allow the service to authenticate to App ID.
```
kubectl create secret generic bank-iam-secret --from-literal=IAM_APIKEY=<IAM_KEY> --from-literal=IAM_SERVICE_URL=https://iam.cloud.ibm.com/identity/token
```
Here are the steps to retrieve this API key:

Via the UI console:

1. On the top right menu bar, click Manage > Access (IAM).
2. Select IBM Cloud API Keys in the left menu bar.
3. Click the Create an IBM Cloud API Key button.
4. Provide a name and click the Create button.

The CLI method is documented here: https://cloud.ibm.com/docs/iam?topic=iam-userapikey#create_user_key (see also the sketch below).
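For example, with the IBM Cloud CLI - a sketch where the key name and description are illustrative; copy the key from the output into the `bank-iam-secret` created above:

```
# Create a user API key; the key value is shown once in the command output
ibmcloud login
ibmcloud iam api-key-create bank-erasure-key -d "API key for the example-bank erasure service"
```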
- Deploy the job:

```
oc apply -f job.yaml
```

Note that the CronJob won't run immediately upon creation, since it's scheduled to run every 24 hours.

To run it on demand, create a `Job` resource from the `CronJob`:

```
oc create job --from=cronjob/bank-user-cleanup-utility delete-now
```

When you list `jobs` you will see the completed delete-now job, as well as the completed database load job. If you check the logs of the delete job, you'll see which users have been processed by the job.

```
$ oc get jobs
NAME             COMPLETIONS   DURATION   AGE
delete-now       1/1           33s        45h
cc-schema-load   1/1           6s         3d
```
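To see which users the on-demand run actually processed, read the log of the pod the Job created - the `job-name` label is added automatically by Kubernetes:

```
# Inspect the pod and log produced by the delete-now job
oc get pods -l job-name=delete-now
oc logs -l job-name=delete-now
```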
## Data cleanup

Data erasure is a two-phase operation, one synchronous and one scheduled. When an authenticated `DELETE` REST call is made for a given user, the unique ID that ties the database user entry to App ID is cleared from the local in-cluster Postgres instance. As this is the only way to connect the data in the bank app to the real user identity (name, etc.), we've effectively anonymized the transaction data. The Java `User` service then flags the account as deleted, which can be useful for logging purposes.

The erasure service operates as a Kubernetes `CronJob` that checks that the user has been deleted from our database, and also removes them from App ID, effectively unregistering the user.
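For reference, the synchronous half of this flow is the authenticated `DELETE` call against the user service - the same endpoint listed in the back-end README. The host and token below are placeholders:

```
# Phase one: the user (or the app on their behalf) requests deletion
curl -X DELETE -H "Authorization: Bearer <access-token>" -k https://<user-service-host>/bank/v1/users/self
```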
## LogDNA Integration

Finally, we connect our services with LogDNA to aggregate messages from the internal services.

Follow the instructions to deploy LogDNA to the OpenShift cluster here: https://cloud.ibm.com/observe/logging.

Once deployed, your instance of LogDNA will keep track of any logs created within the application.



There can be a lot to sift through. Use one of the filters from the dropdown menus at the top of the screen to limit which logs you are viewing. For instance, you can view only the logs dealing with App ID by selecting it from the **Apps** menu:



## License

This code pattern is licensed under the Apache License, Version 2. Separate third-party code objects invoked within this code pattern are licensed by their respective providers pursuant to their own separate licenses. Contributions are subject to the [Developer Certificate of Origin, Version 1.1](https://developercertificate.org/) and the [Apache License, Version 2](https://www.apache.org/licenses/LICENSE-2.0.txt).

[Apache License FAQ](https://www.apache.org/foundation/license-faq.html#WhatDoesItMEAN)
3
dev/tekton/examples/example-bank/_config.yml
Executable file
3
dev/tekton/examples/example-bank/_config.yml
Executable file
@@ -0,0 +1,3 @@
|
||||
markdown: kramdown
|
||||
kramdown:
|
||||
parse_block_html: true
|
||||
53
dev/tekton/examples/example-bank/app.js
Executable file
53
dev/tekton/examples/example-bank/app.js
Executable file
@@ -0,0 +1,53 @@
|
||||
/*eslint-env node*/
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// node.js starter application for Bluemix
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
// This application uses express as its web server
|
||||
// for more info, see: http://expressjs.com
|
||||
var express = require('express');
|
||||
|
||||
// cfenv provides access to your Cloud Foundry environment
|
||||
// for more info, see: https://www.npmjs.com/package/cfenv
|
||||
var cfenv = require('cfenv');
|
||||
|
||||
// create a new express server
|
||||
var app = express();
|
||||
|
||||
var port = process.env.PORT || 8060;
|
||||
|
||||
let DEVMODE = process.env.DEVMODE
|
||||
|
||||
if (DEVMODE) {
|
||||
app.get('/javascript/clientHelpers/libertyclient.js', (req, res) => {res.sendFile('public/javascript/clientHelpers/libertyclient-devmode.js', {root: __dirname})})
|
||||
app.get('/javascript/clientHelpers/demoaccounts.js', (req, res) => {res.sendFile('public/javascript/clientHelpers/demoaccounts-devmode.js', {root: __dirname})})
|
||||
}
|
||||
// serve the files out of ./public as our main files
|
||||
app.use(express.static(__dirname + '/public'));
|
||||
|
||||
// get the app environment from Cloud Foundry
|
||||
var appEnv = cfenv.getAppEnv();
|
||||
var log4js = require('log4js');
|
||||
var logger = log4js.getLogger();
|
||||
|
||||
logger.level = 'debug';
|
||||
logger.debug("launching bank simulated UI");
|
||||
|
||||
app.use(require("body-parser").json());
|
||||
app.use(require("body-parser").urlencoded({extended: false}));
|
||||
// use createUser route
|
||||
|
||||
if (!DEVMODE) {
|
||||
app.use('/demo', require('./routes/createUser'))
|
||||
// proxy for testing locally
|
||||
let proxy = require('express-http-proxy')
|
||||
let USER_MICROSERVICE = process.env.PROXY_USER_MICROSERVICE
|
||||
let TRANSACTION_MICROSERVICE = process.env.PROXY_TRANSACTION_MICROSERVICE
|
||||
app.use('/proxy_user', proxy(USER_MICROSERVICE))
|
||||
app.use('/proxy_transaction', proxy(TRANSACTION_MICROSERVICE))
|
||||
}
|
||||
|
||||
// start server on the specified port and binding host
|
||||
app.listen(port);
|
||||
logger.debug("Listening on port ", port);
|
||||
37
dev/tekton/examples/example-bank/bank-app-backend/.gitignore
vendored
Executable file
37
dev/tekton/examples/example-bank/bank-app-backend/.gitignore
vendored
Executable file
@@ -0,0 +1,37 @@
|
||||
**/target
|
||||
!.keep
|
||||
|
||||
|
||||
### STS ###
|
||||
.apt_generated
|
||||
.classpath
|
||||
.factorypath
|
||||
.project
|
||||
.settings
|
||||
.springBeans
|
||||
.sts4-cache
|
||||
|
||||
### IntelliJ IDEA ###
|
||||
.idea
|
||||
*.iws
|
||||
*.iml
|
||||
*.ipr
|
||||
|
||||
### NetBeans ###
|
||||
/nbproject/private/
|
||||
/nbbuild/
|
||||
/dist/
|
||||
/nbdist/
|
||||
/.nb-gradle/
|
||||
/build/
|
||||
|
||||
### VS Code ###
|
||||
.vscode/
|
||||
|
||||
|
||||
## Local configuration files
|
||||
/local/config/*
|
||||
|
||||
*.swo
|
||||
*.swp
|
||||
*.~
|
||||
115
dev/tekton/examples/example-bank/bank-app-backend/README.md
Executable file
115
dev/tekton/examples/example-bank/bank-app-backend/README.md
Executable file
@@ -0,0 +1,115 @@
|
||||
|
||||
## Building individual microservices
|
||||
|
||||
### User service
|
||||
|
||||
```
|
||||
mvn -pl :user-service -am package
|
||||
docker build -t user-service:1.0-SNAPSHOT user-service
|
||||
```
|
||||
|
||||
### Transaction service
|
||||
```
|
||||
mvn -pl :transaction-service -am package
|
||||
docker build -t transaction-service:1.0-SNAPSHOT transaction-service
|
||||
```
|
||||
|
||||
|
||||
## Configuration
|
||||
|
||||
### Secrets
|
||||
|
||||
```
|
||||
kubectl create secret generic bank-db-secret --from-literal=DB_SERVERNAME=<host> --from-literal=DB_PORTNUMBER=<port> --from-literal=DB_DATABASENAME=ibmclouddb --from-literal=DB_USER=<user> --from-literal=DB_PASSWORD=<password>
|
||||
kubectl create secret generic bank-oidc-secret --from-literal=OIDC_JWKENDPOINTURL=<oauthServerUrl>/publickeys --from-literal=OIDC_ISSUERIDENTIFIER=<issuer> --from-literal=OIDC_AUDIENCES=<audience>
|
||||
```
|
||||
|
||||
|
||||
## Curl commands
|
||||
|
||||
### Users
|
||||
|
||||
```
|
||||
curl -X POST -H "Authorization: Bearer <access-token>" -H "Content-Type: application/json" -d "{\"consentGiven\": \"true\"}" -k https://localhost:9443/bank/v1/users
|
||||
|
||||
curl -X GET -H "Authorization: Bearer <access-token>" -k https://localhost:9443/bank/v1/users/self
|
||||
|
||||
curl -X PUT -H "Authorization: Bearer <access-token>" -H "Content-Type: application/json" -d "{\"consentGiven\": \"false\"}" -k https://localhost:9443/bank/v1/users/self
|
||||
|
||||
curl -X DELETE -H "Authorization: Bearer <access-token>" -k https://localhost:9443/bank/v1/users/self
|
||||
```
|
||||
|
||||
|
||||
### User Events
|
||||
|
||||
```
|
||||
curl -X POST -H "Authorization: Bearer <access-token>" -H "Content-Type: application/json" -d "{\"eventId\": \"871859e4-9fca-4bcf-adb5-e7d063d0747e\"}" -k https://localhost:9443/bank/v1/userEvents
|
||||
|
||||
curl -X GET -H "Authorization: Bearer <access-token>" -k https://localhost:9443/bank/v1/userEvents/self
|
||||
|
||||
curl -X GET -H "Authorization: Bearer <access-token>" -k https://localhost:9443/bank/v1/userEvents/self/info
|
||||
```
|
||||
|
||||
|
||||
### Events
|
||||
|
||||
```
|
||||
curl -X POST -H "Authorization: Bearer <access-token>" -H "Content-Type: application/json" -d "{\"eventName\": \"Event name\", \"pointValue\": 100}" -k https://localhost:9444/bank/v1/events
|
||||
|
||||
curl -X GET -H "Authorization: Bearer <access-token>" -k https://localhost:9444/bank/v1/events/{eventId}
|
||||
|
||||
curl -X PUT -H "Authorization: Bearer <access-token>" -H "Content-Type: application/json" -d "{\"eventName\": \"Event name\", \"pointValue\": 100}" -k https://localhost:9444/bank/v1/events/{eventId}
|
||||
|
||||
curl -X GET -H "Authorization: Bearer <access-token>" -k https://localhost:9444/bank/v1/events
|
||||
|
||||
curl -X GET -H "Authorization: Bearer <access-token>" -k "https://localhost:9444/bank/v1/events?id=&id=&id="
|
||||
|
||||
```
|
||||
|
||||
## Running the integration tests
|
||||
|
||||
### Set environment variables
|
||||
|
||||
Base URL where users and events services are deployed
|
||||
```
|
||||
export USERS_BASEURL=http://<host>:<port>
|
||||
export EVENTS_BASEURL=http://<host>:<port>
|
||||
```
|
||||
|
||||
Prefix for test user names and the password they should use. These users are created dynamically by the tests.
|
||||
```
|
||||
export TEST_USER_PREFIX=<testUsername>
|
||||
export TEST_PASSWORD=<testUserPassword>
|
||||
```
|
||||
|
||||
Admin user name and password. This user name must exist in App Id prior to running the test and must have the admin role.
|
||||
```
|
||||
export TEST_ADMIN_USER=<adminUsername>
|
||||
export TEST_ADMIN_PASSWORD=<adminUserPassword>
|
||||
```
|
||||
|
||||
App Id service URL. Change to correct URL for the region where your App Id instance is deployed.
|
||||
```
|
||||
export APPID_SERVICE_URL=https://us-south.appid.cloud.ibm.com
|
||||
```
|
||||
|
||||
App Id tenant id, client id, and client password (secret)
|
||||
```
|
||||
export APPID_TENANTID=<tenant id>
|
||||
export OIDC_CLIENTID=<client id>
|
||||
export OIDC_CLIENTPASSWORD=<client secret>
|
||||
export OIDC_ISSUERIDENTIFIER=$APPID_SERVICE_URL/$APPID_TENANTID
|
||||
```
|
||||
|
||||
IAM API key (needed for authentication to App Id)
|
||||
```
|
||||
export IAM_APIKEY=<apikey>
|
||||
export IAM_SERVICE_URL=https://iam.cloud.ibm.com/identity/token
|
||||
```
|
||||
|
||||
|
||||
### Run the tests
|
||||
|
||||
```
|
||||
mvn -pl :integration-tests -am verify
|
||||
```
|
||||
34
dev/tekton/examples/example-bank/bank-app-backend/common/pom.xml
Executable file
34
dev/tekton/examples/example-bank/bank-app-backend/common/pom.xml
Executable file
@@ -0,0 +1,34 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>parent</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>common</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<dependencies>
|
||||
<!-- Open Liberty Features -->
|
||||
<dependency>
|
||||
<groupId>io.openliberty.features</groupId>
|
||||
<artifactId>microProfile-3.0</artifactId>
|
||||
<type>esa</type>
|
||||
</dependency>
|
||||
<!-- lombok -->
|
||||
<dependency>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
<version>1.18.16</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
</project>
|
||||
@@ -0,0 +1,26 @@
|
||||
package com.ibm.codey.bank;
|
||||
|
||||
import javax.inject.Inject;
|
||||
|
||||
import org.eclipse.microprofile.jwt.Claim;
|
||||
import org.eclipse.microprofile.jwt.Claims;
|
||||
|
||||
public class BaseResource {
|
||||
|
||||
@Inject
|
||||
@Claim("sub")
|
||||
private String subject;
|
||||
|
||||
@Inject
|
||||
@Claim(standard = Claims.raw_token)
|
||||
private String rawToken;
|
||||
|
||||
protected String getCallerSubject() {
|
||||
return subject;
|
||||
}
|
||||
|
||||
protected String getCallerCredentials() {
|
||||
return "Bearer " + rawToken;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
package com.ibm.codey.bank.accounts.json;
|
||||
|
||||
import javax.json.bind.annotation.JsonbProperty;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
import lombok.ToString;
|
||||
|
||||
@Getter @Setter @ToString
|
||||
public class UserRegistration {
|
||||
|
||||
@JsonbProperty
|
||||
private boolean consentGiven;
|
||||
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
package com.ibm.codey.bank.accounts.json;
|
||||
|
||||
import javax.json.bind.annotation.JsonbProperty;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
import lombok.ToString;
|
||||
|
||||
@Getter @Setter @ToString
|
||||
public class UserRegistrationInfo {
|
||||
|
||||
@JsonbProperty
|
||||
private String userId;
|
||||
|
||||
@JsonbProperty
|
||||
private boolean consentGiven;
|
||||
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
package com.ibm.codey.bank.catalog;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CompletionStage;
|
||||
|
||||
import javax.enterprise.context.Dependent;
|
||||
import javax.ws.rs.HeaderParam;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.QueryParam;
|
||||
|
||||
import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;
|
||||
|
||||
@Dependent
|
||||
@RegisterRestClient
|
||||
public interface KnativeService {
|
||||
|
||||
@POST
|
||||
@Path("process")
|
||||
public CompletionStage<String> processTransaction(@QueryParam("transactionId") String transactionId, @QueryParam("category") String category, @QueryParam("amount") String amount);
|
||||
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
package com.ibm.codey.bank.catalog;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.enterprise.context.Dependent;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.HeaderParam;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.Produces;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;
|
||||
|
||||
import com.ibm.codey.bank.accounts.json.UserRegistration;
|
||||
import com.ibm.codey.bank.accounts.json.UserRegistrationInfo;
|
||||
|
||||
@Dependent
|
||||
@RegisterRestClient
|
||||
public interface UserService {
|
||||
|
||||
@GET
|
||||
@Path("self")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
public UserRegistrationInfo getUserConsent(@HeaderParam("Authorization") String authorizationHeader);
|
||||
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
package com.ibm.codey.bank.catalog.json;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
|
||||
import javax.json.bind.annotation.JsonbProperty;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
import lombok.ToString;
|
||||
|
||||
@Getter @Setter @ToString
|
||||
public class CreateTransactionDefinition {
|
||||
|
||||
@JsonbProperty
|
||||
private String transactionName;
|
||||
|
||||
@JsonbProperty
|
||||
private String category;
|
||||
|
||||
@JsonbProperty
|
||||
private BigDecimal amount;
|
||||
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
package com.ibm.codey.bank.catalog.json;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
|
||||
import javax.json.bind.annotation.JsonbProperty;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
import lombok.ToString;
|
||||
|
||||
@Getter @Setter @ToString
|
||||
public class RewardTransactionDefinition {
|
||||
|
||||
@JsonbProperty
|
||||
private BigDecimal pointsEarned;
|
||||
|
||||
}
|
||||
@@ -0,0 +1,66 @@
|
||||
package com.ibm.codey.bank.interceptor;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
import javax.inject.Inject;
|
||||
import javax.interceptor.AroundInvoke;
|
||||
import javax.interceptor.InvocationContext;
|
||||
import javax.json.Json;
|
||||
import javax.json.JsonObjectBuilder;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.eclipse.microprofile.jwt.Claim;
|
||||
|
||||
/*
|
||||
* This interceptor is used with the JAXRS resource classes to log any exception and return a 500 status code to the client.
|
||||
* This could have been accomplished with an ExceptionMapper as well but an interceptor lets us also log information about
|
||||
* the failing method and input parameters.
|
||||
*/
|
||||
public class LoggingInterceptor {
|
||||
|
||||
private static final Logger log = Logger.getLogger(LoggingInterceptor.class.getName());
|
||||
|
||||
@Inject
|
||||
@Claim("sub")
|
||||
private String subject;
|
||||
|
||||
@AroundInvoke
|
||||
public Object logInvocation(InvocationContext ctx) {
|
||||
try {
|
||||
Object result = ctx.proceed();
|
||||
logRequestAndResult(ctx, result);
|
||||
return result;
|
||||
} catch(Throwable e) {
|
||||
String clz = ctx.getMethod().getDeclaringClass().getName();
|
||||
String method = ctx.getMethod().getName();
|
||||
Object[] params = ctx.getParameters();
|
||||
if (params != null && params.length > 0) {
|
||||
log.log(Level.SEVERE, "***** Exception in " + clz + "." + method, params);
|
||||
} else {
|
||||
log.log(Level.SEVERE, "***** Exception in " + clz + "." + method);
|
||||
}
|
||||
e.printStackTrace();
|
||||
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
|
||||
}
|
||||
}
|
||||
|
||||
private void logRequestAndResult(InvocationContext ctx, Object result) {
|
||||
String methodName = ctx.getMethod().getName();
|
||||
Object[] params = ctx.getParameters();
|
||||
JsonObjectBuilder requestBuilder = Json.createObjectBuilder()
|
||||
.add("subject", subject)
|
||||
.add("action", methodName);
|
||||
if (params != null && params.length > 0) {
|
||||
requestBuilder.add("input", Arrays.toString(params));
|
||||
}
|
||||
if (result instanceof Response) {
|
||||
Response response = (Response)result;
|
||||
requestBuilder.add("statuscode", response.getStatus());
|
||||
}
|
||||
log.log(Level.INFO, "API REQUEST", requestBuilder.build());
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@@ -0,0 +1,38 @@
|
||||
package com.ibm.codey.bank.interceptor;
|
||||
|
||||
import javax.annotation.Priority;
|
||||
import javax.inject.Inject;
|
||||
import javax.interceptor.AroundInvoke;
|
||||
import javax.interceptor.Interceptor;
|
||||
import javax.interceptor.InvocationContext;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.eclipse.microprofile.jwt.Claim;
|
||||
|
||||
import com.ibm.codey.bank.interceptor.binding.RequiresAuthorization;
|
||||
|
||||
/*
|
||||
* This interceptor is used with the JAXRS resource classes to enforce a client scope for authorization purposes.
|
||||
*/
|
||||
@RequiresAuthorization @Interceptor
|
||||
@Priority(Interceptor.Priority.APPLICATION)
|
||||
public class SecurityInterceptor {
|
||||
|
||||
@Inject
|
||||
@Claim("scope")
|
||||
private String scope;
|
||||
|
||||
@AroundInvoke
|
||||
public Object checkScope(InvocationContext ctx) throws Exception {
|
||||
String[] scopeList = scope.split(" ");
|
||||
for(String hasScope : scopeList) {
|
||||
if (hasScope.equals("admin")) {
|
||||
Object result = ctx.proceed();
|
||||
return result;
|
||||
}
|
||||
}
|
||||
return Response.status(Response.Status.FORBIDDEN).entity("admin permission required").build();
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
package com.ibm.codey.bank.interceptor.binding;
|
||||
|
||||
import static java.lang.annotation.ElementType.METHOD;
|
||||
import static java.lang.annotation.ElementType.TYPE;
|
||||
import static java.lang.annotation.RetentionPolicy.RUNTIME;
|
||||
|
||||
import java.lang.annotation.Inherited;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import javax.interceptor.InterceptorBinding;
|
||||
|
||||
@Inherited
|
||||
@InterceptorBinding
|
||||
@Target({TYPE, METHOD})
|
||||
@Retention(RUNTIME)
|
||||
public @interface RequiresAuthorization {
|
||||
}
|
||||
43
dev/tekton/examples/example-bank/bank-app-backend/integration-tests/pom.xml
Executable file
43
dev/tekton/examples/example-bank/bank-app-backend/integration-tests/pom.xml
Executable file
@@ -0,0 +1,43 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>parent</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>integration-tests</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>common</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<type>jar</type>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>3.9</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<!-- Plugin to run functional tests -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-failsafe-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
||||
@@ -0,0 +1,175 @@
|
||||
package it.com.ibm.codey.loyalty;
|
||||
|
||||
import java.lang.reflect.Type;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.json.bind.Jsonb;
|
||||
import javax.json.bind.JsonbBuilder;
|
||||
import javax.ws.rs.client.Client;
|
||||
import javax.ws.rs.client.ClientBuilder;
|
||||
import javax.ws.rs.client.Entity;
|
||||
import javax.ws.rs.client.WebTarget;
|
||||
import javax.ws.rs.core.HttpHeaders;
|
||||
import javax.ws.rs.core.MultivaluedHashMap;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import com.ibm.codey.loyalty.accounts.json.UserRegistration;
|
||||
|
||||
import it.com.ibm.codey.loyalty.util.TestSecurityHelper;
|
||||
|
||||
public class EndpointTestBase {
|
||||
|
||||
protected static String USERS_BASEURL;
|
||||
protected static String EVENTS_BASEURL;
|
||||
|
||||
protected static String TEST_USER_PREFIX;
|
||||
protected static String TEST_USER;
|
||||
protected static String TEST_PASSWORD;
|
||||
protected static String userAccessToken;
|
||||
|
||||
protected static String TEST_ADMIN_USER;
|
||||
protected static String TEST_ADMIN_PASSWORD;
|
||||
protected static String adminAccessToken;
|
||||
|
||||
protected static final String USERS_ENDPOINT = "/loyalty/v1/users";
|
||||
protected static final String USERS_SELF_ENDPOINT = "/loyalty/v1/users/self";
|
||||
protected static final String USER_EVENTS_ENDPOINT = "/loyalty/v1/userEvents";
|
||||
protected static final String USER_EVENTS_SELF_ENDPOINT = "/loyalty/v1/userEvents/self";
|
||||
protected static final String USER_EVENTS_SELF_INFO_ENDPOINT = "/loyalty/v1/userEvents/self/info";
|
||||
protected static final String EVENTS_ENDPOINT = "/loyalty/v1/events";
|
||||
|
||||
protected static boolean CONSENT_GIVEN = true;
|
||||
protected static boolean CONSENT_NOT_GIVEN = false;
|
||||
|
||||
static {
|
||||
USERS_BASEURL = System.getenv("USERS_BASEURL");
|
||||
EVENTS_BASEURL = System.getenv("EVENTS_BASEURL");
|
||||
TEST_USER_PREFIX = System.getenv("TEST_USER_PREFIX");
|
||||
TEST_PASSWORD = System.getenv("TEST_PASSWORD");
|
||||
TEST_ADMIN_USER = System.getenv("TEST_ADMIN_USER");
|
||||
TEST_ADMIN_PASSWORD = System.getenv("TEST_ADMIN_PASSWORD");
|
||||
}
|
||||
|
||||
private Client client;
|
||||
|
||||
protected void setup() {
|
||||
client = ClientBuilder.newClient();
|
||||
TEST_USER = TEST_USER_PREFIX + (int) ((Math.random() * 999999) + 1);
|
||||
}
|
||||
|
||||
protected void teardown() {
|
||||
client.close();
|
||||
}
|
||||
|
||||
protected <T> T get(String baseUrl, String endpoint, Map<String, Object> queryParams, String accessToken, Response.Status expectedStatusCode, Type returnType) {
|
||||
String url = baseUrl + endpoint;
|
||||
WebTarget target = client.target(url);
|
||||
if (queryParams != null) {
|
||||
for (String key: queryParams.keySet()) {
|
||||
target = target.queryParam(key, queryParams.get(key));
|
||||
}
|
||||
}
|
||||
MultivaluedHashMap<String,Object> headers = new MultivaluedHashMap<String,Object>();
|
||||
if (accessToken != null) {
|
||||
String authHeader = "Bearer " + accessToken;
|
||||
headers.putSingle(HttpHeaders.AUTHORIZATION, authHeader);
|
||||
}
|
||||
try (Response response = target.request().headers(headers).get()) {
|
||||
checkStatusCode(url, response, expectedStatusCode);
|
||||
if (returnType == Void.class) {
|
||||
return null;
|
||||
}
|
||||
String jsonString = response.readEntity(String.class);
|
||||
if (returnType.equals(String.class)) {
|
||||
return (T)jsonString;
|
||||
}
|
||||
Jsonb jsonb = JsonbBuilder.create();
|
||||
return jsonb.fromJson(jsonString, returnType);
|
||||
}
|
||||
}
|
||||
|
||||
protected <T> T put(String baseUrl, String endpoint, Object body, String accessToken, Response.Status expectedStatusCode, Class<T> returnType) {
|
||||
String url = baseUrl + endpoint;
|
||||
Jsonb jsonb = JsonbBuilder.create();
|
||||
String jsonBody = jsonb.toJson(body);
|
||||
MultivaluedHashMap<String,Object> headers = new MultivaluedHashMap<String,Object>();
|
||||
if (accessToken != null) {
|
||||
String authHeader = "Bearer " + accessToken;
|
||||
headers.putSingle(HttpHeaders.AUTHORIZATION, authHeader);
|
||||
}
|
||||
try (Response response = client.target(url).request().headers(headers).buildPut(Entity.json(jsonBody)).invoke()) {
|
||||
checkStatusCode(url, response, expectedStatusCode);
|
||||
if (returnType == Void.class) {
|
||||
return null;
|
||||
}
|
||||
String jsonString = response.readEntity(String.class);
|
||||
if (returnType.equals(String.class)) {
|
||||
return (T)jsonString;
|
||||
}
|
||||
return jsonb.fromJson(jsonString, returnType);
|
||||
}
|
||||
}
|
||||
|
||||
protected <T> T post(String baseUrl, String endpoint, Object body, String accessToken, Response.Status expectedStatusCode, Class<T> returnType) {
|
||||
String url = baseUrl + endpoint;
|
||||
Jsonb jsonb = JsonbBuilder.create();
|
||||
String jsonBody = jsonb.toJson(body);
|
||||
MultivaluedHashMap<String,Object> headers = new MultivaluedHashMap<String,Object>();
|
||||
if (accessToken != null) {
|
||||
String authHeader = "Bearer " + accessToken;
|
||||
headers.putSingle(HttpHeaders.AUTHORIZATION, authHeader);
|
||||
}
|
||||
try (Response response = client.target(url).request().headers(headers).buildPost(Entity.json(jsonBody)).invoke()) {
|
||||
checkStatusCode(url, response, expectedStatusCode);
|
||||
if (returnType == Void.class) {
|
||||
return null;
|
||||
}
|
||||
String jsonString = response.readEntity(String.class);
|
||||
if (returnType.equals(String.class)) {
|
||||
return (T)jsonString;
|
||||
}
|
||||
return jsonb.fromJson(jsonString, returnType);
|
||||
}
|
||||
}
|
||||
|
||||
protected void delete(String baseUrl, String endpoint, String accessToken, Response.Status expectedStatusCode) {
|
||||
String url = baseUrl + endpoint;
|
||||
MultivaluedHashMap<String,Object> headers = new MultivaluedHashMap<String,Object>();
|
||||
if (accessToken != null) {
|
||||
String authHeader = "Bearer " + accessToken;
|
||||
headers.putSingle(HttpHeaders.AUTHORIZATION, authHeader);
|
||||
}
|
||||
try (Response response = client.target(url).request().headers(headers).buildDelete().invoke()) {
|
||||
checkStatusCode(url, response, expectedStatusCode);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setupUser() {
|
||||
// Create a user in the user registry.
|
||||
TestSecurityHelper.createUser(TEST_USER, TEST_PASSWORD);
|
||||
|
||||
// Log the user in and obtain an access token for invoking the API.
|
||||
userAccessToken = TestSecurityHelper.signOn(TEST_USER, TEST_PASSWORD);
|
||||
|
||||
// Create user registration
|
||||
UserRegistration userRegistration = new UserRegistration();
|
||||
userRegistration.setConsentGiven(CONSENT_GIVEN);
|
||||
post(USERS_BASEURL, USERS_ENDPOINT, userRegistration, userAccessToken, Response.Status.NO_CONTENT, Void.class);
|
||||
}
|
||||
|
||||
protected void removeUser() {
|
||||
// Use DELETE to remove user registration.
|
||||
delete(USERS_BASEURL, USERS_SELF_ENDPOINT, userAccessToken, Response.Status.NO_CONTENT);
|
||||
}
|
||||
|
||||
private void checkStatusCode(String url, Response response, Response.Status expectedStatusCode) {
|
||||
if (expectedStatusCode.getStatusCode() != response.getStatus()) {
|
||||
fail("Unexpected response code " + response.getStatus() +
|
||||
" (expected " + expectedStatusCode.getStatusCode() +
|
||||
") from " + url + " Response=" + response.readEntity(String.class));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,66 @@
|
||||
package it.com.ibm.codey.loyalty.accounts;
|
||||
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.ibm.codey.loyalty.accounts.json.UserRegistration;
|
||||
|
||||
import it.com.ibm.codey.loyalty.EndpointTestBase;
|
||||
import it.com.ibm.codey.loyalty.util.TestSecurityHelper;
|
||||
|
||||
public class UserEndpointTest extends EndpointTestBase {
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
super.setup();
|
||||
}
|
||||
|
||||
@After
|
||||
public void teardown() {
|
||||
super.teardown();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUserRegistrationAndDeletion() {
|
||||
try {
|
||||
setupUser();
|
||||
// Use GET to get the user registration.
|
||||
UserRegistration checkUserRegistration = get(USERS_BASEURL, USERS_SELF_ENDPOINT, null, userAccessToken, Response.Status.OK, UserRegistration.class);
|
||||
assertEquals("Consent flag is incorrect", CONSENT_GIVEN, checkUserRegistration.isConsentGiven());
|
||||
} finally {
|
||||
removeUser();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUserRegistrationModificationAndDeletion() {
|
||||
try {
|
||||
setupUser();
|
||||
// Use PUT to change the user registration.
|
||||
UserRegistration userRegistration = new UserRegistration();
|
||||
userRegistration.setConsentGiven(CONSENT_NOT_GIVEN);
|
||||
put(USERS_BASEURL, USERS_SELF_ENDPOINT, userRegistration, userAccessToken, Response.Status.NO_CONTENT, Void.class);
|
||||
// Use GET to get the user registration.
|
||||
UserRegistration checkUserRegistration = get(USERS_BASEURL, USERS_SELF_ENDPOINT, null, userAccessToken, Response.Status.OK, UserRegistration.class);
|
||||
assertEquals("Consent flag is incorrect", CONSENT_NOT_GIVEN, checkUserRegistration.isConsentGiven());
|
||||
} finally {
|
||||
removeUser();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAuthenticationFailure() {
|
||||
// Make calls without an authentication header and verify they are rejected.
|
||||
UserRegistration userRegistration = new UserRegistration();
|
||||
userRegistration.setConsentGiven(CONSENT_GIVEN);
|
||||
post(USERS_BASEURL, USERS_ENDPOINT, userRegistration, null, Response.Status.UNAUTHORIZED, Void.class);
|
||||
get(USERS_BASEURL, USERS_SELF_ENDPOINT, null, null, Response.Status.UNAUTHORIZED, Void.class);
|
||||
put(USERS_BASEURL, USERS_SELF_ENDPOINT, userRegistration, null, Response.Status.UNAUTHORIZED, Void.class);
|
||||
delete(USERS_BASEURL, USERS_SELF_ENDPOINT, null, Response.Status.UNAUTHORIZED);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,177 @@
|
||||
package it.com.ibm.codey.loyalty.accounts;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.ibm.codey.loyalty.accounts.json.UserEventCheckIn;
|
||||
import com.ibm.codey.loyalty.accounts.json.UserEventInfo;
|
||||
import com.ibm.codey.loyalty.accounts.json.UserRegistration;
|
||||
import com.ibm.codey.loyalty.catalog.json.EventDefinition;
|
||||
|
||||
import it.com.ibm.codey.loyalty.EndpointTestBase;
|
||||
import it.com.ibm.codey.loyalty.util.TestSecurityHelper;
|
||||
|
||||
public class UserEventsEndpointTest extends EndpointTestBase {
|
||||
|
||||
private static String normalPointsEventId, doublePointsEventId;
|
||||
|
||||
private static final int NORMAL_POINTS = 10;
|
||||
private static final int DOUBLE_POINTS = NORMAL_POINTS*2;
|
||||
|
||||
private static final String NORMAL_POINTS_EVENT_NAME = "test event normal points";
|
||||
private static final String DOUBLE_POINTS_EVENT_NAME = "test event double points";
|
||||
|
||||
private static boolean eventsCreated = false;
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
super.setup();
|
||||
// Create events. These are reused for all tests.
|
||||
// This isn't done in a BeforeClass method because it depends on the non-static post() method in the superclass.
|
||||
if (!eventsCreated) {
|
||||
adminAccessToken = TestSecurityHelper.signOn(TEST_ADMIN_USER, TEST_ADMIN_PASSWORD);
|
||||
normalPointsEventId = createEvent(NORMAL_POINTS_EVENT_NAME, NORMAL_POINTS);
|
||||
doublePointsEventId = createEvent(DOUBLE_POINTS_EVENT_NAME, DOUBLE_POINTS);
|
||||
eventsCreated = true;
|
||||
}
|
||||
}
|
||||
|
||||
@After
|
||||
public void teardown() {
|
||||
super.teardown();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEventCheckin() {
|
||||
try {
|
||||
setupUser();
|
||||
|
||||
// Verify no events attended or points earned yet
|
||||
UserEventInfo userEventInfo = get(USERS_BASEURL, USER_EVENTS_SELF_INFO_ENDPOINT, null, userAccessToken, Response.Status.OK, UserEventInfo.class);
|
||||
assertEquals("initial event count is incorrect", 0, userEventInfo.getEventCount());
|
||||
assertEquals("initial points earned is incorrect", 0, userEventInfo.getPointsEarned());
|
||||
|
||||
// Check in to first event
|
||||
UserEventCheckIn checkIn1 = new UserEventCheckIn();
|
||||
checkIn1.setEventId(normalPointsEventId);
|
||||
post(USERS_BASEURL, USER_EVENTS_ENDPOINT, checkIn1, userAccessToken, Response.Status.NO_CONTENT, Void.class);
|
||||
|
||||
// Verify check in to first event
|
||||
String[] eventIds = get(USERS_BASEURL, USER_EVENTS_SELF_ENDPOINT, null, userAccessToken, Response.Status.OK, String[].class);
|
||||
assertEquals("GET returned incorrect number of events checked in", 1, eventIds.length);
|
||||
assertEquals("Event id is incorrect", normalPointsEventId, eventIds[0]);
|
||||
|
||||
// Verify points earned
|
||||
UserEventInfo userEventInfo2 = get(USERS_BASEURL, USER_EVENTS_SELF_INFO_ENDPOINT, null, userAccessToken, Response.Status.OK, UserEventInfo.class);
|
||||
assertEquals("event count is incorrect", 1, userEventInfo2.getEventCount());
|
||||
assertEquals("points earned is incorrect", NORMAL_POINTS, userEventInfo2.getPointsEarned());
|
||||
|
||||
// Check in to second event
|
||||
UserEventCheckIn checkIn2 = new UserEventCheckIn();
|
||||
checkIn2.setEventId(doublePointsEventId);
|
||||
post(USERS_BASEURL, USER_EVENTS_ENDPOINT, checkIn2, userAccessToken, Response.Status.NO_CONTENT, Void.class);
|
||||
|
||||
// Verify check in to both events
|
||||
String[] eventIds2 = get(USERS_BASEURL, USER_EVENTS_SELF_ENDPOINT, null, userAccessToken, Response.Status.OK, String[].class);
|
||||
assertEquals("GET returned incorrect number of events checked in", 2, eventIds2.length);
|
||||
if (eventIds2[0].equals(normalPointsEventId)) {
|
||||
assertEquals("Event id [1] is incorrect", doublePointsEventId, eventIds2[1]);
|
||||
} else {
|
||||
assertEquals("Event id [0] is incorrect", doublePointsEventId, eventIds2[0]);
|
||||
assertEquals("Event id [1] is incorrect", normalPointsEventId, eventIds2[1]);
|
||||
}
|
||||
|
||||
// Verify points earned
|
||||
UserEventInfo userEventInfo3 = get(USERS_BASEURL, USER_EVENTS_SELF_INFO_ENDPOINT, null, userAccessToken, Response.Status.OK, UserEventInfo.class);
|
||||
assertEquals("event count is incorrect", 2, userEventInfo3.getEventCount());
|
||||
assertEquals("points earned is incorrect", NORMAL_POINTS+DOUBLE_POINTS, userEventInfo3.getPointsEarned());
|
||||
} finally {
|
||||
removeUser();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDuplicateEventCheckin() {
|
||||
try {
|
||||
setupUser();
|
||||
// Check in to first event
|
||||
UserEventCheckIn checkIn1 = new UserEventCheckIn();
|
||||
checkIn1.setEventId(normalPointsEventId);
|
||||
post(USERS_BASEURL, USER_EVENTS_ENDPOINT, checkIn1, userAccessToken, Response.Status.NO_CONTENT, Void.class);
|
||||
// Check in to first event again
|
||||
post(USERS_BASEURL, USER_EVENTS_ENDPOINT, checkIn1, userAccessToken, Response.Status.BAD_REQUEST, Void.class);
|
||||
} finally {
|
||||
removeUser();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithNonConsentedUser() {
|
||||
try {
|
||||
setupUser();
|
||||
// Use PUT to change user registration to withdraw consent
|
||||
UserRegistration userRegistration = new UserRegistration();
|
||||
userRegistration.setConsentGiven(CONSENT_NOT_GIVEN);
|
||||
put(USERS_BASEURL, USERS_SELF_ENDPOINT, userRegistration, userAccessToken, Response.Status.NO_CONTENT, Void.class);
|
||||
// Try to check into an event or get information
|
||||
UserEventCheckIn checkIn1 = new UserEventCheckIn();
|
||||
checkIn1.setEventId(normalPointsEventId);
|
||||
post(USERS_BASEURL, USER_EVENTS_ENDPOINT, checkIn1, userAccessToken, Response.Status.CONFLICT, Void.class);
|
||||
get(USERS_BASEURL, USER_EVENTS_SELF_ENDPOINT, null, userAccessToken, Response.Status.CONFLICT, Void.class);
|
||||
get(USERS_BASEURL, USER_EVENTS_SELF_INFO_ENDPOINT, null, userAccessToken, Response.Status.CONFLICT, Void.class);
|
||||
} finally {
|
||||
removeUser();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithUnregisteredUser() {
|
||||
setupUser();
|
||||
removeUser();
|
||||
// Try to check into an event or get information
|
||||
UserEventCheckIn checkIn1 = new UserEventCheckIn();
|
||||
checkIn1.setEventId(normalPointsEventId);
|
||||
post(USERS_BASEURL, USER_EVENTS_ENDPOINT, checkIn1, userAccessToken, Response.Status.BAD_REQUEST, Void.class);
|
||||
get(USERS_BASEURL, USER_EVENTS_SELF_ENDPOINT, null, userAccessToken, Response.Status.BAD_REQUEST, Void.class);
|
||||
get(USERS_BASEURL, USER_EVENTS_SELF_INFO_ENDPOINT, null, userAccessToken, Response.Status.BAD_REQUEST, Void.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAuthenticationFailure() {
|
||||
// Make calls without an authentication header
|
||||
UserEventCheckIn checkIn1 = new UserEventCheckIn();
|
||||
checkIn1.setEventId(normalPointsEventId);
|
||||
post(USERS_BASEURL, USER_EVENTS_ENDPOINT, checkIn1, null, Response.Status.UNAUTHORIZED, Void.class);
|
||||
get(USERS_BASEURL, USER_EVENTS_SELF_ENDPOINT, null, null, Response.Status.UNAUTHORIZED, Void.class);
|
||||
get(USERS_BASEURL, USER_EVENTS_SELF_INFO_ENDPOINT, null, null, Response.Status.UNAUTHORIZED, Void.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBadEventId() {
|
||||
String badEventId1 = "1";
|
||||
String badEventId2 = "/deadbeef-0000-0000-0000-badbadbadbad";
|
||||
UserEventCheckIn checkIn1 = new UserEventCheckIn();
|
||||
checkIn1.setEventId(badEventId1);
|
||||
post(USERS_BASEURL, USER_EVENTS_ENDPOINT, checkIn1, userAccessToken, Response.Status.BAD_REQUEST, Void.class);
|
||||
UserEventCheckIn checkIn2 = new UserEventCheckIn();
|
||||
checkIn2.setEventId(badEventId2);
|
||||
post(USERS_BASEURL, USER_EVENTS_ENDPOINT, checkIn2, userAccessToken, Response.Status.BAD_REQUEST, Void.class);
|
||||
}
|
||||
|
||||
private String createEvent(String eventName, int pointValue) {
|
||||
EventDefinition eventDefinition = new EventDefinition();
|
||||
eventDefinition.setEventName(eventName);
|
||||
eventDefinition.setPointValue(pointValue);
|
||||
String eventId = post(EVENTS_BASEURL, EVENTS_ENDPOINT, eventDefinition, adminAccessToken, Response.Status.CREATED, String.class);
|
||||
return eventId;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,175 @@
|
||||
package it.com.ibm.codey.loyalty.catalog;
|
||||
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
import java.util.HashMap;
|
||||
|
||||
import javax.ws.rs.core.GenericType;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.apache.commons.lang3.RandomStringUtils;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.ibm.codey.loyalty.catalog.json.EventDefinition;
|
||||
|
||||
import it.com.ibm.codey.loyalty.EndpointTestBase;
|
||||
import it.com.ibm.codey.loyalty.util.TestSecurityHelper;
|
||||
|
||||
public class EventsEndpointTest extends EndpointTestBase {
|
||||
|
||||
private String eventName;
|
||||
private int pointValue;
|
||||
private String eventDescription;
|
||||
private String eventLocation;
|
||||
private OffsetDateTime startTime;
|
||||
private OffsetDateTime endTime;
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
super.setup();
|
||||
// Set up a normal user to test methods which don't require admin.
|
||||
setupUser();
|
||||
// Set up an admin user.
|
||||
adminAccessToken = TestSecurityHelper.signOn(TEST_ADMIN_USER, TEST_ADMIN_PASSWORD);
|
||||
// Set up event attributes.
|
||||
String suffix = RandomStringUtils.randomAlphabetic(8);
|
||||
eventName = "test event " + suffix;
|
||||
eventDescription = "all about " + suffix;
|
||||
eventLocation = "at " + suffix;
|
||||
startTime = OffsetDateTime.now();
|
||||
endTime = OffsetDateTime.now().plusHours(1);
|
||||
pointValue = (int) ((Math.random() * 99) + 1);
|
||||
}
|
||||
|
||||
@After
|
||||
public void teardown() {
|
||||
removeUser();
|
||||
super.teardown();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateEvent() {
|
||||
// Use POST to create an event.
|
||||
EventDefinition eventDefinition = new EventDefinition();
|
||||
eventDefinition.setEventName(eventName);
|
||||
eventDefinition.setPointValue(pointValue);
|
||||
eventDefinition.setEventDescription(eventDescription);
|
||||
eventDefinition.setEventLocation(eventLocation);
|
||||
eventDefinition.setStartTime(startTime);
|
||||
eventDefinition.setEndTime(endTime);
|
||||
String eventId = post(EVENTS_BASEURL, EVENTS_ENDPOINT, eventDefinition, adminAccessToken, Response.Status.CREATED, String.class);
|
||||
// Use GET to get the event. This method does not require admin.
|
||||
EventDefinition checkEventDefinition = get(EVENTS_BASEURL, EVENTS_ENDPOINT + '/' + eventId, null, userAccessToken, Response.Status.OK, EventDefinition.class);
|
||||
assertEquals("Event name is incorrect", eventName, checkEventDefinition.getEventName());
|
||||
assertEquals("Point value is incorrect", pointValue, checkEventDefinition.getPointValue());
|
||||
assertEquals("Event description is incorrect", eventDescription, checkEventDefinition.getEventDescription());
|
||||
assertEquals("Event location is incorrect", eventLocation, checkEventDefinition.getEventLocation());
|
||||
assertEquals("Event start time is incorrect", startTime.toInstant(), checkEventDefinition.getStartTime().toInstant()); // Use toInstant to normalize timezones
|
||||
assertEquals("Event end time is incorrect", endTime.toInstant(), checkEventDefinition.getEndTime().toInstant());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetAllEvents() {
|
||||
// Use POST to create an event. An admin user must do this.
|
||||
EventDefinition eventDefinition = new EventDefinition();
|
||||
eventDefinition.setEventName(eventName);
|
||||
eventDefinition.setPointValue(pointValue);
|
||||
eventDefinition.setEventDescription(eventDescription);
|
||||
eventDefinition.setEventLocation(eventLocation);
|
||||
eventDefinition.setStartTime(startTime);
|
||||
eventDefinition.setEndTime(endTime);
|
||||
String eventId = post(EVENTS_BASEURL, EVENTS_ENDPOINT, eventDefinition, adminAccessToken, Response.Status.CREATED, String.class);
|
||||
// Use GET to get all events. This method does not require admin.
|
||||
GenericType<Map<String, EventDefinition>> eventDefinitionMapType = new GenericType<Map<String, EventDefinition>>() {};
|
||||
Map<String, EventDefinition> eventDefinitionsMap = get(EVENTS_BASEURL, EVENTS_ENDPOINT, null, userAccessToken, Response.Status.OK, eventDefinitionMapType.getType());
|
||||
assertNotNull("GET did not return any events", eventDefinitionsMap);
|
||||
EventDefinition checkEventDefinition = eventDefinitionsMap.get(eventId);
|
||||
assertNotNull("GET did not return the event that was just created", checkEventDefinition);
|
||||
assertEquals("Event name is incorrect", eventName, checkEventDefinition.getEventName());
|
||||
assertEquals("Point value is incorrect", pointValue, checkEventDefinition.getPointValue());
|
||||
assertEquals("Event description is incorrect", eventDescription, checkEventDefinition.getEventDescription());
|
||||
assertEquals("Event location is incorrect", eventLocation, checkEventDefinition.getEventLocation());
|
||||
assertEquals("Event start time is incorrect", startTime.toInstant(), checkEventDefinition.getStartTime().toInstant()); // Use toInstant to normalize timezones
|
||||
assertEquals("Event end time is incorrect", endTime.toInstant(), checkEventDefinition.getEndTime().toInstant());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSearchEvent() {
|
||||
// Use POST to create an event. An admin user must do this.
|
||||
EventDefinition eventDefinition = new EventDefinition();
|
||||
eventDefinition.setEventName(eventName);
|
||||
eventDefinition.setPointValue(pointValue);
|
||||
eventDefinition.setEventDescription(eventDescription);
|
||||
eventDefinition.setEventLocation(eventLocation);
|
||||
eventDefinition.setStartTime(startTime);
|
||||
eventDefinition.setEndTime(endTime);
|
||||
String eventId = post(EVENTS_BASEURL, EVENTS_ENDPOINT, eventDefinition, adminAccessToken, Response.Status.CREATED, String.class);
|
||||
// Use GET to search for this event. This method does not require admin.
|
||||
Map<String,Object> queryParams = Collections.singletonMap("id", eventId);
|
||||
GenericType<Map<String, EventDefinition>> eventDefinitionMapType = new GenericType<Map<String, EventDefinition>>() {};
|
||||
Map<String, EventDefinition> eventDefinitionsMap = get(EVENTS_BASEURL, EVENTS_ENDPOINT, queryParams, userAccessToken, Response.Status.OK, eventDefinitionMapType.getType());
|
||||
assertNotNull("GET did not return any events", eventDefinitionsMap);
|
||||
EventDefinition checkEventDefinition = eventDefinitionsMap.get(eventId);
|
||||
assertNotNull("GET did not return the event that was just created", checkEventDefinition);
|
||||
assertEquals("Event name is incorrect", eventName, checkEventDefinition.getEventName());
|
||||
assertEquals("Point value is incorrect", pointValue, checkEventDefinition.getPointValue());
|
||||
assertEquals("Event description is incorrect", eventDescription, checkEventDefinition.getEventDescription());
|
||||
assertEquals("Event location is incorrect", eventLocation, checkEventDefinition.getEventLocation());
|
||||
assertEquals("Event start time is incorrect", startTime.toInstant(), checkEventDefinition.getStartTime().toInstant()); // Use toInstant to normalize timezones
|
||||
assertEquals("Event end time is incorrect", endTime.toInstant(), checkEventDefinition.getEndTime().toInstant());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateAndUpdateEvent() {
|
||||
// Use POST to create an event. An admin user must do this.
|
||||
EventDefinition eventDefinition = new EventDefinition();
|
||||
eventDefinition.setEventName(eventName);
|
||||
eventDefinition.setPointValue(pointValue);
|
||||
eventDefinition.setEventDescription(eventDescription);
|
||||
eventDefinition.setEventLocation(eventLocation);
|
||||
eventDefinition.setStartTime(startTime);
|
||||
eventDefinition.setEndTime(endTime);
|
||||
String eventId = post(EVENTS_BASEURL, EVENTS_ENDPOINT, eventDefinition, adminAccessToken, Response.Status.CREATED, String.class);
|
||||
// Use PUT to modify the event. An admin user must do this.
|
||||
eventDefinition.setEventName(eventName + eventName);
|
||||
eventDefinition.setPointValue(pointValue*2);
|
||||
put(EVENTS_BASEURL, EVENTS_ENDPOINT + '/' + eventId, eventDefinition, adminAccessToken, Response.Status.NO_CONTENT, Void.class);
|
||||
// Use GET to get the event. This method does not require admin.
|
||||
EventDefinition checkEventDefinition = get(EVENTS_BASEURL, EVENTS_ENDPOINT + '/' + eventId, null, userAccessToken, Response.Status.OK, EventDefinition.class);
|
||||
assertEquals("Event name is incorrect", eventDefinition.getEventName(), checkEventDefinition.getEventName());
|
||||
assertEquals("Point value is incorrect", eventDefinition.getPointValue(), checkEventDefinition.getPointValue());
|
||||
assertEquals("Event description is incorrect", eventDescription, checkEventDefinition.getEventDescription());
|
||||
assertEquals("Event location is incorrect", eventLocation, checkEventDefinition.getEventLocation());
|
||||
assertEquals("Event start time is incorrect", startTime.toInstant(), checkEventDefinition.getStartTime().toInstant()); // Use toInstant to normalize timezones
|
||||
assertEquals("Event end time is incorrect", endTime.toInstant(), checkEventDefinition.getEndTime().toInstant());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAuthenticationFailure() {
|
||||
// Make calls without an authentication header.
|
||||
EventDefinition eventDefinition = new EventDefinition();
|
||||
eventDefinition.setEventName(eventName);
|
||||
eventDefinition.setPointValue(pointValue);
|
||||
post(EVENTS_BASEURL, EVENTS_ENDPOINT, eventDefinition, null, Response.Status.UNAUTHORIZED, Void.class);
|
||||
put(EVENTS_BASEURL, EVENTS_ENDPOINT + "/deadbeef-0000-0000-0000-badbadbadbad", eventDefinition, null, Response.Status.UNAUTHORIZED, Void.class);
|
||||
get(EVENTS_BASEURL, EVENTS_ENDPOINT, null, null, Response.Status.UNAUTHORIZED, Void.class);
|
||||
get(EVENTS_BASEURL, EVENTS_ENDPOINT + "/deadbeef-0000-0000-0000-badbadbadbad", null, null, Response.Status.UNAUTHORIZED, Void.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAuthorizationFailure() {
|
||||
// Normal users do not have access to POST or PUT.
|
||||
EventDefinition eventDefinition = new EventDefinition();
|
||||
eventDefinition.setEventName(eventName);
|
||||
eventDefinition.setPointValue(pointValue);
|
||||
post(EVENTS_BASEURL, EVENTS_ENDPOINT, eventDefinition, userAccessToken, Response.Status.FORBIDDEN, Void.class);
|
||||
put(EVENTS_BASEURL, EVENTS_ENDPOINT + "/deadbeef-0000-0000-0000-badbadbadbad", eventDefinition, userAccessToken, Response.Status.FORBIDDEN, Void.class);
|
||||
}
|
||||
|
||||
}
|
||||
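The post, get and put calls above, together with adminAccessToken and userAccessToken, come from a shared test base class that is not part of this excerpt. Purely as an assumption about that base class, a minimal sketch of what the get helper could look like:

// Hypothetical sketch only: the real base class is not in this commit excerpt,
// so the class name, method shape and assertion messages are assumptions.
package it.com.ibm.codey.loyalty.util;

import java.lang.reflect.Type;
import java.util.Map;

import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import static org.junit.Assert.assertEquals;

public class TestRequestHelper {

    // Issues a GET against baseUrl + endpoint, asserts the expected HTTP status,
    // and deserializes the body into the requested type (or returns null for Void).
    public static <T> T get(String baseUrl, String endpoint, Map<String, Object> queryParams,
            String accessToken, Response.Status expectedStatus, Type responseType) {
        WebTarget target = ClientBuilder.newClient().target(baseUrl).path(endpoint);
        if (queryParams != null) {
            for (Map.Entry<String, Object> param : queryParams.entrySet()) {
                target = target.queryParam(param.getKey(), param.getValue());
            }
        }
        Invocation.Builder request = target.request(MediaType.APPLICATION_JSON);
        if (accessToken != null) {
            request = request.header(HttpHeaders.AUTHORIZATION, "Bearer " + accessToken);
        }
        try (Response response = request.buildGet().invoke()) {
            assertEquals("Unexpected status code", expectedStatus.getStatusCode(), response.getStatus());
            return responseType == Void.class ? null : response.readEntity(new GenericType<T>(responseType));
        }
    }
}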
@@ -0,0 +1,104 @@
|
||||
package it.com.ibm.codey.loyalty.util;
|
||||
|
||||
import java.net.URL;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Base64;
|
||||
|
||||
import javax.json.Json;
|
||||
import javax.json.JsonObject;
|
||||
import javax.ws.rs.client.Client;
|
||||
import javax.ws.rs.client.ClientBuilder;
|
||||
import javax.ws.rs.client.Entity;
|
||||
import javax.ws.rs.core.Form;
|
||||
import javax.ws.rs.core.HttpHeaders;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.apache.cxf.jaxrs.provider.jsrjsonp.JsrJsonpProvider;
|
||||
|
||||
public class TestSecurityHelper {
|
||||
|
||||
private static String APPID_SERVICE_URL;
|
||||
|
||||
private static String APPID_TENANTID;
|
||||
|
||||
private static String IAM_APIKEY;
|
||||
|
||||
private static String IAM_SERVICE_URL;
|
||||
|
||||
private static String OIDC_ISSUERIDENTIFIER;
|
||||
|
||||
private static String OIDC_CLIENTID;
|
||||
|
||||
private static String OIDC_CLIENTPASSWORD;
|
||||
|
||||
private static String iamAuthHeader;
|
||||
|
||||
private static String oidcAuthHeader;
|
||||
|
||||
static {
|
||||
APPID_SERVICE_URL = System.getenv("APPID_SERVICE_URL");
|
||||
APPID_TENANTID = System.getenv("APPID_TENANTID");
|
||||
IAM_APIKEY = System.getenv("IAM_APIKEY");
|
||||
IAM_SERVICE_URL = System.getenv("IAM_SERVICE_URL");
|
||||
OIDC_ISSUERIDENTIFIER = System.getenv("OIDC_ISSUERIDENTIFIER");
|
||||
OIDC_CLIENTID = System.getenv("OIDC_CLIENTID");
|
||||
OIDC_CLIENTPASSWORD = System.getenv("OIDC_CLIENTPASSWORD");
|
||||
String oidcClientCredentials = OIDC_CLIENTID + ":" + OIDC_CLIENTPASSWORD;
|
||||
oidcAuthHeader = "Basic " + Base64.getEncoder().encodeToString(oidcClientCredentials.getBytes(StandardCharsets.UTF_8));
|
||||
}
|
||||
|
||||
public static void createUser(String user, String password) {
|
||||
Client client = ClientBuilder.newClient();
|
||||
client.register(JsrJsonpProvider.class);
|
||||
// Get IAM bearer token when creating the first user. The token can be reused after that.
|
||||
if (iamAuthHeader == null) {
|
||||
Form form = new Form();
|
||||
form.param("grant_type", "urn:ibm:params:oauth:grant-type:apikey");
|
||||
form.param("apikey", IAM_APIKEY);
|
||||
String iamToken;
|
||||
try (Response response = client.target(IAM_SERVICE_URL).request(MediaType.APPLICATION_JSON).buildPost(Entity.form(form)).invoke()) {
|
||||
if (response.getStatus() != Response.Status.OK.getStatusCode()) {
|
||||
throw new RuntimeException("TEST CASE FAILURE. Cannot obtain IAM access token. Status code " + response.getStatus() + " Response =" + response.readEntity(JsonObject.class));
|
||||
}
|
||||
JsonObject obj = response.readEntity(JsonObject.class);
|
||||
iamToken = obj.getString("access_token");
|
||||
}
|
||||
iamAuthHeader = "Bearer " + iamToken;
|
||||
}
|
||||
// Create the user
|
||||
JsonObject request = Json.createObjectBuilder()
|
||||
.add("userName", user)
|
||||
.add("password", password)
|
||||
.add("active", true)
|
||||
.add("emails", Json.createArrayBuilder()
|
||||
.add(Json.createObjectBuilder()
|
||||
.add("value", "ibmtestloyalty@yopmail.com")
|
||||
.add("primary", true))
|
||||
).build();
|
||||
String createUserURL = APPID_SERVICE_URL + "/management/v4/" + APPID_TENANTID + "/cloud_directory/Users";
|
||||
try (Response response = client.target(createUserURL).request(MediaType.APPLICATION_JSON).header(HttpHeaders.AUTHORIZATION, iamAuthHeader).buildPost(Entity.json(request)).invoke()) {
|
||||
if (response.getStatus() != Response.Status.CREATED.getStatusCode()) {
|
||||
throw new RuntimeException("TEST CASE FAILURE. Cannot create user. Status code " + response.getStatus() + " Response =" + response.readEntity(JsonObject.class));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static String signOn(String user, String password) {
|
||||
String url = OIDC_ISSUERIDENTIFIER + "/token";
|
||||
Form form = new Form();
|
||||
form.param("grant_type", "password");
|
||||
form.param("username", user);
|
||||
form.param("password", password);
|
||||
Client client = ClientBuilder.newClient();
|
||||
client.register(JsrJsonpProvider.class);
|
||||
try (Response response = client.target(url).request(MediaType.APPLICATION_JSON).header(HttpHeaders.AUTHORIZATION, oidcAuthHeader).buildPost(Entity.form(form)).invoke()) {
|
||||
if (response.getStatus() != Response.Status.OK.getStatusCode()) {
|
||||
throw new RuntimeException("TEST CASE FAILURE. Cannot obtain access token. Status code " + response.getStatus() + " Response =" + response.readEntity(JsonObject.class));
|
||||
}
|
||||
JsonObject obj = response.readEntity(JsonObject.class);
|
||||
return obj.getString("access_token");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
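TestSecurityHelper first exchanges the IAM API key for a bearer token, uses it to create a Cloud Directory user through the App ID management API, and then signs that user on with the resource-owner password grant. A minimal usage sketch for obtaining a regular user's token follows; the user name and password are assumptions, and producing an admin token additionally requires the admin scope to be assigned in App ID, which this helper does not do:

// Hypothetical usage only: the user name and password are assumptions.
String testUser = "loyaltyuser@yopmail.com";
String testPassword = "change-me-1";

// Creates the user in the App ID cloud directory (requires the IAM_APIKEY and APPID_* env vars).
TestSecurityHelper.createUser(testUser, testPassword);

// Resource-owner password grant against ${OIDC_ISSUERIDENTIFIER}/token; the returned
// string is the raw access token used as the bearer credential in the tests above.
String userAccessToken = TestSecurityHelper.signOn(testUser, testPassword);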
216
dev/tekton/examples/example-bank/bank-app-backend/pom.xml
Executable file
@@ -0,0 +1,216 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>parent</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<properties>
|
||||
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
|
||||
<maven.compiler.source>1.8</maven.compiler.source>
|
||||
<maven.compiler.target>1.8</maven.compiler.target>
|
||||
<!-- Plugins -->
|
||||
<version.maven-war-plugin>3.2.2</version.maven-war-plugin>
|
||||
<version.maven-surefire-plugin>3.0.0-M1</version.maven-surefire-plugin>
|
||||
<version.maven-failsafe-plugin>3.0.0-M1</version.maven-failsafe-plugin>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<!-- lombok -->
|
||||
<dependency>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
<version>1.18.16</version>
|
||||
</dependency>
|
||||
<!-- For tests -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>4.13.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-rs-client</artifactId>
|
||||
<version>3.2.6</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.cxf</groupId>
|
||||
<artifactId>cxf-rt-rs-extension-providers</artifactId>
|
||||
<version>3.2.6</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish</groupId>
|
||||
<artifactId>javax.json</artifactId>
|
||||
<version>1.1.4</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<!-- Support for JDK 9 and above -->
|
||||
<dependency>
|
||||
<groupId>javax.xml.bind</groupId>
|
||||
<artifactId>jaxb-api</artifactId>
|
||||
<version>2.3.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-core</artifactId>
|
||||
<version>2.3.0.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-impl</artifactId>
|
||||
<version>2.3.2</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.activation</groupId>
|
||||
<artifactId>activation</artifactId>
|
||||
<version>1.1.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<!-- JSON-B provider for integration tests -->
|
||||
<dependency>
|
||||
<groupId>org.eclipse</groupId>
|
||||
<artifactId>yasson</artifactId>
|
||||
<version>1.0.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<dependencyManagement>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>io.openliberty.features</groupId>
|
||||
<artifactId>features-bom</artifactId>
|
||||
<version>19.0.0.12</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</dependencyManagement>
|
||||
|
||||
<build>
|
||||
<pluginManagement>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-war-plugin</artifactId>
|
||||
<version>${version.maven-war-plugin}</version>
|
||||
<configuration>
|
||||
<failOnMissingWebXml>false</failOnMissingWebXml>
|
||||
<packagingExcludes>pom.xml</packagingExcludes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<!-- Plugin to run unit tests -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<version>${version.maven-surefire-plugin}</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>test</phase>
|
||||
<id>default-test</id>
|
||||
<configuration>
|
||||
<excludes>
|
||||
<exclude>**/it/**</exclude>
|
||||
</excludes>
|
||||
<reportsDirectory>
|
||||
${project.build.directory}/test-reports/unit
|
||||
</reportsDirectory>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
<configuration>
|
||||
<skipTests>${skipTests}</skipTests>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<!-- Plugin to run functional tests -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-failsafe-plugin</artifactId>
|
||||
<version>${version.maven-failsafe-plugin}</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>integration-test</phase>
|
||||
<id>integration-test</id>
|
||||
<goals>
|
||||
<goal>integration-test</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<includes>
|
||||
<include>**/it/**</include>
|
||||
</includes>
|
||||
<systemPropertyVariables>
|
||||
<liberty.test.port>${http.port}</liberty.test.port>
|
||||
<war.name>${app.name}</war.name>
|
||||
</systemPropertyVariables>
|
||||
<trimStackTrace>false</trimStackTrace>
|
||||
</configuration>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>verify-results</id>
|
||||
<goals>
|
||||
<goal>verify</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
<configuration>
|
||||
<summaryFile>
|
||||
${project.build.directory}/test-reports/it/failsafe-summary.xml
|
||||
</summaryFile>
|
||||
<reportsDirectory>
|
||||
${project.build.directory}/test-reports/it
|
||||
</reportsDirectory>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>3.8.1</version>
|
||||
<configuration>
|
||||
<source>16</source>
|
||||
<target>16</target>
|
||||
<fork>true</fork>
|
||||
<compilerArgs>
|
||||
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED</arg>
|
||||
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED</arg>
|
||||
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED</arg>
|
||||
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED</arg>
|
||||
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED</arg>
|
||||
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED</arg>
|
||||
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED</arg>
|
||||
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED</arg>
|
||||
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED</arg>
|
||||
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.jvm=ALL-UNNAMED</arg>
|
||||
</compilerArgs>
|
||||
<annotationProcessorPaths>
|
||||
<path>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
<version>1.18.16</version>
|
||||
</path>
|
||||
</annotationProcessorPaths>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</pluginManagement>
|
||||
</build>
|
||||
|
||||
<modules>
|
||||
<module>common</module>
|
||||
<module>transaction-service</module>
|
||||
<module>user-service</module>
|
||||
<module>integration-tests</module>
|
||||
</modules>
|
||||
|
||||
</project>
|
||||
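In the parent POM above, surefire excludes **/it/** during the test phase while failsafe includes only **/it/** during integration-test, so the package name alone decides whether a class runs as a unit test or an integration test. A minimal sketch of a test class that failsafe would pick up; the package and class name are illustrative assumptions:

// Hypothetical integration test: the package and class name are assumptions; the only
// requirement imposed by the build above is that integration tests live under an "it" package.
package it.com.ibm.codey.bank;

import org.junit.Test;

import static org.junit.Assert.assertNotNull;

public class PlacementIT {

    @Test
    public void runsInIntegrationTestPhase() {
        // A real test would exercise the deployed service; this only shows where failsafe looks
        // and that the liberty.test.port system property is wired in by the failsafe configuration.
        assertNotNull(System.getProperty("liberty.test.port", "9080"));
    }
}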
@@ -0,0 +1,11 @@
|
||||
FROM open-liberty:19.0.0.12-kernel-java8-openj9
|
||||
|
||||
USER root
|
||||
RUN apt-get update && apt-get upgrade -y e2fsprogs libgnutls30 libgcrypt20 libsasl2-2
|
||||
USER 1001
|
||||
|
||||
COPY --chown=1001:0 src/main/liberty/config/ /config/
|
||||
COPY --chown=1001:0 src/main/resources/security/ /config/resources/security/
|
||||
COPY --chown=1001:0 target/*.war /config/apps/
|
||||
COPY --chown=1001:0 target/jdbc/* /config/jdbc/
|
||||
RUN configure.sh
|
||||
@@ -0,0 +1,60 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: transaction-service
|
||||
labels:
|
||||
app: transaction-service
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: transaction-service
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: transaction-service
|
||||
annotations:
|
||||
sidecar.istio.io/inject: "false"
|
||||
spec:
|
||||
containers:
|
||||
- name: transaction-service
|
||||
image: ykoyfman/bank-transaction-service:1.0
|
||||
imagePullPolicy: Always
|
||||
ports:
|
||||
- name: http-server
|
||||
containerPort: 9080
|
||||
envFrom:
|
||||
- secretRef:
|
||||
name: bank-db-secret
|
||||
- secretRef:
|
||||
name: bank-oidc-secret
|
||||
env:
|
||||
- name: USER_SERVICE_URL
|
||||
value: "http://user-service:9080/bank/v1/users"
|
||||
- name: KNATIVE_SERVICE_URL
|
||||
value: "http://process-transaction.example-bank.svc.cluster.local"
|
||||
- name: WLP_LOGGING_CONSOLE_LOGLEVEL
|
||||
value: INFO
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: transaction-service
|
||||
labels:
|
||||
app: transaction-service
|
||||
spec:
|
||||
ports:
|
||||
- port: 9080
|
||||
targetPort: 9080
|
||||
selector:
|
||||
app: transaction-service
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Route
|
||||
metadata:
|
||||
name: transaction-service
|
||||
spec:
|
||||
to:
|
||||
kind: Service
|
||||
name: transaction-service
|
||||
|
||||
@@ -0,0 +1,78 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>parent</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>transaction-service</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<packaging>war</packaging>
|
||||
|
||||
<dependencies>
|
||||
<!-- Open Liberty Features -->
|
||||
<dependency>
|
||||
<groupId>io.openliberty.features</groupId>
|
||||
<artifactId>microProfile-3.0</artifactId>
|
||||
<type>esa</type>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>common</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<type>jar</type>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<finalName>${project.artifactId}</finalName>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-war-plugin</artifactId>
|
||||
</plugin>
|
||||
<!-- Add JDBC driver to package -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
<version>3.0.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>copy-jdbc-driver</id>
|
||||
<phase>package</phase>
|
||||
<goals>
|
||||
<goal>copy</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<artifactItems>
|
||||
<artifactItem>
|
||||
<groupId>org.postgresql</groupId>
|
||||
<artifactId>postgresql</artifactId>
|
||||
<version>42.2.8</version>
|
||||
<outputDirectory>${project.build.directory}/jdbc</outputDirectory>
|
||||
</artifactItem>
|
||||
</artifactItems>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<!-- Plugin to run unit tests -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
</plugin>
|
||||
<!-- Plugin to run functional tests -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-failsafe-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
@@ -0,0 +1,25 @@
|
||||
package com.ibm.codey.bank;
|
||||
|
||||
import javax.enterprise.context.ApplicationScoped;
|
||||
|
||||
import org.eclipse.microprofile.health.HealthCheck;
|
||||
import org.eclipse.microprofile.health.HealthCheckResponse;
|
||||
import org.eclipse.microprofile.health.Liveness;
|
||||
|
||||
@Liveness
|
||||
@ApplicationScoped
|
||||
public class LivenessCheck implements HealthCheck {
|
||||
|
||||
private boolean isAlive() {
|
||||
// perform health checks here
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public HealthCheckResponse call() {
|
||||
boolean up = isAlive();
|
||||
return HealthCheckResponse.named(this.getClass().getSimpleName()).state(up).build();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
package com.ibm.codey.bank;
|
||||
|
||||
import javax.ws.rs.ApplicationPath;
|
||||
import javax.ws.rs.core.Application;
|
||||
|
||||
@ApplicationPath("/bank")
|
||||
public class LoyaltyApplication extends Application {
|
||||
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
package com.ibm.codey.bank;
|
||||
|
||||
import javax.enterprise.context.ApplicationScoped;
|
||||
|
||||
import org.eclipse.microprofile.health.HealthCheck;
|
||||
import org.eclipse.microprofile.health.HealthCheckResponse;
|
||||
import org.eclipse.microprofile.health.Readiness;
|
||||
|
||||
@Readiness
|
||||
@ApplicationScoped
|
||||
public class ReadinessCheck implements HealthCheck {
|
||||
|
||||
private boolean isReady() {
|
||||
// perform readiness checks, e.g. database connection, etc.
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public HealthCheckResponse call() {
|
||||
boolean up = isReady();
|
||||
return HealthCheckResponse.named(this.getClass().getSimpleName()).state(up).build();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,189 @@
|
||||
package com.ibm.codey.bank.catalog;
|
||||
|
||||
import java.net.URL;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
import javax.enterprise.context.RequestScoped;
|
||||
import javax.inject.Inject;
|
||||
import javax.interceptor.Interceptors;
|
||||
import javax.transaction.Transactional;
|
||||
import javax.ws.rs.Consumes;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.PUT;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.Produces;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.WebApplicationException;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.eclipse.microprofile.config.inject.ConfigProperty;
|
||||
import org.eclipse.microprofile.rest.client.RestClientBuilder;
|
||||
|
||||
import com.ibm.codey.bank.BaseResource;
|
||||
import com.ibm.codey.bank.accounts.json.UserRegistration;
|
||||
import com.ibm.codey.bank.accounts.json.UserRegistrationInfo;
|
||||
import com.ibm.codey.bank.catalog.dao.TransactionDao;
|
||||
import com.ibm.codey.bank.catalog.json.CreateTransactionDefinition;
|
||||
import com.ibm.codey.bank.catalog.json.RewardTransactionDefinition;
|
||||
import com.ibm.codey.bank.catalog.models.Category;
|
||||
import com.ibm.codey.bank.catalog.models.Transaction;
|
||||
import com.ibm.codey.bank.interceptor.LoggingInterceptor;
|
||||
import com.ibm.codey.bank.interceptor.binding.RequiresAuthorization;
|
||||
|
||||
@RequestScoped
|
||||
@Interceptors(LoggingInterceptor.class)
|
||||
@Path("v1/transactions")
|
||||
public class TransactionResource extends BaseResource {
|
||||
|
||||
@Inject
|
||||
private TransactionDao transactionDao;
|
||||
|
||||
@Inject
|
||||
@ConfigProperty(name = "USER_SERVICE_URL")
|
||||
private URL userServiceURL;
|
||||
|
||||
/**
|
||||
* This method creates a transaction.
|
||||
*/
|
||||
@POST
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Transactional
|
||||
public Response createTransaction(CreateTransactionDefinition createTransactionDefinition) {
|
||||
|
||||
Transaction newTransaction = new Transaction();
|
||||
// create new uuid for new transaction
|
||||
String transactionId = UUID.randomUUID().toString();
|
||||
|
||||
// get subject
|
||||
String subject = this.getCallerSubject();
|
||||
// get user
|
||||
UserService userService = RestClientBuilder.newBuilder().baseUrl(userServiceURL).build(UserService.class);
|
||||
try {
|
||||
UserRegistrationInfo userRegistration = userService.getUserConsent(this.getCallerCredentials());
|
||||
if (!userRegistration.isConsentGiven()) {
|
||||
return Response.status(Response.Status.CONFLICT).entity("User has not consented to program").build();
|
||||
}
|
||||
|
||||
newTransaction.setTransactionId(transactionId);
|
||||
newTransaction.setUserId(userRegistration.getUserId());
|
||||
newTransaction.setTransactionName(createTransactionDefinition.getTransactionName());
|
||||
newTransaction.setCategory(createTransactionDefinition.getCategory());
|
||||
newTransaction.setAmount(createTransactionDefinition.getAmount());
|
||||
newTransaction.setProcessed(false);
|
||||
newTransaction.setDate(OffsetDateTime.now());
|
||||
transactionDao.createTransaction(newTransaction);
|
||||
|
||||
return Response.status(Response.Status.NO_CONTENT).build();
|
||||
} catch(WebApplicationException wae) {
|
||||
int status = wae.getResponse().getStatus();
|
||||
if (status == Response.Status.NOT_FOUND.getStatusCode()) {
|
||||
return Response.status(Response.Status.NOT_FOUND).entity("User not registered").build();
|
||||
} else {
|
||||
wae.printStackTrace();
|
||||
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This method gets the transactions of a user.
|
||||
*/
|
||||
@GET
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
@Transactional
|
||||
public Response getTransactions() {
|
||||
// get subject
|
||||
String subject = this.getCallerSubject();
|
||||
// get user
|
||||
UserService userService = RestClientBuilder.newBuilder().baseUrl(userServiceURL).build(UserService.class);
|
||||
try {
|
||||
UserRegistrationInfo userRegistration = userService.getUserConsent(this.getCallerCredentials());
|
||||
if (!userRegistration.isConsentGiven()) {
|
||||
return Response.status(Response.Status.CONFLICT).entity("User has not consented to program").build();
|
||||
}
|
||||
|
||||
List<Transaction> transactions = transactionDao.findTransactionsByUser(userRegistration.getUserId());
|
||||
return Response.status(Response.Status.OK).entity(transactions).build();
|
||||
} catch(WebApplicationException wae) {
|
||||
int status = wae.getResponse().getStatus();
|
||||
if (status == Response.Status.NOT_FOUND.getStatusCode()) {
|
||||
return Response.status(Response.Status.NOT_FOUND).entity("User not registered").build();
|
||||
} else {
|
||||
wae.printStackTrace();
|
||||
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This method gets the spending categories of a user.
|
||||
*/
|
||||
@GET
|
||||
@Path("spending")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
@Transactional
|
||||
public Response getCategory() {
|
||||
// get subject
|
||||
String subject = this.getCallerSubject();
|
||||
// get user
|
||||
UserService userService = RestClientBuilder.newBuilder().baseUrl(userServiceURL).build(UserService.class);
|
||||
try {
|
||||
UserRegistrationInfo userRegistration = userService.getUserConsent(this.getCallerCredentials());
|
||||
if (!userRegistration.isConsentGiven()) {
|
||||
return Response.status(Response.Status.CONFLICT).entity("User has not consented to program").build();
|
||||
}
|
||||
|
||||
List<Category> categories = transactionDao.groupCategoriesForUser(userRegistration.getUserId());
|
||||
return Response.status(Response.Status.OK).entity(categories).build();
|
||||
} catch(WebApplicationException wae) {
|
||||
int status = wae.getResponse().getStatus();
|
||||
if (status == Response.Status.NOT_FOUND.getStatusCode()) {
|
||||
return Response.status(Response.Status.NOT_FOUND).entity("User not registered").build();
|
||||
} else {
|
||||
wae.printStackTrace();
|
||||
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: require admin scope
|
||||
/**
|
||||
* This method updates a transaction.
|
||||
*/
|
||||
@PUT
|
||||
@Path("reward/{transactionId}")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Transactional
|
||||
@RequiresAuthorization
|
||||
public Response updateTransaction(@PathParam("transactionId") String transactionId, RewardTransactionDefinition rewardTransactionDefinition) {
|
||||
// Validate UUID is formatted correctly.
|
||||
try {
|
||||
UUID.fromString(transactionId);
|
||||
} catch(IllegalArgumentException iae) {
|
||||
return Response.status(Response.Status.BAD_REQUEST).entity("Invalid transaction id").build();
|
||||
}
|
||||
|
||||
Transaction transaction = transactionDao.findTransactionById(transactionId);
|
||||
if (transaction == null) {
|
||||
return Response.status(Response.Status.NOT_FOUND).entity("Transaction not found").build();
|
||||
}
|
||||
|
||||
if (transaction.isProcessed()) {
|
||||
return Response.status(Response.Status.BAD_REQUEST).entity("Transaction already processed").build();
|
||||
}
|
||||
|
||||
transaction.setPointsEarned(rewardTransactionDefinition.getPointsEarned());
|
||||
transaction.setProcessed(true);
|
||||
transactionDao.updateTransaction(transaction);
|
||||
|
||||
return Response.status(Response.Status.NO_CONTENT).build();
|
||||
}
|
||||
|
||||
}
|
||||
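TransactionResource builds a MicroProfile Rest Client named UserService against USER_SERVICE_URL and calls getUserConsent with the caller's credentials, but the interface itself is not included in this excerpt. A sketch of a shape the calls above would type-check against, offered as an assumption rather than the actual source:

// Hypothetical interface: path, header handling and annotations are assumptions inferred
// from how TransactionResource calls it and from the user service's GET v1/users/self
// endpoint shown later in this commit.
package com.ibm.codey.bank.catalog;

import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;

import com.ibm.codey.bank.accounts.json.UserRegistrationInfo;

public interface UserService {

    // Looks up the caller's registration record, forwarding the caller's Authorization header.
    @GET
    @Path("self")
    @Produces(MediaType.APPLICATION_JSON)
    UserRegistrationInfo getUserConsent(@HeaderParam(HttpHeaders.AUTHORIZATION) String credentials);
}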
@@ -0,0 +1,79 @@
|
||||
package com.ibm.codey.bank.catalog.dao;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import javax.enterprise.context.RequestScoped;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.NoResultException;
|
||||
import javax.persistence.PersistenceContext;
|
||||
|
||||
import com.ibm.codey.bank.catalog.models.Category;
|
||||
import com.ibm.codey.bank.catalog.models.Transaction;
|
||||
|
||||
@RequestScoped
|
||||
public class TransactionDao {
|
||||
|
||||
@PersistenceContext(name = "jpa-unit")
|
||||
private EntityManager em;
|
||||
|
||||
public void createTransaction(Transaction transaction) {
|
||||
em.persist(transaction);
|
||||
}
|
||||
|
||||
public void updateTransaction(Transaction transaction) {
|
||||
em.merge(transaction);
|
||||
}
|
||||
|
||||
public List<Transaction> findTransactions() {
|
||||
return em.createNamedQuery("Transaction.findTransactions", Transaction.class)
|
||||
.getResultList();
|
||||
}
|
||||
|
||||
public List<Transaction> findTransactionsByUser(String userId) {
|
||||
return em.createNamedQuery("Transaction.findTransactionsByUser", Transaction.class)
|
||||
.setParameter("userId", userId)
|
||||
.getResultList();
|
||||
}
|
||||
|
||||
public Transaction findTransactionById(String transactionId) {
|
||||
try {
|
||||
return em.createNamedQuery("Transaction.findTransactionByIdOnly", Transaction.class)
|
||||
.setParameter("transactionId", transactionId)
|
||||
.getSingleResult();
|
||||
} catch(NoResultException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public Transaction findTransactionById(String transactionId, String userId) {
|
||||
try {
|
||||
return em.createNamedQuery("Transaction.findTransactionById", Transaction.class)
|
||||
.setParameter("transactionId", transactionId)
|
||||
.setParameter("userId", userId)
|
||||
.getSingleResult();
|
||||
} catch(NoResultException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public List<Category> groupCategoriesForUser(String userId) {
|
||||
try {
|
||||
// Each result row of the aggregate query is an Object[] holding {category, summed amount}.
List<Object[]> rows = em.createNamedQuery("Transaction.groupCategoriesForUser", Object[].class)
|
||||
.setParameter("userId", userId)
|
||||
.getResultList();
|
||||
List<Category> response = new ArrayList<>();
|
||||
for (Object[] row: rows) {
|
||||
if (row.length == 2) {
|
||||
response.add(new Category(row[0].toString(), new BigDecimal(row[1].toString())));
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
} catch(NoResultException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
package com.ibm.codey.bank.catalog.models;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
|
||||
@Getter @Setter
|
||||
public class Category {
|
||||
|
||||
private String category;
|
||||
private BigDecimal amount;
|
||||
|
||||
public Category(String category, BigDecimal amount) {
|
||||
this.category = category;
|
||||
this.amount = amount;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,64 @@
|
||||
package com.ibm.codey.bank.catalog.models;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.math.BigDecimal;
|
||||
import java.time.OffsetDateTime;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.EntityListeners;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.IdClass;
|
||||
import javax.persistence.NamedQueries;
|
||||
import javax.persistence.NamedQuery;
|
||||
import javax.persistence.Table;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
|
||||
@Entity
|
||||
@Table(name = "transactions")
|
||||
@IdClass(TransactionPK.class)
|
||||
@NamedQueries({
|
||||
@NamedQuery(name = "Transaction.findTransactions", query = "SELECT t FROM Transaction t"),
|
||||
@NamedQuery(name = "Transaction.findTransactionsByUser", query = "SELECT t FROM Transaction t WHERE t.userId = :userId"),
|
||||
@NamedQuery(name = "Transaction.findTransactionById", query = "SELECT t FROM Transaction t WHERE t.transactionId = :transactionId AND t.userId = :userId"),
|
||||
@NamedQuery(name = "Transaction.findTransactionByIdOnly", query = "SELECT t FROM Transaction t WHERE t.transactionId = :transactionId"),
|
||||
@NamedQuery(name = "Transaction.groupCategoriesForUser", query = "SELECT COALESCE(t.category, 'Uncategorized'), SUM (t.amount) FROM Transaction t WHERE t.userId = :userId GROUP BY t.category")
|
||||
})
|
||||
@Getter @Setter
|
||||
@EntityListeners(TransactionListener.class)
|
||||
public class Transaction implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@Column(name = "transaction_id")
|
||||
@Id
|
||||
private String transactionId;
|
||||
|
||||
@Id
|
||||
@Column(name = "usr")
|
||||
private String userId;
|
||||
|
||||
@Column(name = "transaction_name")
|
||||
private String transactionName;
|
||||
|
||||
@Column(name = "amount")
|
||||
private BigDecimal amount;
|
||||
|
||||
@Column(name = "category")
|
||||
private String category;
|
||||
|
||||
@Column(name = "points_earned")
|
||||
private BigDecimal pointsEarned;
|
||||
|
||||
@Column(name = "processed")
|
||||
private boolean processed;
|
||||
|
||||
@Column(name = "date")
|
||||
private OffsetDateTime date;
|
||||
|
||||
public Transaction() {
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
package com.ibm.codey.bank.catalog.models;
|
||||
|
||||
import java.net.URL;
|
||||
|
||||
import javax.enterprise.context.RequestScoped;
|
||||
import javax.inject.Inject;
|
||||
import javax.persistence.PostPersist;
|
||||
import javax.ws.rs.WebApplicationException;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.eclipse.microprofile.config.inject.ConfigProperty;
|
||||
import org.eclipse.microprofile.rest.client.RestClientBuilder;
|
||||
|
||||
import com.ibm.codey.bank.catalog.KnativeService;
|
||||
|
||||
@RequestScoped
|
||||
public class TransactionListener {
|
||||
|
||||
@Inject
|
||||
@ConfigProperty(name = "KNATIVE_SERVICE_URL")
|
||||
private URL knativeServiceURL;
|
||||
|
||||
@PostPersist
|
||||
public void sendToProcessing(Transaction transaction) {
|
||||
KnativeService knativeService = RestClientBuilder.newBuilder().baseUrl(knativeServiceURL).build(KnativeService.class);
|
||||
|
||||
try {
|
||||
knativeService.processTransaction(transaction.getTransactionId(), transaction.getCategory(), transaction.getAmount().toString());
|
||||
} catch (WebApplicationException wae) {
|
||||
System.out.println("WebApplicationException while sending transaction to processing");
|
||||
int status = wae.getResponse().getStatus();
|
||||
if (status == Response.Status.NOT_FOUND.getStatusCode()) {
|
||||
// TODO: ..
|
||||
} else {
|
||||
wae.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
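The @PostPersist listener above forwards every newly persisted transaction to the Knative process-transaction service through a Rest Client interface named KnativeService, which is also not included in this excerpt. A sketch of a shape compatible with the three-string call above; the HTTP verb, media type and parameter names are assumptions:

// Hypothetical interface: only the three String parameters are known from the caller;
// the verb, path and parameter names below are assumptions.
package com.ibm.codey.bank.catalog;

import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.POST;
import javax.ws.rs.core.MediaType;

public interface KnativeService {

    // Hands the transaction to the serverless processor, which is expected to call back
    // PUT v1/transactions/reward/{transactionId} once points have been calculated.
    @POST
    @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
    void processTransaction(@FormParam("transactionId") String transactionId,
                            @FormParam("category") String category,
                            @FormParam("amount") String amount);
}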
@@ -0,0 +1,15 @@
|
||||
package com.ibm.codey.bank.catalog.models;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
|
||||
@Getter @Setter
|
||||
public class TransactionPK implements Serializable {
|
||||
|
||||
private String transactionId;
|
||||
|
||||
private String userId;
|
||||
|
||||
}
|
||||
@@ -0,0 +1,2 @@
|
||||
default.http.port=9080
|
||||
default.https.port=9443
|
||||
@@ -0,0 +1,2 @@
|
||||
# This option is needed when using an IBM JRE to avoid a handshake failure when making a secure JDBC connection.
|
||||
-Dcom.ibm.jsse2.overrideDefaultTLS=true
|
||||
@@ -0,0 +1,53 @@
|
||||
<server description="Liberty server">
|
||||
|
||||
<featureManager>
|
||||
<feature>jpa-2.2</feature>
|
||||
<feature>microProfile-3.0</feature>
|
||||
<feature>mpJwt-1.1</feature>
|
||||
</featureManager>
|
||||
|
||||
<logging traceSpecification="eclipselink=all" maxFileSize="20" maxFiles="10"/>
|
||||
|
||||
<keyStore id="digicertRootCA" password="digicert" location="${server.config.dir}/resources/security/digicert-root-ca.jks"/>
|
||||
<ssl id="defaultSSLConfig" keyStoreRef="defaultKeyStore" trustStoreRef="digicertRootCA" />
|
||||
|
||||
<httpEndpoint host="*" httpPort="${default.http.port}"
|
||||
httpsPort="${default.https.port}" id="defaultHttpEndpoint"/>
|
||||
|
||||
<mpJwt
|
||||
id="jwt"
|
||||
issuer="${OIDC_ISSUERIDENTIFIER}"
|
||||
jwksUri="${OIDC_JWKENDPOINTURL}"
|
||||
audiences="${OIDC_AUDIENCES}"
|
||||
userNameAttribute="sub"
|
||||
/>
|
||||
|
||||
<library id="PostgresLib">
|
||||
<fileset dir="${server.config.dir}/jdbc"/>
|
||||
</library>
|
||||
|
||||
<dataSource id="AccountsDataSource" jndiName="jdbc/AccountsDataSource">
|
||||
<jdbcDriver libraryRef="PostgresLib" />
|
||||
<!-- Idle connections to this server are timing out after 5 minutes.
|
||||
It is recommended to set maxIdleTime to half of that value to avoid jdbc failures (e.g. broken pipe).
|
||||
Reap time is reduced from default of 3 minutes to close idle connections in time. -->
|
||||
<connectionManager maxIdleTime="2m30s" reapTime="60s"/>
|
||||
<properties.postgresql
|
||||
serverName="${DB_SERVERNAME}"
|
||||
portNumber="${DB_PORTNUMBER}"
|
||||
databaseName="${DB_DATABASENAME}"
|
||||
user="${DB_USER}"
|
||||
password="${DB_PASSWORD}"
|
||||
ssl="false"
|
||||
/>
|
||||
</dataSource>
|
||||
|
||||
<webApplication location="transaction-service.war" contextRoot="/">
|
||||
<application-bnd>
|
||||
<security-role name="authenticated">
|
||||
<special-subject type="ALL_AUTHENTICATED_USERS"/>
|
||||
</security-role>
|
||||
</application-bnd>
|
||||
</webApplication>
|
||||
|
||||
</server>
|
||||
@@ -0,0 +1,10 @@
|
||||
<entity-mappings xmlns="http://java.sun.com/xml/ns/persistence/orm"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm orm_2_0.xsd"
|
||||
version="2.0">
|
||||
<persistence-unit-metadata>
|
||||
<persistence-unit-defaults>
|
||||
<schema>bank</schema>
|
||||
</persistence-unit-defaults>
|
||||
</persistence-unit-metadata>
|
||||
</entity-mappings>
|
||||
@@ -0,0 +1,16 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<persistence version="2.2"
|
||||
xmlns="http://xmlns.jcp.org/xml/ns/persistence"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/persistence
|
||||
http://xmlns.jcp.org/xml/ns/persistence/persistence_2_2.xsd">
|
||||
<persistence-unit name="jpa-unit" transaction-type="JTA">
|
||||
<jta-data-source>jdbc/AccountsDataSource</jta-data-source>
|
||||
<shared-cache-mode>NONE</shared-cache-mode>
|
||||
<properties>
|
||||
<property name="eclipselink.target-database" value="PostgreSQL"/>
|
||||
<property name="eclipselink.logging.level" value="ALL"/>
|
||||
<property name="eclipselink.logging.parameters" value="true"/>
|
||||
</properties>
|
||||
</persistence-unit>
|
||||
</persistence>
|
||||
Binary file not shown.
@@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://xmlns.jcp.org/xml/ns/javaee"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee http://xmlns.jcp.org/xml/ns/javaee/beans_1_1.xsd"
|
||||
bean-discovery-mode="all">
|
||||
</beans>
|
||||
@@ -0,0 +1,27 @@
|
||||
<web-app
|
||||
xmlns="http://xmlns.jcp.org/xml/ns/javaee"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee http://xmlns.jcp.org/xml/ns/javaee/web-app_3_1.xsd"
|
||||
version="3.1">
|
||||
|
||||
<display-name>transaction-service</display-name>
|
||||
|
||||
<security-role>
|
||||
<role-name>authenticated</role-name>
|
||||
</security-role>
|
||||
|
||||
<security-constraint>
|
||||
<display-name>Security Constraints</display-name>
|
||||
<web-resource-collection>
|
||||
<web-resource-name>ProtectedArea</web-resource-name>
|
||||
<url-pattern>/*</url-pattern>
|
||||
</web-resource-collection>
|
||||
<auth-constraint>
|
||||
<role-name>authenticated</role-name>
|
||||
</auth-constraint>
|
||||
<user-data-constraint>
|
||||
<transport-guarantee>NONE</transport-guarantee>
|
||||
</user-data-constraint>
|
||||
</security-constraint>
|
||||
|
||||
</web-app>
|
||||
11
dev/tekton/examples/example-bank/bank-app-backend/user-service/Dockerfile
Executable file
@@ -0,0 +1,11 @@
|
||||
FROM open-liberty:19.0.0.12-kernel-java8-openj9
|
||||
|
||||
USER root
|
||||
RUN apt-get update && apt-get upgrade -y e2fsprogs libgnutls30 libgcrypt20 libsasl2-2
|
||||
USER 1001
|
||||
|
||||
COPY --chown=1001:0 src/main/liberty/config/ /config/
|
||||
COPY --chown=1001:0 src/main/resources/security/ /config/resources/security/
|
||||
COPY --chown=1001:0 target/*.war /config/apps/
|
||||
COPY --chown=1001:0 target/jdbc/* /config/jdbc/
|
||||
RUN configure.sh
|
||||
@@ -0,0 +1,51 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: user-service
|
||||
labels:
|
||||
app: user-service
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: user-service
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: user-service
|
||||
spec:
|
||||
containers:
|
||||
- name: user-service
|
||||
image: anthonyamanse/user-service:example-bank-1.0
|
||||
imagePullPolicy: Always
|
||||
ports:
|
||||
- name: http-server
|
||||
containerPort: 9080
|
||||
envFrom:
|
||||
- secretRef:
|
||||
name: bank-db-secret
|
||||
- secretRef:
|
||||
name: bank-oidc-secret
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: user-service
|
||||
labels:
|
||||
app: user-service
|
||||
spec:
|
||||
ports:
|
||||
- port: 9080
|
||||
targetPort: 9080
|
||||
selector:
|
||||
app: user-service
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Route
|
||||
metadata:
|
||||
name: user-service
|
||||
spec:
|
||||
to:
|
||||
kind: Service
|
||||
name: user-service
|
||||
|
||||
78
dev/tekton/examples/example-bank/bank-app-backend/user-service/pom.xml
Executable file
@@ -0,0 +1,78 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>parent</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>user-service</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<packaging>war</packaging>
|
||||
|
||||
<dependencies>
|
||||
<!-- Open Liberty Features -->
|
||||
<dependency>
|
||||
<groupId>io.openliberty.features</groupId>
|
||||
<artifactId>microProfile-3.0</artifactId>
|
||||
<type>esa</type>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.ibm.codey.bank</groupId>
|
||||
<artifactId>common</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<type>jar</type>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<finalName>${project.artifactId}</finalName>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-war-plugin</artifactId>
|
||||
</plugin>
|
||||
<!-- Add JDBC driver to package -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
<version>3.0.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>copy-jdbc-driver</id>
|
||||
<phase>package</phase>
|
||||
<goals>
|
||||
<goal>copy</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<artifactItems>
|
||||
<artifactItem>
|
||||
<groupId>org.postgresql</groupId>
|
||||
<artifactId>postgresql</artifactId>
|
||||
<version>42.2.8</version>
|
||||
<outputDirectory>${project.build.directory}/jdbc</outputDirectory>
|
||||
</artifactItem>
|
||||
</artifactItems>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<!-- Plugin to run unit tests -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
</plugin>
|
||||
<!-- Plugin to run functional tests -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-failsafe-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
@@ -0,0 +1,25 @@
|
||||
package com.ibm.codey.bank;
|
||||
|
||||
import javax.enterprise.context.ApplicationScoped;
|
||||
|
||||
import org.eclipse.microprofile.health.HealthCheck;
|
||||
import org.eclipse.microprofile.health.HealthCheckResponse;
|
||||
import org.eclipse.microprofile.health.Liveness;
|
||||
|
||||
@Liveness
|
||||
@ApplicationScoped
|
||||
public class LivenessCheck implements HealthCheck {
|
||||
|
||||
private boolean isAlive() {
|
||||
// perform health checks here
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public HealthCheckResponse call() {
|
||||
boolean up = isAlive();
|
||||
return HealthCheckResponse.named(this.getClass().getSimpleName()).state(up).build();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
package com.ibm.codey.bank;
|
||||
|
||||
import javax.ws.rs.ApplicationPath;
|
||||
import javax.ws.rs.core.Application;
|
||||
|
||||
@ApplicationPath("/bank")
|
||||
public class LoyaltyApplication extends Application {
|
||||
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
package com.ibm.codey.bank;
|
||||
|
||||
import javax.enterprise.context.ApplicationScoped;
|
||||
|
||||
import org.eclipse.microprofile.health.HealthCheck;
|
||||
import org.eclipse.microprofile.health.HealthCheckResponse;
|
||||
import org.eclipse.microprofile.health.Readiness;
|
||||
|
||||
@Readiness
|
||||
@ApplicationScoped
|
||||
public class ReadinessCheck implements HealthCheck {
|
||||
|
||||
private boolean isReady() {
|
||||
// perform readiness checks, e.g. database connection, etc.
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public HealthCheckResponse call() {
|
||||
boolean up = isReady();
|
||||
return HealthCheckResponse.named(this.getClass().getSimpleName()).state(up).build();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,120 @@
|
||||
package com.ibm.codey.bank.accounts;
|
||||
|
||||
import javax.enterprise.context.RequestScoped;
|
||||
import javax.inject.Inject;
|
||||
import javax.interceptor.Interceptors;
|
||||
import javax.transaction.Transactional;
|
||||
import javax.ws.rs.Consumes;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.PUT;
|
||||
import javax.ws.rs.POST;
|
||||
import javax.ws.rs.DELETE;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.Produces;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import com.ibm.codey.bank.BaseResource;
|
||||
import com.ibm.codey.bank.accounts.dao.UserDao;
|
||||
import com.ibm.codey.bank.accounts.json.UserRegistration;
|
||||
import com.ibm.codey.bank.accounts.json.UserRegistrationInfo;
|
||||
import com.ibm.codey.bank.accounts.models.User;
|
||||
import com.ibm.codey.bank.interceptor.LoggingInterceptor;
|
||||
|
||||
@RequestScoped
|
||||
@Interceptors(LoggingInterceptor.class)
|
||||
@Path("v1/users")
|
||||
public class UserResource extends BaseResource {
|
||||
|
||||
@Inject
|
||||
private UserDao userDAO;
|
||||
|
||||
/**
|
||||
* This method creates a new user.
|
||||
*/
|
||||
@POST
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Transactional
|
||||
public Response registerUser(UserRegistration userRegistration) {
|
||||
String subject = this.getCallerSubject();
|
||||
if (subject == null) {
|
||||
return Response.status(Response.Status.UNAUTHORIZED).entity("Missing subject").build();
|
||||
}
|
||||
if (userDAO.findUserByRegistryId(subject) != null) {
|
||||
return Response.status(Response.Status.BAD_REQUEST).entity("User is already registered").build();
|
||||
}
|
||||
User newUser = new User();
|
||||
newUser.setSubject(subject);
|
||||
newUser.setConsentGiven(userRegistration.isConsentGiven());
|
||||
userDAO.createUser(newUser);
|
||||
return Response.status(Response.Status.NO_CONTENT).build();
|
||||
}
|
||||
|
||||
/**
|
||||
* This method returns the user registration data for a user.
|
||||
*/
|
||||
@GET
|
||||
@Path("self")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
@Transactional
|
||||
public Response getUser() {
|
||||
String subject = this.getCallerSubject();
|
||||
if (subject == null) {
|
||||
return Response.status(Response.Status.UNAUTHORIZED).entity("Missing subject").build();
|
||||
}
|
||||
User prevUser = userDAO.findUserByRegistryId(subject);
|
||||
if (prevUser == null) {
|
||||
return Response.status(Response.Status.NOT_FOUND).entity("User is not registered").build();
|
||||
}
|
||||
UserRegistrationInfo userRegistration = new UserRegistrationInfo();
|
||||
userRegistration.setUserId(prevUser.getUserId());
|
||||
userRegistration.setConsentGiven(prevUser.isConsentGiven());
|
||||
return Response.status(Response.Status.OK).entity(userRegistration).build();
|
||||
}
|
||||
|
||||
/**
|
||||
* This method updates the user registration data for a user.
|
||||
*/
|
||||
@PUT
|
||||
@Path("self")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Transactional
|
||||
public Response updateUser(UserRegistration userRegistration) {
|
||||
String subject = this.getCallerSubject();
|
||||
if (subject == null) {
|
||||
return Response.status(Response.Status.UNAUTHORIZED).entity("Missing subject").build();
|
||||
}
|
||||
User prevUser = userDAO.findUserByRegistryId(subject);
|
||||
if (prevUser == null) {
|
||||
return Response.status(Response.Status.NOT_FOUND).entity("User is not registered").build();
|
||||
}
|
||||
if (prevUser.isDeleteRequested()) {
|
||||
return Response.status(Response.Status.CONFLICT).entity("User has requested deletion").build();
|
||||
}
|
||||
prevUser.setConsentGiven(userRegistration.isConsentGiven());
|
||||
userDAO.updateUser(prevUser);
|
||||
return Response.status(Response.Status.NO_CONTENT).build();
|
||||
}
|
||||
|
||||
/**
|
||||
* This method schedules an asynchronous process to remove the user from the system.
|
||||
*/
|
||||
@DELETE
|
||||
@Path("self")
|
||||
@Transactional
|
||||
public Response deleteUser() {
|
||||
String subject = this.getCallerSubject();
|
||||
if (subject == null) {
|
||||
return Response.status(Response.Status.UNAUTHORIZED).entity("Missing subject").build();
|
||||
}
|
||||
User prevUser = userDAO.findUserByRegistryId(subject);
|
||||
if (prevUser == null) {
|
||||
return Response.status(Response.Status.NOT_FOUND).entity("User is not registered").build();
|
||||
}
|
||||
prevUser.setDeleteRequested(true);
|
||||
prevUser.setSubject(null);
|
||||
userDAO.updateUser(prevUser);
|
||||
return Response.status(Response.Status.NO_CONTENT).build();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,35 @@
|
||||
package com.ibm.codey.bank.accounts.dao;
|
||||
|
||||
import java.util.List;
|
||||
import javax.enterprise.context.RequestScoped;
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.LockModeType;
|
||||
import javax.persistence.NoResultException;
|
||||
import javax.persistence.PersistenceContext;
|
||||
|
||||
import com.ibm.codey.bank.accounts.models.User;
|
||||
|
||||
@RequestScoped
|
||||
public class UserDao {
|
||||
|
||||
@PersistenceContext(name = "jpa-unit")
|
||||
private EntityManager em;
|
||||
|
||||
public void createUser(User user) {
|
||||
em.persist(user);
|
||||
}
|
||||
|
||||
public void updateUser(User user) {
|
||||
em.merge(user);
|
||||
}
|
||||
|
||||
public User findUserByRegistryId(String subject) {
|
||||
try {
|
||||
return em.createNamedQuery("User.findUserByRegistryId", User.class)
|
||||
.setParameter("subject", subject).getSingleResult();
|
||||
} catch(NoResultException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,45 @@
package com.ibm.codey.bank.accounts.models;

import java.io.Serializable;
import java.util.UUID;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;

import lombok.AccessLevel;
import lombok.Getter;
import lombok.Setter;

@Entity
@Table(name = "users")
@NamedQueries({
    @NamedQuery(name = "User.findUserByRegistryId", query = "SELECT e FROM User e WHERE e.subject = :subject"),
})
@Getter @Setter
public class User implements Serializable {

    private static final long serialVersionUID = 1L;

    @Column(name = "user_id")
    @Id
    @Setter(AccessLevel.NONE)
    private String userId;

    @Column(name = "subject", unique=true)
    private String subject;

    @Column(name = "consent_given")
    private boolean consentGiven;

    @Column(name = "delete_requested")
    private boolean deleteRequested;

    public User() {
        this.userId = UUID.randomUUID().toString();
    }

}
@@ -0,0 +1,2 @@
default.http.port=9080
default.https.port=9443
@@ -0,0 +1,2 @@
# This option is needed when using an IBM JRE to avoid a handshake failure when making a secure JDBC connection.
-Dcom.ibm.jsse2.overrideDefaultTLS=true
@@ -0,0 +1,53 @@
<server description="Liberty server">

    <featureManager>
        <feature>jpa-2.2</feature>
        <feature>microProfile-3.0</feature>
        <feature>mpJwt-1.1</feature>
    </featureManager>

    <logging traceSpecification="eclipselink=all" maxFileSize="20" maxFiles="10"/>

    <keyStore id="digicertRootCA" password="digicert" location="${server.config.dir}/resources/security/digicert-root-ca.jks"/>
    <ssl id="defaultSSLConfig" keyStoreRef="defaultKeyStore" trustStoreRef="digicertRootCA" />

    <httpEndpoint host="*" httpPort="${default.http.port}"
        httpsPort="${default.https.port}" id="defaultHttpEndpoint"/>

    <mpJwt
        id="jwt"
        issuer="${OIDC_ISSUERIDENTIFIER}"
        jwksUri="${OIDC_JWKENDPOINTURL}"
        audiences="${OIDC_AUDIENCES}"
        userNameAttribute="sub"
    />

    <library id="PostgresLib">
        <fileset dir="${server.config.dir}/jdbc"/>
    </library>

    <dataSource id="AccountsDataSource" jndiName="jdbc/AccountsDataSource">
        <jdbcDriver libraryRef="PostgresLib" />
        <!-- Idle connections to this server are timing out after 5 minutes.
             It is recommended to set maxIdleTime to half of that value to avoid jdbc failures (e.g. broken pipe).
             Reap time is reduced from default of 3 minutes to close idle connections in time. -->
        <connectionManager maxIdleTime="2m30s" reapTime="60s"/>
        <properties.postgresql
            serverName="${DB_SERVERNAME}"
            portNumber="${DB_PORTNUMBER}"
            databaseName="${DB_DATABASENAME}"
            user="${DB_USER}"
            password="${DB_PASSWORD}"
            ssl="false"
        />
    </dataSource>

    <webApplication location="user-service.war" contextRoot="/">
        <application-bnd>
            <security-role name="authenticated">
                <special-subject type="ALL_AUTHENTICATED_USERS"/>
            </security-role>
        </application-bnd>
    </webApplication>

</server>
@@ -0,0 +1,10 @@
<entity-mappings xmlns="http://java.sun.com/xml/ns/persistence/orm"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm orm_2_0.xsd"
    version="2.0">
    <persistence-unit-metadata>
        <persistence-unit-defaults>
            <schema>bank</schema>
        </persistence-unit-defaults>
    </persistence-unit-metadata>
</entity-mappings>
@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<persistence version="2.2"
    xmlns="http://xmlns.jcp.org/xml/ns/persistence"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/persistence
        http://xmlns.jcp.org/xml/ns/persistence/persistence_2_2.xsd">
    <persistence-unit name="jpa-unit" transaction-type="JTA">
        <jta-data-source>jdbc/AccountsDataSource</jta-data-source>
        <shared-cache-mode>NONE</shared-cache-mode>
        <properties>
            <property name="eclipselink.target-database" value="PostgreSQL"/>
            <property name="eclipselink.logging.level" value="ALL"/>
            <property name="eclipselink.logging.parameters" value="true"/>
        </properties>
    </persistence-unit>
</persistence>
Binary file not shown.
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://xmlns.jcp.org/xml/ns/javaee"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee http://xmlns.jcp.org/xml/ns/javaee/beans_1_1.xsd"
    bean-discovery-mode="all">
</beans>
@@ -0,0 +1,27 @@
<web-app
    xmlns="http://xmlns.jcp.org/xml/ns/javaee"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee http://xmlns.jcp.org/xml/ns/javaee/web-app_3_1.xsd"
    version="3.1">

    <display-name>user-service</display-name>

    <security-role>
        <role-name>authenticated</role-name>
    </security-role>

    <security-constraint>
        <display-name>Security Constraints</display-name>
        <web-resource-collection>
            <web-resource-name>ProtectedArea</web-resource-name>
            <url-pattern>/*</url-pattern>
        </web-resource-collection>
        <auth-constraint>
            <role-name>authenticated</role-name>
        </auth-constraint>
        <user-data-constraint>
            <transport-guarantee>NONE</transport-guarantee>
        </user-data-constraint>
    </security-constraint>

</web-app>
20
dev/tekton/examples/example-bank/bank-knative-service/Dockerfile
Executable file
@@ -0,0 +1,20 @@
# Use the official lightweight Node.js 12 image.
# https://hub.docker.com/_/node
FROM node:12-slim

# Create and change to the app directory.
WORKDIR /usr/src/app

# Copy application dependency manifests to the container image.
# A wildcard is used to ensure both package.json AND package-lock.json are copied.
# Copying this separately prevents re-running npm install on every code change.
COPY package*.json ./

# Install production dependencies.
RUN npm install --only=production

# Copy local code to the container image.
COPY . ./

# Run the web service on container startup.
CMD [ "npm", "start" ]
28
dev/tekton/examples/example-bank/bank-knative-service/deployment.yaml
Executable file
@@ -0,0 +1,28 @@
apiVersion: serving.knative.dev/v1
kind: Service
metadata:
  name: process-transaction
  # local to cluster only
  labels:
    serving.knative.dev/visibility: cluster-local
spec:
  template:
    metadata:
      annotations:
        # Target 10 requests in-flight per pod.
        autoscaling.knative.dev/target: "10"
        # Disable scale to zero with a minScale of 1.
        # autoscaling.knative.dev/minScale: "1"
        # Limit scaling to 50 pods.
        # autoscaling.knative.dev/maxScale: "50"
    spec:
      containers:
        - image: anthonyamanse/knative-transaction-process:with-auth
          envFrom:
            - secretRef:
                name: bank-oidc-adminuser
            - secretRef:
                name: mobile-simulator-secrets
          env:
            - name: TRANSACTION_SERVICE_URL
              value: "http://transaction-service:9080/bank/v1/transactions"
112
dev/tekton/examples/example-bank/bank-knative-service/index.js
Executable file
@@ -0,0 +1,112 @@
const express = require('express');
const app = express();
const axios = require('axios');
const qs = require('qs');
const jwt_decode = require('jwt-decode')

let transactionServiceUrl = process.env.TRANSACTION_SERVICE_URL
let appIdTokenUrl = process.env.APP_ID_TOKEN_URL
let appIdClientId = process.env.APP_ID_CLIENT_ID
let appIdClientSecret = process.env.APP_ID_CLIENT_SECRET
let appIdAdminUser = process.env.APP_ID_ADMIN_USER
let appIdAdminPassword = process.env.APP_ID_ADMIN_PASSWORD

let appIdResult;
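// Cached App ID token response: the admin token is fetched once per container instance,
// reused for later requests, and refreshed only when it has expired.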

app.post('/process', (req, res) => {
    console.log('received request')
    console.log(req.query)
    if (!appIdResult) {
        getAppIdToken(appIdAdminUser, appIdAdminPassword)
        .then(function (response) {
            appIdResult = response.data
            sendToRewardEndpoint(req, res, appIdResult.access_token)
        })
        .catch(function (error) {
            console.log(error)
            res.status('404').send('Error getting admin token')
        })
    } else {
        console.log('found app id result in global variable')
        // check if token is expired
        if (isAccessTokenExpired(appIdResult.access_token)) {
            console.log('token found is expired. getting new one...')
            getAppIdToken(appIdAdminUser, appIdAdminPassword)
            .then(function (response) {
                appIdResult = response.data
                sendToRewardEndpoint(req, res, appIdResult.access_token)
            })
            .catch(function (error) {
                console.log(error)
                res.status('404').send('Error getting admin token')
            })
        } else {
            sendToRewardEndpoint(req, res, appIdResult.access_token)
        }
    }
});

function sendToRewardEndpoint(req, res, authToken) {
    if (req.query.transactionId && req.query.category && req.query.amount) {
        let pointsEarned = computeReward(req.query.category, req.query.amount);
        axios({
            headers: {
                'Authorization': 'Bearer ' + authToken
            },
            method: 'put',
            url: transactionServiceUrl + '/reward/' + req.query.transactionId,
            data: {
                pointsEarned
            }
        })
        .then(function (response) {
            if (response.status == '204') {
                res.status('200').send('OK')
            } else {
                console.log({status: response.status, data: response.data})
                res.status('404').send({result: 'Failed to post to transaction API', response })
            }
        }).catch(function (error) {
            console.log("Error in PUT /transactions/reward/{transactionId}")
            console.log({status: error.response.status, data: error.response.data})
            res.status('404').send({error})
        })
    } else {
        res.status('404').send('transactionId, category, and amount must be present in query parameters.')
    }
}

function computeReward(category, amount) {
    return amount;
}

function getAppIdToken(username, password) {
    let data = {
        username,
        password,
        grant_type: 'password'
    }
    return axios({
        method: 'post',
        url: appIdTokenUrl + '/token',
        headers: {
            'Authorization': 'Basic ' + Buffer.from(appIdClientId + ":" + appIdClientSecret).toString('base64'),
            'Content-Type' : 'application/x-www-form-urlencoded'
        },
        data: qs.stringify(data)
    })
}

function isAccessTokenExpired(access_token) {
    if (new Date().getTime() - (jwt_decode(access_token).exp * 1000) >= 0) {
        return true
    } else {
        return false
    }
}


const port = process.env.PORT || 8080;
app.listen(port, () => {
    console.log('Hello world listening on port', port);
});
17
dev/tekton/examples/example-bank/bank-knative-service/package.json
Executable file
@@ -0,0 +1,17 @@
{
  "name": "bank-knative-service",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "start": "node index.js",
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "axios": "^0.19.2",
    "express": "^4.17.1",
    "jwt-decode": "^2.2.0",
    "qs": "^6.7.0"
  }
}
36
dev/tekton/examples/example-bank/bank-user-cleanup-utility/.gitignore
vendored
Executable file
@@ -0,0 +1,36 @@
**/target
!.keep


### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache

### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr

### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
/build/

### VS Code ###
.vscode/


## Local configuration files
/local/config/*

*.swo
*.swp
10
dev/tekton/examples/example-bank/bank-user-cleanup-utility/Dockerfile
Executable file
@@ -0,0 +1,10 @@
FROM adoptopenjdk:8-jre-openj9

USER root
RUN apt-get update && apt-get upgrade -y e2fsprogs libgnutls30 libgcrypt20 libsasl2-2
RUN mkdir -p /opt/app/lib
USER 1001

COPY target/user-cleanup-utility-1.0-SNAPSHOT.jar /opt/app
COPY target/lib/* /opt/app/lib/
CMD ["java", "-jar", "/opt/app/user-cleanup-utility-1.0-SNAPSHOT.jar"]
15
dev/tekton/examples/example-bank/bank-user-cleanup-utility/README.md
Executable file
@@ -0,0 +1,15 @@

## Build

```
mvn package
docker build -t bank-user-cleanup-utility:1.0-SNAPSHOT .
```

### Secrets

```
kubectl create secret generic bank-db-secret --from-literal=DB_SERVERNAME=48f106c1-94cb-4133-b99f-20991c91cb1a.bn2a2vgd01r3l0hfmvc0.databases.appdomain.cloud --from-literal=DB_PORTNUMBER=30389 --from-literal=DB_DATABASENAME=ibmclouddb --from-literal=DB_USER=ibm_cloud_0637cd24_8ac9_4dc7_b2d4_ebd080633f7f --from-literal=DB_PASSWORD=<password>
kubectl create secret generic bank-iam-secret --from-literal=IAM_APIKEY=<apikey> --from-literal=IAM_SERVICE_URL=https://iam.cloud.ibm.com/identity/token
kubectl create secret generic bank-appid-secret --from-literal=APPID_TENANTID=3d17f53d-4600-4f32-bb2c-207f4e2f6060 --from-literal=APPID_SERVICE_URL=https://us-south.appid.cloud.ibm.com
```
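
### Deploy (sketch)

A minimal sketch, not part of the original instructions: assuming the built image has been pushed somewhere the cluster can pull it from, the secrets above exist, and `job.yaml` from this directory is used, the CronJob can be applied and then triggered once by hand (the one-off job name here is arbitrary):

```
kubectl apply -f job.yaml
kubectl create job bank-user-cleanup-manual --from=cronjob/bank-user-cleanup-utility
kubectl logs job/bank-user-cleanup-manual -f
```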
28
dev/tekton/examples/example-bank/bank-user-cleanup-utility/job.yaml
Executable file
@@ -0,0 +1,28 @@
apiVersion: batch/v1beta1
kind: CronJob
metadata:
  name: bank-user-cleanup-utility
  labels:
    app: bank-user-cleanup-utility
spec:
  schedule: "@hourly"
  jobTemplate:
    spec:
      template:
        spec:
          restartPolicy: Never
          containers:
            - name: bank-user-cleanup-utility
              image: ykoyfman/bank-cleanup:1.0
              imagePullPolicy: Always
              envFrom:
                - secretRef:
                    name: bank-db-secret
                - secretRef:
                    name: bank-iam-secret
                - secretRef:
                    name: bank-appid-secret
              env:
                - name: LAST_LOGIN_HOURS
                  value: "24"
      backoffLimit: 0
94
dev/tekton/examples/example-bank/bank-user-cleanup-utility/pom.xml
Executable file
@@ -0,0 +1,94 @@
<?xml version='1.0' encoding='utf-8'?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <modelVersion>4.0.0</modelVersion>

    <groupId>com.ibm.codey.loyalty</groupId>
    <artifactId>user-cleanup-utility</artifactId>
    <version>1.0-SNAPSHOT</version>
    <packaging>jar</packaging>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.postgresql</groupId>
            <artifactId>postgresql</artifactId>
            <version>42.2.10</version>
        </dependency>
        <!-- JSON-B API -->
        <dependency>
            <groupId>jakarta.json.bind</groupId>
            <artifactId>jakarta.json.bind-api</artifactId>
            <version>1.0.2</version>
        </dependency>
        <!-- JSON-B implementation -->
        <dependency>
            <groupId>org.jboss.resteasy</groupId>
            <artifactId>resteasy-json-binding-provider</artifactId>
            <version>4.4.2.Final</version>
        </dependency>
        <!-- Microprofile rest client API -->
        <dependency>
            <groupId>org.eclipse.microprofile.rest.client</groupId>
            <artifactId>microprofile-rest-client-api</artifactId>
            <version>1.3.3</version>
        </dependency>
        <!-- Microprofile rest client implementation -->
        <dependency>
            <groupId>org.jboss.resteasy</groupId>
            <artifactId>resteasy-client-microprofile</artifactId>
            <version>4.4.2.Final</version>
        </dependency>
        <!-- lombok -->
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.16</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-dependency-plugin</artifactId>
                <version>3.1.1</version>
                <executions>
                    <execution>
                        <phase>compile</phase>
                        <goals>
                            <goal>copy-dependencies</goal>
                        </goals>
                        <configuration>
                            <outputDirectory>${project.build.directory}/lib</outputDirectory>
                            <includeScope>runtime</includeScope>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <version>3.0.2</version>
                <configuration>
                    <archive>
                        <manifest>
                            <addClasspath>true</addClasspath>
                            <classpathPrefix>lib/</classpathPrefix>
                            <mainClass>com.ibm.codey.loyalty.AccountDeletionProcessor</mainClass>
                        </manifest>
                    </archive>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>
@@ -0,0 +1,269 @@
package com.ibm.codey.loyalty;

import java.net.MalformedURLException;
import java.net.URL;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringJoiner;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.ibm.codey.loyalty.external.appid.AppIDService;
import com.ibm.codey.loyalty.external.appid.AppIDServiceGetUserRoleResponse;
import com.ibm.codey.loyalty.external.appid.AppIDServiceGetUsersResponse;
import com.ibm.codey.loyalty.external.iam.IAMTokenService;
import com.ibm.codey.loyalty.external.iam.IAMTokenServiceResponse;

import org.eclipse.microprofile.rest.client.RestClientBuilder;

// This code deletes any App ID user who is no longer registered for the loyalty program.
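// Overview of the flow below: page through all App ID users, skip identities that were
// modified within the last LAST_LOGIN_HOURS hours, skip users that are still present in
// the BANK.USERS table and users that hold the "admin" role, and remove whatever is left
// from the App ID cloud directory.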
public class AccountDeletionProcessor {

    private static final Logger log = Logger.getLogger(AccountDeletionProcessor.class.getName());

    private static final String PROVIDER = "cloud_directory";

    private static final int USERS_COUNT = 20;

    private static URL IAM_SERVICE_URL;
    private static String IAM_APIKEY;

    private static URL APPID_SERVICE_URL;
    private static String APPID_TENANTID;

    private static String DB_SERVERNAME;
    private static String DB_PORTNUMBER;
    private static String DB_DATABASENAME;
    private static String DB_USER;
    private static String DB_PASSWORD;

    private static int LAST_LOGIN_HOURS;

    private Connection con;

    private AppIDService appIdService;

    private String authHeader;

    public static void main(String[] args) {
        // Gather environment variables
        try {
            IAM_SERVICE_URL = new URL(getEnvVar("IAM_SERVICE_URL"));
            APPID_SERVICE_URL = new URL(getEnvVar("APPID_SERVICE_URL"));
        } catch(MalformedURLException mue) {
            mue.printStackTrace();
            System.exit(1);
        }
        IAM_APIKEY = getEnvVar("IAM_APIKEY");
        APPID_TENANTID = getEnvVar("APPID_TENANTID");
        DB_SERVERNAME = getEnvVar("DB_SERVERNAME");
        DB_PORTNUMBER = getEnvVar("DB_PORTNUMBER");
        DB_DATABASENAME = getEnvVar("DB_DATABASENAME");
        DB_USER = getEnvVar("DB_USER");
        DB_PASSWORD = getEnvVar("DB_PASSWORD");
        LAST_LOGIN_HOURS = Integer.valueOf(getEnvVar("LAST_LOGIN_HOURS"));
        new AccountDeletionProcessor().run();
    }

    public void run() {
        // Connect to database
        getDBConnection();
        // Set up auth header for App Id with IAM token.
        authHeader = "Bearer " + getIamToken();
        // Set up client proxy to App Id service.
        appIdService = RestClientBuilder.newBuilder().baseUrl(APPID_SERVICE_URL).build(AppIDService.class);
        try {
            // Iterate through all App Id users a page at a time. Identify and collect unregistered users by provider id.
            Set<String> unregisteredUserProviderIds = new HashSet<String>();
            int startIndex = 0;
            AppIDServiceGetUsersResponse usersResponse;
            do {
                // Get a page of users. Collect the user's profile id and corresponding provider id.
                Map<String, String> profileIdToProviderIdMap = new HashMap<String,String>(USERS_COUNT);
                log.log(Level.INFO, "Obtaining a page of user data");
                usersResponse = appIdService.getUsers(authHeader, APPID_TENANTID, AppIDService.DATASCOPE_FULL, startIndex, USERS_COUNT);
                int numberOfUsersOnThisPage = usersResponse.getItemsPerPage();
                for (int i=0; i<usersResponse.getItemsPerPage(); i++) {
                    AppIDServiceGetUsersResponse.User user = usersResponse.getUsers()[i];
                    AppIDServiceGetUsersResponse.Identity[] identities = user.getIdentities();
                    if (identities != null && identities.length == 1 && identities[0].getProvider().equals(PROVIDER)) {
                        // If the user hasn't recently logged in, save the profile id and provider id for further examination.
                        if (!isRecentlyModified(identities[0].getIdpUserInfo().getMeta().getLastModified())) {
                            profileIdToProviderIdMap.put(user.getProfileId(), identities[0].getProviderId());
                        }
                    }
                }
                startIndex += numberOfUsersOnThisPage;
                log.log(Level.INFO, "App Id users: " + profileIdToProviderIdMap.toString());
                // If there are no users on this page that weren't recently modified, continue to next page.
                if (profileIdToProviderIdMap.isEmpty()) {
                    continue;
                }
                // Query users table for subjects matching these profile ids.
                Set<String> registeredProfileIds = queryUsers(profileIdToProviderIdMap.keySet());
                log.log(Level.INFO, "Registered users: " + registeredProfileIds.toString());
                // Remove from the map those users who are still registered in the users table.
                for(String profileId : registeredProfileIds) {
                    profileIdToProviderIdMap.remove(profileId);
                }
                // Remove from the map those users who are admins.
                Iterator<Map.Entry<String, String>> iter = profileIdToProviderIdMap.entrySet().iterator();
                while (iter.hasNext()) {
                    Map.Entry<String,String> entry = iter.next();
                    String profileId = entry.getKey();
                    if (isAdmin(profileId)) {
                        log.log(Level.INFO, "Admin: " + profileId);
                        iter.remove();
                    }
                }
                // Whatever is left is an unregistered user. Save for deletion after completing the paged scan.
                unregisteredUserProviderIds.addAll(profileIdToProviderIdMap.values());
            } while(startIndex < usersResponse.getTotalResults());
            // Remove all unregistered users.
            if (unregisteredUserProviderIds.isEmpty()) {
                log.log(Level.INFO, "No App ID users need to be removed");
            } else {
                for(String providerId : unregisteredUserProviderIds) {
                    log.log(Level.INFO, "Removing user: " + providerId);
                    appIdService.removeUser(authHeader, APPID_TENANTID, providerId);
                }
            }
        } finally {
            try {
                appIdService.close();
            } catch(Exception e) {
                e.printStackTrace();
            }
            closeDBConnection();
        }
    }

    private static String getEnvVar(String name) {
        String s = System.getenv(name);
        if (s == null) {
            throw new RuntimeException("Missing environment variable " + name);
        }
        return s;
    }

    private void getDBConnection() {
        try {
            // Load the driver
            log.log(Level.INFO, "Loading the JDBC driver");
            Class.forName("org.postgresql.Driver");
            // Create the connection
            String url = "jdbc:postgresql://" + DB_SERVERNAME + ":" + DB_PORTNUMBER + "/" + DB_DATABASENAME;
            log.log(Level.INFO, "Creating a JDBC connection to " + url);
            Properties props = new Properties();
            props.setProperty("user", DB_USER);
            props.setProperty("password", DB_PASSWORD);
            props.setProperty("sslfactory", "org.postgresql.ssl.NonValidatingFactory");
            con = DriverManager.getConnection(url, props);
        } catch (ClassNotFoundException e) {
            System.err.println("Could not load JDBC driver");
            e.printStackTrace();
            throw new RuntimeException(e);
        } catch(SQLException sqlex) {
            System.err.println("SQLException information");
            System.err.println("Error msg: " + sqlex.getMessage());
            System.err.println("SQLSTATE: " + sqlex.getSQLState());
            System.err.println("Error code: " + sqlex.getErrorCode());
            sqlex.printStackTrace();
            throw new RuntimeException(sqlex);
        }
    }

    private Set<String> queryUsers(Set<String> profileIds) {
        Set<String> registeredProfileIds = new HashSet<String>();
        try {
            // Create query statement
            StringJoiner sj = new StringJoiner(",", "(", ")");
            for(String id : profileIds) {
                sj.add("?");
            }
            String query = "SELECT SUBJECT FROM BANK.USERS WHERE SUBJECT IN " + sj.toString();
            // Execute query statement
            log.log(Level.INFO, "Querying database");
            PreparedStatement ps = con.prepareStatement(query);
            int index = 1;
            for(String id : profileIds) {
                ps.setString(index, id);
                index++;
            }
            ResultSet rs = ps.executeQuery();
            while(rs.next()) {
                registeredProfileIds.add(rs.getString("subject"));
            }
            // Close the ResultSet
            rs.close();
            // Close the PreparedStatement
            ps.close();
        }
        catch(SQLException sqlex) {
            System.err.println("SQLException information");
            System.err.println("Error msg: " + sqlex.getMessage());
            System.err.println("SQLSTATE: " + sqlex.getSQLState());
            System.err.println("Error code: " + sqlex.getErrorCode());
            sqlex.printStackTrace();
            throw new RuntimeException(sqlex);
        }
        return registeredProfileIds;
    }

    private void closeDBConnection() {
        try {
            con.close();
        }
        catch(SQLException sqlex) {
            System.err.println("SQLException information");
            System.err.println("Error msg: " + sqlex.getMessage());
            System.err.println("SQLSTATE: " + sqlex.getSQLState());
            System.err.println("Error code: " + sqlex.getErrorCode());
            sqlex.printStackTrace();
            throw new RuntimeException(sqlex);
        }
    }

    private String getIamToken() {
        // Get an IAM token for authentication to App ID API.
        log.log(Level.INFO, "Obtaining IAM access token");
        IAMTokenServiceResponse tokenResponse;
        try ( IAMTokenService iamTokenService = RestClientBuilder.newBuilder().baseUrl(IAM_SERVICE_URL).build(IAMTokenService.class) ) {
            tokenResponse = iamTokenService.getIAMTokenFromAPIKey(IAMTokenService.GRANT_TYPE_APIKEY, IAM_APIKEY);
        } catch(Exception e) {
            throw new RuntimeException(e);
        }
        return tokenResponse.getAccessToken();
    }

    private boolean isRecentlyModified(String lastModifiedString) {
        ZonedDateTime now = ZonedDateTime.now();
        ZonedDateTime lastModified = ZonedDateTime.parse(lastModifiedString);
        Duration duration = Duration.between(lastModified, now);
        long diffHours = (long) duration.getSeconds() / (60*60);
        return (diffHours < LAST_LOGIN_HOURS);
    }

    private boolean isAdmin(String profileId) {
        boolean admin = false;
        AppIDServiceGetUserRoleResponse userProfileResponse = appIdService.getUserRoles(authHeader, APPID_TENANTID, profileId);
        for (AppIDServiceGetUserRoleResponse.Role role : userProfileResponse.getRoles()) {
            if (role.getName().equals("admin")) {
                admin = true;
                break;
            }
        }
        return admin;
    }
}
@@ -0,0 +1,44 @@
package com.ibm.codey.loyalty.external.appid;

import javax.ws.rs.HeaderParam;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

public interface AppIDService extends AutoCloseable {

    public static String DATASCOPE_FULL = "full";

    @GET
    @Path("/management/v4/{tenantId}/users")
    @Produces({MediaType.APPLICATION_JSON})
    public AppIDServiceGetUsersResponse getUsers(
        @HeaderParam("Authorization") String authorizationHeader,
        @PathParam("tenantId") String tenantId,
        @QueryParam("dataScope") String dataScope,
        @QueryParam("startIndex") int startIndex,
        @QueryParam("count") int count
    );

    @GET
    @Path("/management/v4/{tenantId}/users/{id}/roles")
    @Produces({MediaType.APPLICATION_JSON})
    public AppIDServiceGetUserRoleResponse getUserRoles(
        @HeaderParam("Authorization") String authorizationHeader,
        @PathParam("tenantId") String tenantId,
        @PathParam("id") String profileId
    );

    @DELETE
    @Path("/management/v4/{tenantId}/cloud_directory/remove/{userId}")
    public void removeUser(
        @HeaderParam("Authorization") String authorizationHeader,
        @PathParam("tenantId") String tenantId,
        @PathParam("userId") String userId
    );

}
@@ -0,0 +1,22 @@
package com.ibm.codey.loyalty.external.appid;

import javax.json.bind.annotation.JsonbProperty;

import lombok.Getter;
import lombok.Setter;

@Getter @Setter
public class AppIDServiceGetUserRoleResponse {

    @JsonbProperty("roles")
    private Role[] roles;

    @Getter @Setter
    public static class Role {

        @JsonbProperty("name")
        private String name;

    }

}
@@ -0,0 +1,61 @@
package com.ibm.codey.loyalty.external.appid;

import javax.json.bind.annotation.JsonbProperty;

import lombok.Getter;
import lombok.Setter;

@Getter @Setter
public class AppIDServiceGetUsersResponse {

    @JsonbProperty("totalResults")
    private int totalResults;

    @JsonbProperty("itemsPerPage")
    private int itemsPerPage;

    @JsonbProperty("users")
    private User[] users;

    @Getter @Setter
    public static class User {

        @JsonbProperty("id")
        private String profileId;

        @JsonbProperty("identities")
        private Identity[] identities;

    }

    @Getter @Setter
    public static class Identity {

        @JsonbProperty("provider")
        private String provider;

        @JsonbProperty("id")
        private String providerId;

        @JsonbProperty("idpUserInfo")
        private IdpUserInfo idpUserInfo;

    }

    @Getter @Setter
    public static class IdpUserInfo {

        @JsonbProperty("meta")
        private Meta meta;

    }

    @Getter @Setter
    public static class Meta {

        @JsonbProperty("lastModified")
        private String lastModified;

    }

}
@@ -0,0 +1,21 @@
package com.ibm.codey.loyalty.external.iam;

import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.POST;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

public interface IAMTokenService extends AutoCloseable {

    public final static String GRANT_TYPE_APIKEY = "urn:ibm:params:oauth:grant-type:apikey";

    @POST
    @Consumes({MediaType.APPLICATION_FORM_URLENCODED})
    @Produces({MediaType.APPLICATION_JSON})
    public IAMTokenServiceResponse getIAMTokenFromAPIKey(
        @FormParam("grant_type") String grantType,
        @FormParam("apikey") String apiKey
    );

}
Some files were not shown because too many files have changed in this diff.