diff --git a/charts/confluent/Chart.yaml b/charts/confluent/Chart.yaml index 4320008..d6fcc35 100644 --- a/charts/confluent/Chart.yaml +++ b/charts/confluent/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v2 name: lsdmesp-confluent -version: "0.6.0" -appVersion: "0.6.0" +version: "0.6.1" +appVersion: "0.6.1" description: 'LSDMESP CFK: LSD Event Streaming Platform with Confluent Operator' keywords: - lsdmesp diff --git a/charts/confluent/templates/020.schemaregistry.yaml b/charts/confluent/templates/020.schemaregistry.yaml index 82181d7..044530b 100644 --- a/charts/confluent/templates/020.schemaregistry.yaml +++ b/charts/confluent/templates/020.schemaregistry.yaml @@ -156,6 +156,17 @@ spec: client_type: $1 oneReplicaPerNode: true podTemplate: + probe: + liveness: + failureThreshold: 3 + periodSeconds: 30 + timeoutSeconds: 5 + readiness: + failureThreshold: 10 + initialDelaySeconds: 120 + periodSeconds: 45 + successThreshold: 1 + timeoutSeconds: 60 podSecurityContext: fsGroup: 1000 runAsUser: 1000 diff --git a/charts/confluent/templates/030.connect.yaml b/charts/confluent/templates/030.connect.yaml index d62b882..201b1ce 100644 --- a/charts/confluent/templates/030.connect.yaml +++ b/charts/confluent/templates/030.connect.yaml @@ -33,6 +33,9 @@ spec: - name: debezium-connector-mysql owner: debezium version: 2.2.1 + - name: debezium-connector-postgresql + owner: debezium + version: 2.5.4 - name: kafka-connect-elasticsearch-source owner: dariobalinzo version: 1.5.5 diff --git a/charts/confluent/templates/050.controlcenter-ingress.yaml b/charts/confluent/templates/050.controlcenter-ingress.yaml index e3f1fda..985521e 100644 --- a/charts/confluent/templates/050.controlcenter-ingress.yaml +++ b/charts/confluent/templates/050.controlcenter-ingress.yaml @@ -17,7 +17,7 @@ spec: paths: - backend: service: - name: controlcenter-0-internal + name: controlcenter port: number: 9021 path: / diff --git a/charts/confluent/values.yaml b/charts/confluent/values.yaml index b833067..a263104 
100644 --- a/charts/confluent/values.yaml +++ b/charts/confluent/values.yaml @@ -276,7 +276,7 @@ lsdmesp: # -- Annotations for the Control Center ingress annotations: {} # -- Base domain configuration for the Control Center ingress - baseDomain: "example.com" + baseDomain: "apps.mesp.lsdopen.io" # -- TLS host configuration will be dynamically built based on the base domain. You can change the secret name here. tls: secretName: controlcenter-tls diff --git a/charts/demos/README.md b/charts/demos/README.md new file mode 100644 index 0000000..8b49124 --- /dev/null +++ b/charts/demos/README.md @@ -0,0 +1,221 @@ +# Create Kind cluster +`kind create cluster --config ~/.kindconf/lsdmesp.conf` + +# Create Ingress controller +`kubectl apply -f https://raw.githubusercontent.com/kubernetes/ingress-nginx/main/deploy/static/provider/kind/deploy.yaml` + +# Deploy Postgres +`cd ./demos/bin` +`./deploy_postgres.sh` + +# Port-forward for Postgres +`kubectl port-forward svc/postgres 5432:5432 --namespace postgres` + +# Startup Confluent MESP cluster +`cd ./confluent` +`./install.sh` + +# Deploy Monolith app and Webapp, as well as scaled down Webapp 2, Webapp 3 and Microservice app +`cd ./demos/bin` +`./deploy_appmod.sh` + +# Prep Army Knife for ksql cli +Exec into the Army Knife and start the KSQL CLI, but first create a properties file (/root/ksql.client.properties) with the following: +``` +ssl.truststore.location=/root/lsdmesp.truststore.jks +ssl.truststore.password=112233 +``` + +# Port-forward for Monolith +`kubectl port-forward svc/monolith-service 8000:8000 --namespace app-modernisation` + +# Port-forward for Control Center +`kubectl port-forward controlcenter-0 9021:9021 --namespace lsdmesp-confluent` + +# Add CFRBs +`cd ./demos/templates` +`kubectl apply -f 010-cf-additional-cfrb.yaml -n lsdmesp-confluent` + +# Create topics +Create topics in CCC with user `peter` +``` +postgres.bank.transactions +postgres.bank.accounts +postgres.bank.customers +express.bank.transactions 
+``` + +Or from army knife: + +``` +kafka-topics --create --topic postgres.bank.transactions --bootstrap-server $LSDMESP_BOOTSTRAP_SERVERS --partitions 6 --replication-factor 3 --command-config /root/etc/client.properties +kafka-topics --create --topic postgres.bank.accounts --bootstrap-server $LSDMESP_BOOTSTRAP_SERVERS --partitions 6 --replication-factor 3 --command-config /root/etc/client.properties +kafka-topics --create --topic postgres.bank.customers --bootstrap-server $LSDMESP_BOOTSTRAP_SERVERS --partitions 6 --replication-factor 3 --command-config /root/etc/client.properties +kafka-topics --create --topic express.bank.transactions --bootstrap-server $LSDMESP_BOOTSTRAP_SERVERS --partitions 6 --replication-factor 3 --command-config /root/etc/client.properties +``` + +# Add transactions +Navigate to http://webapp.apps.mesp.lsdopen.io + +Add the following transactions: +``` +5208-0272-3184-5035:5 +5188-6083-8011-0307:100 +5188-6083-8011-0307:250 +5188-6083-8011-0307:400 +5588-6461-5550-9705:120 +``` + +# Create source connector +Upload `postgres_source.json` +Show data in topics. + +# Scale up Microservice deployment. 
Scale down webapp-mf1 and scale up webapp-mf2 +``` +kubectl scale deployment microservice-deployment -n app-modernisation --replicas=1 +kubectl scale deployment webapp-mf1-deployment -n app-modernisation --replicas=0 +kubectl scale deployment webapp-mf2-deployment -n app-modernisation --replicas=1 +``` + +# Add additional port forwarding for microservice +``` +kubectl port-forward svc/microservice-service 8001:8001 --namespace app-modernisation +``` + +# Add KSQL Streams +Start KSQL CLI: +`ksql --config-file /root/ksql.client.properties https://ksqldb:8088 -u peter -p peter-secret` +`SET 'auto.offset.reset' = 'earliest';` + +``` +CREATE STREAM postgres_bank_transactions WITH (KAFKA_TOPIC='postgres.bank.transactions', KEY_FORMAT ='AVRO', VALUE_FORMAT='AVRO'); +SELECT * FROM postgres_bank_transactions EMIT CHANGES; +CREATE TABLE balances WITH (kafka_topic='balances') AS + SELECT card_number, SUM(transaction_amount) AS balance + FROM postgres_bank_transactions + GROUP BY card_number +EMIT CHANGES; +SELECT * FROM balances EMIT CHANGES; +``` + +# Create some new transactions +``` +5188-6083-8011-0307:99 +5208-0272-3184-5035:140 +5208-0272-3184-5035:410 +5588-6461-5550-9705:118 +5588-6461-5550-9705:203 +``` + +# Part 3: setup + +# Create a new stream for the transactions received from the "microservice" +``` +CREATE STREAM express_bank_transactions ( + `key` VARCHAR KEY, + `transaction_id` VARCHAR, + `card_number` VARCHAR, + `transaction_amount` INTEGER, + `transaction_time` VARCHAR) +WITH (kafka_topic='express.bank.transactions', value_format='JSON'); +``` + +# Create a new stream for sinking back to Postgres +``` +CREATE STREAM jdbc_bank_transactions WITH (KAFKA_TOPIC='jdbc.bank.transactions', PARTITIONS=6, REPLICAS=3, VALUE_FORMAT='AVRO') AS + SELECT `key`, `transaction_id`,`card_number`, `transaction_amount`, `transaction_time` + FROM express_bank_transactions +EMIT CHANGES; +``` + +# Create a sink connector +Upload `postgres_sink.json` + +# Cutover to using only 
microservice +``` +kubectl scale deployment webapp-mf2-deployment -n app-modernisation --replicas=0 +kubectl scale deployment webapp-mf3-deployment -n app-modernisation --replicas=1 +kubectl scale deployment monolith-deployment -n app-modernisation --replicas=0 +``` + +# Check port-forwarding + +# Create some new transactions +``` +5188-6083-8011-0307:2500 +5208-0272-3184-5035:80 +5588-6461-5550-9705:60 +5588-6461-5550-9705:900 +5588-6461-5550-9705:45 +``` + +# Run queries in ksqlDB +``` +SELECT * FROM express_bank_transactions EMIT CHANGES; +SELECT * FROM jdbc_bank_transactions EMIT CHANGES; +``` + +# Show data in database + +# Demo Notification Service + +## Create the advanced streams + +- Copy ./streams/advanced-streams.ksql to the army knife and then run: + +`http --verify=false --auth=peter:peter-secret POST https://ksqldb:8088/ksql ksql=@advanced-streams.ksql --check-status` + +## Build and Push if there are any new changes to the app + +``` +cd apps/notification-service +mvn clean install -Pdocker +docker push lsdtrip/demo-notification-service +``` + +## Deploy + +`kc apply -f kube/demo-notification-service-deployment.yaml` + +## Clean-up + +Undeploy app: + +`kc delete -f kube/demo-notification-service-deployment.yaml` + +From ksql drop streams and tables: + +`ksql --config-file /root/ksql.client.properties https://ksqldb:8088 -u peter -p peter-secret` + +``` +drop stream jdbc_bank_transactions_enriched; +drop stream jdbc_bank_transactions_rekeyed; +drop table customers_accounts; +drop table accounts; +drop table customers; +drop stream accounts_stream; +drop stream customers_stream; +``` + +From army knife delete topics: + +``` +kafka-topics --delete --topic customers --bootstrap-server $LSDMESP_BOOTSTRAP_SERVERS --command-config /root/etc/client.properties +kafka-topics --delete --topic accounts --bootstrap-server $LSDMESP_BOOTSTRAP_SERVERS --command-config /root/etc/client.properties +kafka-topics --delete --topic customers_accounts --bootstrap-server 
$LSDMESP_BOOTSTRAP_SERVERS --command-config /root/etc/client.properties +kafka-topics --delete --topic jdbc_bank_transactions_rekeyed --bootstrap-server $LSDMESP_BOOTSTRAP_SERVERS --command-config /root/etc/client.properties +kafka-topics --delete --topic jdbc_bank_transactions_enriched --bootstrap-server $LSDMESP_BOOTSTRAP_SERVERS --command-config /root/etc/client.properties +``` + +``` +http --verify=false --auth=peter:peter-secret DELETE https://schemaregistry:8081/subjects/customers-key +http --verify=false --auth=peter:peter-secret DELETE https://schemaregistry:8081/subjects/customers-value +http --verify=false --auth=peter:peter-secret DELETE https://schemaregistry:8081/subjects/accounts-key +http --verify=false --auth=peter:peter-secret DELETE https://schemaregistry:8081/subjects/accounts-value +http --verify=false --auth=peter:peter-secret DELETE https://schemaregistry:8081/subjects/customers_accounts-key +http --verify=false --auth=peter:peter-secret DELETE https://schemaregistry:8081/subjects/customers_accounts-value +http --verify=false --auth=peter:peter-secret DELETE https://schemaregistry:8081/subjects/jdbc_bank_transactions_rekeyed-key +http --verify=false --auth=peter:peter-secret DELETE https://schemaregistry:8081/subjects/jdbc_bank_transactions_rekeyed-value +http --verify=false --auth=peter:peter-secret DELETE https://schemaregistry:8081/subjects/jdbc_bank_transactions_enriched-key +http --verify=false --auth=peter:peter-secret DELETE https://schemaregistry:8081/subjects/jdbc_bank_transactions_enriched-value +``` diff --git a/charts/demos/apps/notification-service/Dockerfile b/charts/demos/apps/notification-service/Dockerfile new file mode 100644 index 0000000..597f1e9 --- /dev/null +++ b/charts/demos/apps/notification-service/Dockerfile @@ -0,0 +1,25 @@ +FROM openjdk:11-jdk-slim + +# install required packages +RUN apt-get -yq update +RUN apt-get -yqq install wget +RUN apt-get -yqq install curl + +WORKDIR /opt +VOLUME /opt/etc + +# add app jar 
+ARG JAR_FILE +ADD target/${JAR_FILE} app.jar +ADD target/lib lib +ADD docker/container_entry.sh container_entry.sh + +# add user +RUN useradd -d /home/kafka -m kafka -s /bin/bash -c "Kafka User" +USER kafka + +# setup environment +ENV JAVA_OPTS="" +ENV ARGS="" + +ENTRYPOINT ["/opt/container_entry.sh"] diff --git a/charts/demos/apps/notification-service/docker/container_entry.sh b/charts/demos/apps/notification-service/docker/container_entry.sh new file mode 100755 index 0000000..88c9737 --- /dev/null +++ b/charts/demos/apps/notification-service/docker/container_entry.sh @@ -0,0 +1,4 @@ +#!/bin/sh +echo "Starting container with JAVA_OPTS: $JAVA_OPTS and ARGS: $ARGS" +exec java $JAVA_OPTS -jar app.jar $ARGS +echo "Exiting shell..." diff --git a/charts/demos/apps/notification-service/kube/demo-notification-service-deployment.yaml b/charts/demos/apps/notification-service/kube/demo-notification-service-deployment.yaml new file mode 100644 index 0000000..d74e15d --- /dev/null +++ b/charts/demos/apps/notification-service/kube/demo-notification-service-deployment.yaml @@ -0,0 +1,83 @@ +apiVersion: v1 +kind: Secret +metadata: + name: demo-notification-service-client-properties +stringData: + client.properties: |- + security.protocol=SASL_SSL + sasl.mechanism=PLAIN + sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="peter" password="peter-secret"; + ssl.truststore.location=/home/kafka/lsdmesp.truststore.jks + ssl.truststore.password=112233 + basic.auth.credentials.source=USER_INFO + basic.auth.user.info=peter:peter-secret + schema.registry.ssl.truststore.location=/home/kafka/lsdmesp.truststore.jks + schema.registry.ssl.truststore.password=112233 + mail.properties: |- + smtp.host=smtp.sendgrid.net + smtp.port=587 + smtp.username=apikey + smtp.password= + smtp.from.email=rainer@lsdopen.io + smtp.to.email=rainer@lsdopen.io +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: demo-notification-service + labels: + app: 
demo-notification-service +spec: + replicas: 1 + strategy: + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + type: RollingUpdate + selector: + matchLabels: + app: demo-notification-service + template: + metadata: + labels: + app: demo-notification-service + spec: + containers: + - image: lsdtrip/demo-notification-service:latest + command: + - java + - -jar + - app.jar + - --bootstrap-servers + - kafka.lsdmesp-confluent.svc.cluster.local:9092 + - --schema-registry-url + - https://schemaregistry.lsdmesp-confluent.svc.cluster.local:8081 + - --source-topic + - jdbc_bank_transactions_enriched + - --command-config + - /opt/etc/client.properties + - --mail-config + - /opt/etc/mail.properties + imagePullPolicy: Always + name: demo-notification-service + lifecycle: + postStart: + exec: + command: [ 'sh', '-c', "keytool -keystore /home/kafka/lsdmesp.truststore.jks -alias CARoot -importcert -file /opt/certs/ca.pem -storepass 112233 -noprompt" ] + volumeMounts: + - mountPath: /opt/etc + name: demo-notification-service-client-properties + - mountPath: /opt/certs + name: ca-pair-sslcerts-tls-crt + volumes: + - secret: + defaultMode: 420 + secretName: demo-notification-service-client-properties + name: demo-notification-service-client-properties + - secret: + defaultMode: 420 + secretName: ca-pair-sslcerts + items: + - key: tls.crt + path: ca.pem + name: ca-pair-sslcerts-tls-crt diff --git a/charts/demos/apps/notification-service/pom.xml b/charts/demos/apps/notification-service/pom.xml new file mode 100644 index 0000000..844d072 --- /dev/null +++ b/charts/demos/apps/notification-service/pom.xml @@ -0,0 +1,173 @@ + + + 4.0.0 + + io.lsdopen.lsdmesp + demo-notification-service + 1.0.0-SNAPSHOT + jar + + Demo Notification Service + + + 11 + UTF-8 + UTF-8 + 7.4.0-ccs + + + + + confluent + Confluent + https://packages.confluent.io/maven/ + + + + + + confluent + https://packages.confluent.io/maven/ + + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + + io.confluent + 
kafka-avro-serializer + 7.4.0 + + + + org.eclipse.angus + angus-mail + 2.0.1 + + + + ch.qos.logback + logback-classic + 1.2.3 + + + + org.slf4j + slf4j-reload4j + 2.0.5 + + + + io.confluent + confluent-log4j + 1.2.17-cp11 + + + + com.beust + jcommander + 1.82 + + + + junit + junit + 4.13 + test + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.1 + + 11 + + + + + + + + org.apache.maven.plugins + maven-jar-plugin + 3.2.0 + + + + true + lib/ + io.lsdopen.lsdmesp.service.notification.ConsumerMain + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.1.1 + + ${project.build.directory}/lib + runtime + provided + + + + package + + copy-dependencies + + + + + + + + + + docker + + + false + + + + + + com.spotify + dockerfile-maven-plugin + 1.4.13 + + + default + + build + + + + + false + lsdtrip/${project.artifactId} + latest + + ${project.build.finalName}.jar + + + + + + + + + diff --git a/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/Args.java b/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/Args.java new file mode 100644 index 0000000..44f3189 --- /dev/null +++ b/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/Args.java @@ -0,0 +1,120 @@ +package io.lsdopen.lsdmesp.service.notification; + +import com.beust.jcommander.Parameter; + +public class Args { + @Parameter( + names = "--bootstrap-servers", + description = "List of Kafka brokers", + required = true + ) + private String bootstrapServers; + + @Parameter( + names = "--schema-registry-url", + description = "The URL of the schema registry", + required = true + ) + private String schemaRegistryUrl; + + @Parameter( + names = "--source-topic", + description = "The topic to consume data from", + required = true + ) + private String sourceTopic; + + @Parameter( + names = "--max-idle-millis", + description = "The maximum amount of time to wait for 
messages before quitting" + ) + private Long maxIdleTimeMillis; + + @Parameter( + names = "--command-config", + description = "A property file with additional (security) config" + ) + private String commandConfig; + + @Parameter( + names = "--mail-config", + description = "A property file with mail config", + required = true + ) + private String mailConfig; + + @Parameter( + names = "--help", + description = "Shows the description for all parameters", + help = true) + private boolean help; + + public String getBootstrapServers() { + return bootstrapServers; + } + + public void setBootstrapServers(String bootstrapServers) { + this.bootstrapServers = bootstrapServers; + } + + public String getSchemaRegistryUrl() { + return schemaRegistryUrl; + } + + public void setSchemaRegistryUrl(String schemaRegistryUrl) { + this.schemaRegistryUrl = schemaRegistryUrl; + } + + public String getSourceTopic() { + return sourceTopic; + } + + public void setSourceTopic(String sourceTopic) { + this.sourceTopic = sourceTopic; + } + + public Long getMaxIdleTimeMillis() { + return maxIdleTimeMillis; + } + + public void setMaxIdleTimeMillis(Long maxIdleTimeMillis) { + this.maxIdleTimeMillis = maxIdleTimeMillis; + } + + public String getCommandConfig() { + return commandConfig; + } + + public void setCommandConfig(String commandConfig) { + this.commandConfig = commandConfig; + } + + public String getMailConfig() { + return mailConfig; + } + + public void setMailConfig(String mailConfig) { + this.mailConfig = mailConfig; + } + + public boolean isHelp() { + return help; + } + + public void setHelp(boolean help) { + this.help = help; + } + + @Override + public String toString() { + return "Args{" + + "bootstrapServers='" + bootstrapServers + '\'' + + ", schemaRegistryUrl='" + schemaRegistryUrl + '\'' + + ", sourceTopic='" + sourceTopic + '\'' + + ", maxIdleTimeMillis=" + maxIdleTimeMillis + + ", commandConfig='" + commandConfig + '\'' + + ", mailConfig='" + mailConfig + '\'' + + ", help=" + 
help + + '}'; + } +} diff --git a/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/ConfigUtility.java b/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/ConfigUtility.java new file mode 100644 index 0000000..172f7c6 --- /dev/null +++ b/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/ConfigUtility.java @@ -0,0 +1,30 @@ +package io.lsdopen.lsdmesp.service.notification; + +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Properties; + +public final class ConfigUtility { + + private ConfigUtility() { + } + + public static Properties loadProperties(String config) throws IOException { + if (config == null) { + return new Properties(); + } + + if (!Files.exists(Paths.get(config))) { + throw new IOException(config + " not found."); + } + + Properties properties = new Properties(); + try (InputStream is = new FileInputStream(config)) { + properties.load(is); + } + return properties; + } +} diff --git a/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/ConsumerMain.java b/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/ConsumerMain.java new file mode 100644 index 0000000..49d7b60 --- /dev/null +++ b/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/ConsumerMain.java @@ -0,0 +1,189 @@ +package io.lsdopen.lsdmesp.service.notification; + +import com.beust.jcommander.JCommander; +import io.confluent.kafka.serializers.KafkaAvroDeserializer; +import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig; +import org.apache.kafka.clients.CommonClientConfigs; +import org.apache.kafka.clients.consumer.*; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.util.Utf8; + +import java.io.IOException; +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; + +public class ConsumerMain { + private static final Logger log = LoggerFactory.getLogger(ConsumerMain.class); + + public static void main(final String[] args) { + // + // Parse input args + // + Args inputArgs = new Args(); + JCommander jc = JCommander.newBuilder() + .addObject(inputArgs) + .build(); + try { + jc.parse(args); + + if (inputArgs.isHelp()) { + jc.usage(); + System.exit(0); + } + } catch (Exception ex) { + System.err.println(ex.getLocalizedMessage()); + jc.usage(); + System.exit(1); + } + + String bootstrapServers = inputArgs.getBootstrapServers(); + String schemaRegistryUrl = inputArgs.getSchemaRegistryUrl(); + String sourceTopic = inputArgs.getSourceTopic(); + Long maxIdleTimeMillis = inputArgs.getMaxIdleTimeMillis(); + String commandConfig = inputArgs.getCommandConfig(); + String mailConfig = inputArgs.getMailConfig(); + + log.info("Consuming events with input arguments: '{}'", inputArgs); + + if (maxIdleTimeMillis == null) { + maxIdleTimeMillis = Long.MAX_VALUE; + } + + // Init shutdown hook + final AtomicBoolean runConsumerLoop = new AtomicBoolean(true); + final CountDownLatch shutdownLatch = new CountDownLatch(1); + Runtime.getRuntime().addShutdownHook(new Thread(() -> { + log.info("Stopping consumer loop ..."); + try { + runConsumerLoop.set(false); + shutdownLatch.await(); + } catch (InterruptedException ex) { + log.error("Wait interrupted", ex); + } + log.info("Exiting shutdown hook!"); + })); + + try { + consumeEvents(bootstrapServers, schemaRegistryUrl, sourceTopic, + maxIdleTimeMillis, commandConfig, mailConfig, runConsumerLoop); + } catch (Exception ex) { + log.error("Failed to consume events", 
ex); + } finally { + // Allow shutdown hook to complete + shutdownLatch.countDown(); + } + + log.info("Exiting main!"); + } + + private static void consumeEvents( + String bootstrapServers, + String schemaRegistryUrl, + String sourceTopic, + Long maxIdleTimeMillis, + String commandConfig, + String mailConfig, + AtomicBoolean runConsumerLoop) throws IOException { + + final Properties consumerProps = getDefaultProps(bootstrapServers, commandConfig); + consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "demo-notification-service-consumer"); + consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); + consumerProps.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "1000"); + consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); + consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); + consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class); + consumerProps.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, "false"); + consumerProps.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl); + + MailUtility mailUtility = new MailUtility(mailConfig); + + try (Consumer consumer = new KafkaConsumer<>(consumerProps)) { + // + // Continue from committed offsets + // + consumer.subscribe(Collections.singletonList(sourceTopic)); + + log.info("Starting consumer loop"); + int counter = 0; + Long firstIdleTimeMillis = null; + while (runConsumerLoop.get()) { + ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); + + if (!records.isEmpty()) { + if (firstIdleTimeMillis != null) { + long diffMillis = System.currentTimeMillis() - firstIdleTimeMillis; + log.trace("Received '{}' records after being idle for '{}' millis", records.count(), diffMillis); + firstIdleTimeMillis = null; + } + + // Process all records + for (ConsumerRecord record : records) { + log.debug("Processing record with key '{}', timestamp '{}' and value '{}'", record.key(), 
DateUtility.toStringDateTimeSAST(new Date(record.timestamp())), record.value()); + + try { + Integer trxAmount = (Integer) record.value().get("TRANSACTION_AMOUNT"); + Utf8 trxId = (Utf8) record.value().get("TRANSACTION_ID"); + Utf8 trxTime = (Utf8) record.value().get("TRANSACTION_TIME"); + Utf8 trxFullName = (Utf8) record.value().get("FULL_NAME"); + Utf8 trxEmail = (Utf8) record.value().get("EMAIL_ADDRESS"); + + if (trxFullName == null || trxEmail == null) { + log.warn("Skipping record with missing client details"); + } else { + log.info("Sending email with trxAmount [" + trxAmount + + "] trxTime [" + trxTime + + "] trxFullName [" + trxFullName + + "] trxEmail [" + trxEmail + "]"); + + String msg = "New transaction for " + trxFullName + " (" + trxEmail + + ")

Amount: " + trxAmount + + "\nEmail: " + trxEmail + + "\nReceived: " + trxTime + + "\n
TransactionId: " + trxId; + + String subject = "New transaction for " + trxFullName + " and amount " + trxAmount; + + mailUtility.sendMail(msg, subject, null); + } + } catch (Exception ex) { + log.error("Failed to process record with key '{}'", record.key(), ex); + } + + counter++; + } + + // Commit after handling records + consumer.commitAsync((offsets, exception) -> log.trace("Committed offsets")); + } else { + if (firstIdleTimeMillis == null) { + firstIdleTimeMillis = System.currentTimeMillis(); + log.trace("Recording first idle time poll millis '{}'", firstIdleTimeMillis); + } else { + long diffMillis = System.currentTimeMillis() - firstIdleTimeMillis; + if (diffMillis > maxIdleTimeMillis) { + log.debug("Exiting poll loop after '{}' millis", diffMillis); + runConsumerLoop.set(false); + break; + } + } + } + } + log.info("Stopped consumer loop with counter '{}'", counter); + } finally { + log.info("Closed consumer"); + } + } + + private static Properties getDefaultProps(String bootstrapServers, String commandConfig) throws IOException { + Properties properties = ConfigUtility.loadProperties(commandConfig); + properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + return properties; + } +} diff --git a/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/DateUtility.java b/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/DateUtility.java new file mode 100644 index 0000000..9974038 --- /dev/null +++ b/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/DateUtility.java @@ -0,0 +1,39 @@ +package io.lsdopen.lsdmesp.service.notification; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.TimeZone; + +public class DateUtility { + private static final String TIME_ZONE_ID_SAST = "Africa/Johannesburg"; + private static final String DEFAULT_DATE_TIME_PATTERN = 
"yyyy-MM-dd HH:mm:ss"; + + public static String getDateTimePattern() { + return DEFAULT_DATE_TIME_PATTERN; + } + + public static SimpleDateFormat getDateTimeFormatter() { + return new SimpleDateFormat(getDateTimePattern()); + } + + public static TimeZone getSASTTimeZone() { + return TimeZone.getTimeZone(TIME_ZONE_ID_SAST); + } + + public static String toStringDateTimeSAST(Date d) { + SimpleDateFormat sdf = getDateTimeFormatter(); + sdf.setTimeZone(getSASTTimeZone()); + return sdf.format(d); + } + + public static Date toDateTimeSAST(String s) { + try { + SimpleDateFormat sdf = getDateTimeFormatter(); + sdf.setTimeZone(getSASTTimeZone()); + return sdf.parse(s); + } catch (ParseException ex) { + throw new IllegalArgumentException("Failed to parse date string:" + s, ex); + } + } +} diff --git a/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/MailUtility.java b/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/MailUtility.java new file mode 100644 index 0000000..a640872 --- /dev/null +++ b/charts/demos/apps/notification-service/src/main/java/io/lsdopen/lsdmesp/service/notification/MailUtility.java @@ -0,0 +1,73 @@ +package io.lsdopen.lsdmesp.service.notification; + +import jakarta.mail.*; +import jakarta.mail.internet.InternetAddress; +import jakarta.mail.internet.MimeBodyPart; +import jakarta.mail.internet.MimeMessage; +import jakarta.mail.internet.MimeMultipart; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; + +public class MailUtility { + + private static final Logger log = LoggerFactory.getLogger(MailUtility.class); + + private final String smtpHost; + private final String smtpPort; + private final String smtpUsername; + private final String smtpPassword; + private final String smtpFromEmail; + private final String smtpToEmail; + + public MailUtility(String mailConfig) throws IOException { + Properties 
properties = ConfigUtility.loadProperties(mailConfig); + smtpHost = properties.getProperty("smtp.host"); + smtpPort = properties.getProperty("smtp.port"); + smtpUsername = properties.getProperty("smtp.username"); + smtpPassword = properties.getProperty("smtp.password"); + smtpFromEmail = properties.getProperty("smtp.from.email"); + smtpToEmail = properties.getProperty("smtp.to.email"); + } + + public void sendMail(String msg, String subject, String toEmail) throws MessagingException { + if (toEmail == null) { + log.debug("Overriding empty toEmail with value from config: " + smtpToEmail); + toEmail = smtpToEmail; + } + + Properties prop = new Properties(); + prop.put("mail.smtp.auth", true); + prop.put("mail.smtp.starttls.enable", "true"); + prop.put("mail.smtp.host", smtpHost); + prop.put("mail.smtp.port", smtpPort); + prop.put("mail.smtp.ssl.trust", smtpHost); + prop.put("mail.smtp.connectiontimeout", "20000"); + prop.put("mail.smtp.timeout", "20000"); + + Session session = Session.getInstance(prop, new Authenticator() { + @Override + protected PasswordAuthentication getPasswordAuthentication() { + return new PasswordAuthentication(smtpUsername, smtpPassword); + } + } + ); + + MimeBodyPart mimeBodyPart = new MimeBodyPart(); + mimeBodyPart.setContent(msg, "text/html; charset=utf-8"); + Multipart multipart = new MimeMultipart(); + multipart.addBodyPart(mimeBodyPart); + + Message message = new MimeMessage(session); + message.setFrom(new InternetAddress(smtpFromEmail)); + message.setRecipients(Message.RecipientType.TO, InternetAddress.parse(toEmail)); + message.setSubject(subject); + message.setContent(multipart); + + Transport.send(message); + + log.info("Successfully sent mail with subject {} to {}", subject, toEmail); + } +} diff --git a/charts/demos/apps/notification-service/src/main/resources/logback.xml b/charts/demos/apps/notification-service/src/main/resources/logback.xml new file mode 100644 index 0000000..1f74def --- /dev/null +++ 
b/charts/demos/apps/notification-service/src/main/resources/logback.xml @@ -0,0 +1,21 @@ + + + + + + + + %d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n + + + + + + + + + + + + + diff --git a/charts/demos/apps/notification-service/src/test/java/io/lsdopen/lsdmesp/service/notification/ArgsTest.java b/charts/demos/apps/notification-service/src/test/java/io/lsdopen/lsdmesp/service/notification/ArgsTest.java new file mode 100644 index 0000000..f0eff89 --- /dev/null +++ b/charts/demos/apps/notification-service/src/test/java/io/lsdopen/lsdmesp/service/notification/ArgsTest.java @@ -0,0 +1,96 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package io.lsdopen.lsdmesp.service.notification; + +import com.beust.jcommander.JCommander; +import org.junit.*; + +import static org.junit.Assert.*; + +/** + * ArgsTest + * + * @author rschamm + */ +public class ArgsTest { + public ArgsTest() { + } + + @BeforeClass + public static void setUpClass() { + } + + @AfterClass + public static void tearDownClass() { + } + + @Before + public void setUp() { + } + + @After + public void tearDown() { + } + + @Test + public void testExampleArgs() { + System.out.println("testExampleArgs"); + + Args args = new Args(); + String[] argv = { + "--bootstrap-servers", "localhost:9092", + "--schema-registry-url", "http://localhost:8081", + "--source-topic", "fun-data", + "--mail-config", "mail-config", + "--max-idle-millis", "10000", + }; + JCommander.newBuilder() + .addObject(args) + .build() + .parse(argv); + + assertEquals("localhost:9092", args.getBootstrapServers()); + assertEquals("http://localhost:8081", args.getSchemaRegistryUrl()); + assertEquals("fun-data", args.getSourceTopic()); + assertEquals(10000L, (long) args.getMaxIdleTimeMillis()); + } + + @Test + public void testExampleArgsOnlyRequired() { + System.out.println("testExampleArgsOnlyRequired"); 
+ + Args args = new Args(); + String[] argv = { + "--bootstrap-servers", "localhost:9092", + "--schema-registry-url", "http://localhost:8081", + "--source-topic", "fun-data", + "--mail-config", "mail-config" + }; + JCommander.newBuilder() + .addObject(args) + .build() + .parse(argv); + + assertEquals("localhost:9092", args.getBootstrapServers()); + assertEquals("http://localhost:8081", args.getSchemaRegistryUrl()); + assertEquals("fun-data", args.getSourceTopic()); + assertNull(args.getMaxIdleTimeMillis()); + } + + @Test(expected = com.beust.jcommander.ParameterException.class) + public void testExampleArgsMissingParam() { + System.out.println("testExampleArgsMissingParam"); + + Args args = new Args(); + String[] argv = { + "--bootstrap-servers", "localhost:9092" + }; + JCommander.newBuilder() + .addObject(args) + .build() + .parse(argv); + } +} diff --git a/charts/demos/bin/deploy_appmod.sh b/charts/demos/bin/deploy_appmod.sh new file mode 100755 index 0000000..906a1a6 --- /dev/null +++ b/charts/demos/bin/deploy_appmod.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# Create App Modernisation namespace +kubectl create namespace app-modernisation + +# Add Confluent CA certificate secret to app-modernisation namespace +CONFLUENT_TLS_CRT=$(kubectl -n lsdmesp-confluent get secret ca-pair-sslcerts -ojsonpath="{.data['tls\.crt']}") +kubectl create secret generic confluent-ca --namespace app-modernisation --from-literal=ca.crt=placeholder +kubectl patch secret confluent-ca --namespace app-modernisation -p "{\"data\":{\"ca.crt\":\"${CONFLUENT_TLS_CRT}\"}}" + +# Create secret for microservice +kubectl create secret generic kafka-secrets --namespace app-modernisation \ +--from-literal=BOOTSTRAP_SERVERS="kafka.lsdmesp-confluent.svc.cluster.local:9092" \ +--from-literal=CLIENT_KEY="peter" \ +--from-literal=CLIENT_SECRET="peter-secret" \ +--from-literal=SASL_JAAS_CONFIG="org.apache.kafka.common.security.plain.PlainLoginModule required username='peter' password='peter-secret'" \ 
+--from-literal=SCHEMA_REGISTRY_API_KEY="peter" \ +--from-literal=SCHEMA_REGISTRY_API_SECRET="peter-secret" \ +--from-literal=SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO="peter:peter-secret" \ +--from-literal=SCHEMA_REGISTRY_URL="https://schemaregistry.lsdmesp-confluent.svc.cluster.local:8081" \ +--from-literal=KSQLDB_API_KEY="peter" \ +--from-literal=KSQLDB_API_SECRET="peter-secret" \ +--from-literal=KSQLDB_APP_ENDPOINT="https://ksqldb.lsdmesp-confluent.svc.cluster.local:8088" + +# Deploy Monolith app, Webapp and Ingress for Webapp +kubectl apply -f ../templates/020-monolith-deployment.yaml -n app-modernisation +kubectl apply -f ../templates/021-monolith-service.yaml -n app-modernisation +kubectl apply -f ../templates/030-webapp-deployment-mf1.yaml -n app-modernisation +kubectl apply -f ../templates/031-webapp-service.yaml -n app-modernisation +kubectl apply -f ../templates/032-webapp-ingress.yaml -n app-modernisation + +# Create scaled down Microservice app and service, Webapp 2 and Webapp 3 +kubectl apply -f ../templates/040-microservice-deployment.yaml -n app-modernisation +kubectl apply -f ../templates/041-microservice-service.yaml -n app-modernisation +kubectl apply -f ../templates/050-webapp-deployment-mf2.yaml -n app-modernisation +kubectl apply -f ../templates/060-webapp-deployment-mf3.yaml -n app-modernisation diff --git a/charts/demos/bin/deploy_postgres.sh b/charts/demos/bin/deploy_postgres.sh new file mode 100755 index 0000000..b95fd23 --- /dev/null +++ b/charts/demos/bin/deploy_postgres.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +kubectl create ns postgres +kubectl apply -f ../templates/005-postgres.yaml -n postgres diff --git a/charts/demos/connectors/postgres_sink.json b/charts/demos/connectors/postgres_sink.json new file mode 100644 index 0000000..d9f20ec --- /dev/null +++ b/charts/demos/connectors/postgres_sink.json @@ -0,0 +1,24 @@ +{ + "name": "PostgresSinkAppMod", + "config": { + "topics": "jdbc.bank.transactions", + 
"connector.class":"io.confluent.connect.jdbc.JdbcSinkConnector", + "connection.url": "jdbc:postgresql://postgres.postgres.svc.cluster.local:5432/postgres?user=postgres&password=app-mod-c0nflu3nt!", + "value.converter.basic.auth.credentials.source": "USER_INFO", + "value.converter.schema.registry.url": "https://schemaregistry.lsdmesp-confluent.svc.cluster.local:8081", + "value.converter.schema.registry.basic.auth.user.info": "peter:peter-secret", + "value.converter.schema.registry.ssl.truststore.location": "/mnt/sslcerts/truststore.jks", + "value.converter.schema.registry.ssl.truststore.password": "${file:/mnt/sslcerts/jksPassword.txt:jksPassword}", + "key.converter": "org.apache.kafka.connect.storage.StringConverter", + "key.converter.schemas.enable": "false", + "value.converter": "io.confluent.connect.avro.AvroConverter", + "value.converter.schemas.enable": "true", + "insert.mode": "INSERT", + "table.name.format": "bank.transactions", + "pk.fields": "transaction_id", + "pk.mode": "record_value", + "auto.create": "true", + "auto.evolve": "true", + "tasks.max":"1" + } +} \ No newline at end of file diff --git a/charts/demos/connectors/postgres_source.json b/charts/demos/connectors/postgres_source.json new file mode 100644 index 0000000..36afb49 --- /dev/null +++ b/charts/demos/connectors/postgres_source.json @@ -0,0 +1,33 @@ +{ + "name": "PostgresCDCMonolith", + "config": { + "connector.class": "io.debezium.connector.postgresql.PostgresConnector", + "tasks.max": "1", + "plugin.name": "pgoutput", + "topic.prefix": "postgres", + "database.hostname": "postgres.postgres.svc.cluster.local", + "database.port": "5432", + "database.user": "postgres", + "database.password": "app-mod-c0nflu3nt!", + "database.dbname" : "postgres", + "database.server.name": "postgres", + "key.converter.basic.auth.credentials.source": "USER_INFO", + "key.converter.schema.registry.url": "https://schemaregistry.lsdmesp-confluent.svc.cluster.local:8081", + 
"key.converter.schema.registry.basic.auth.user.info": "peter:peter-secret", + "key.converter.schema.registry.ssl.truststore.location": "/mnt/sslcerts/truststore.jks", + "key.converter.schema.registry.ssl.truststore.password": "${file:/mnt/sslcerts/jksPassword.txt:jksPassword}", + "value.converter.basic.auth.credentials.source": "USER_INFO", + "value.converter.schema.registry.url": "https://schemaregistry.lsdmesp-confluent.svc.cluster.local:8081", + "value.converter.schema.registry.basic.auth.user.info": "peter:peter-secret", + "value.converter.schema.registry.ssl.truststore.location": "/mnt/sslcerts/truststore.jks", + "value.converter.schema.registry.ssl.truststore.password": "${file:/mnt/sslcerts/jksPassword.txt:jksPassword}", + "schema.registry.url": "https://schemaregistry.lsdmesp-confluent.svc.cluster.local:8081", + "table.include.list": "bank.customers, bank.accounts, bank.transactions", + "output.data.format": "AVRO", + "key.converter": "io.confluent.connect.avro.AvroConverter", + "value.converter": "io.confluent.connect.avro.AvroConverter", + "after.state.only": "true", + "transforms": "unwrap", + "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState" + } +} \ No newline at end of file diff --git a/charts/demos/streams/advanced-streams.ksql b/charts/demos/streams/advanced-streams.ksql new file mode 100644 index 0000000..e6fb33e --- /dev/null +++ b/charts/demos/streams/advanced-streams.ksql @@ -0,0 +1,52 @@ +SET 'auto.offset.reset' = 'earliest'; + +CREATE STREAM customers_stream WITH (KAFKA_TOPIC = 'postgres.bank.customers',VALUE_FORMAT = 'AVRO', KEY_FORMAT ='AVRO'); +CREATE STREAM accounts_stream WITH (KAFKA_TOPIC = 'postgres.bank.accounts',VALUE_FORMAT = 'AVRO', KEY_FORMAT ='AVRO'); + +CREATE TABLE customers WITH (kafka_topic='customers', FORMAT='AVRO') AS + SELECT customer_id AS customer_id, + LATEST_BY_OFFSET(first_name) AS first_name, + LATEST_BY_OFFSET(last_name) AS last_name, + LATEST_BY_OFFSET(phone_number) AS phone_number, + 
LATEST_BY_OFFSET(email_address) AS email_address + FROM customers_stream + GROUP BY customer_id; + +CREATE TABLE accounts WITH (kafka_topic='accounts', FORMAT='AVRO') AS + SELECT card_number AS card_number, + LATEST_BY_OFFSET(account_id) AS account_id, + LATEST_BY_OFFSET(customer_id) AS customer_id + FROM accounts_stream + GROUP BY card_number; + +CREATE TABLE customers_accounts WITH (kafka_topic='customers_accounts',FORMAT='AVRO') AS +SELECT + C.CUSTOMER_ID AS CUSTOMER_ID, + C.FIRST_NAME + ' ' + C.LAST_NAME AS FULL_NAME, + C.PHONE_NUMBER, + C.EMAIL_ADDRESS, + A.ACCOUNT_ID, + A.CARD_NUMBER +FROM accounts A +INNER JOIN customers C +ON A.CUSTOMER_ID = C.CUSTOMER_ID; + +CREATE STREAM jdbc_bank_transactions_rekeyed +WITH (KAFKA_TOPIC='jdbc_bank_transactions_rekeyed', KEY_FORMAT='AVRO', VALUE_FORMAT='AVRO') AS + SELECT `card_number` as card_number, `transaction_id` as transaction_id, `transaction_amount` as transaction_amount, `transaction_time` as transaction_time + FROM jdbc_bank_transactions + PARTITION BY `card_number` +EMIT CHANGES; + +CREATE STREAM jdbc_bank_transactions_enriched WITH (KAFKA_TOPIC = 'jdbc_bank_transactions_enriched', KEY_FORMAT = 'AVRO', VALUE_FORMAT='AVRO') AS + SELECT + T.CARD_NUMBER AS CARD_NUMBER, + T.TRANSACTION_ID, + T.TRANSACTION_AMOUNT, + T.TRANSACTION_TIME, + C.FULL_NAME, + C.PHONE_NUMBER, + C.EMAIL_ADDRESS + FROM jdbc_bank_transactions_rekeyed T + LEFT JOIN customers_accounts C + ON C.CARD_NUMBER = T.CARD_NUMBER; diff --git a/charts/demos/templates/005-postgres.yaml b/charts/demos/templates/005-postgres.yaml new file mode 100644 index 0000000..78678aa --- /dev/null +++ b/charts/demos/templates/005-postgres.yaml @@ -0,0 +1,68 @@ +--- +apiVersion: v1 +kind: Secret +metadata: + name: postgres-env +type: Opaque +stringData: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: app-mod-c0nflu3nt! 
+--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: postgres-pv-claim +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 1Gi +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: postgres + name: postgresql +spec: + replicas: 1 + strategy: + type: Recreate + selector: + matchLabels: + app: postgres + template: + metadata: + labels: + app: postgres + spec: + containers: + - envFrom: + - secretRef: + name: postgres-env + image: docker.io/mkananizadeh/app-mod-postgres:latest + name: postgresql + ports: + - containerPort: 5432 + name: postgresql + volumeMounts: + - mountPath: /var/lib/postgresql + name: postgres-data + volumes: + - name: postgres-data + persistentVolumeClaim: + claimName: postgres-pv-claim +--- +apiVersion: v1 +kind: Service +metadata: + name: postgres + labels: + app: postgres +spec: + selector: + app: postgres + ports: + - port: 5432 + clusterIP: None diff --git a/charts/demos/templates/010-cf-additional-cfrb.yaml b/charts/demos/templates/010-cf-additional-cfrb.yaml new file mode 100644 index 0000000..a293bf3 --- /dev/null +++ b/charts/demos/templates/010-cf-additional-cfrb.yaml @@ -0,0 +1,40 @@ +apiVersion: platform.confluent.io/v1beta1 +kind: ConfluentRolebinding +metadata: + name: lsdmesp-demo-0 + namespace: lsdmesp-confluent +spec: + principal: + name: TeamBlueAdmin + type: group + role: SystemAdmin +--- +apiVersion: platform.confluent.io/v1beta1 +kind: ConfluentRolebinding +metadata: + name: lsdmesp-demo-1 + namespace: lsdmesp-confluent +spec: + clustersScopeByIds: + connectClusterId: lsdmesp-confluent.connect + principal: + name: TeamBlueAdmin + type: group + role: SystemAdmin +--- +apiVersion: platform.confluent.io/v1beta1 +kind: ConfluentRolebinding +metadata: + name: lsdmesp-demo-2 + namespace: lsdmesp-confluent +spec: + clustersScopeByIds: + schemaRegistryClusterId: id_schemaregistry_lsdmesp-confluent + principal: + name: TeamBlueAdmin + type: group + resourcePatterns: + - name: 
"*" + patternType: LITERAL + resourceType: Subject + role: ResourceOwner diff --git a/charts/demos/templates/020-monolith-deployment.yaml b/charts/demos/templates/020-monolith-deployment.yaml new file mode 100644 index 0000000..2023abc --- /dev/null +++ b/charts/demos/templates/020-monolith-deployment.yaml @@ -0,0 +1,40 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: monolith-deployment + labels: + app: monolith +spec: + replicas: 1 + strategy: + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + type: RollingUpdate + selector: + matchLabels: + app: monolith + template: + metadata: + labels: + app: monolith + spec: + containers: + - name: monolith + image: mkananizadeh/appmod-monolith + env: + - name: PGUSER + value: "postgres" + - name: PGHOST + value: "postgres.postgres.svc.cluster.local" + - name: PGPASSWORD + value: "app-mod-c0nflu3nt!" + - name: PGDATABASE + value: "postgres" + - name: PGPORT + value: "5432" + ports: + - containerPort: 8000 + protocol: TCP +--- diff --git a/charts/demos/templates/021-monolith-service.yaml b/charts/demos/templates/021-monolith-service.yaml new file mode 100644 index 0000000..b2ff2dd --- /dev/null +++ b/charts/demos/templates/021-monolith-service.yaml @@ -0,0 +1,17 @@ +--- +apiVersion: v1 +kind: Service +metadata: + name: monolith-service + labels: + app: monolith +spec: + selector: + app: monolith + ports: + - name: monolith-port + port: 8000 + targetPort: 8000 + protocol: TCP + type: LoadBalancer +--- \ No newline at end of file diff --git a/charts/demos/templates/030-webapp-deployment-mf1.yaml b/charts/demos/templates/030-webapp-deployment-mf1.yaml new file mode 100644 index 0000000..6e270a7 --- /dev/null +++ b/charts/demos/templates/030-webapp-deployment-mf1.yaml @@ -0,0 +1,29 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: webapp-mf1-deployment + labels: + app: webapp +spec: + replicas: 1 + strategy: + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + type: RollingUpdate + selector: + 
matchLabels: + app: webapp + template: + metadata: + labels: + app: webapp + spec: + containers: + - name: webapp-mf1 + image: mkananizadeh/appmod-webapp-mf1 + ports: + - containerPort: 80 + protocol: TCP +--- diff --git a/charts/demos/templates/031-webapp-service.yaml b/charts/demos/templates/031-webapp-service.yaml new file mode 100644 index 0000000..03190bc --- /dev/null +++ b/charts/demos/templates/031-webapp-service.yaml @@ -0,0 +1,17 @@ +--- +apiVersion: v1 +kind: Service +metadata: + name: webapp-service + labels: + app: webapp +spec: + selector: + app: webapp + ports: + - name: webapp-port + port: 80 + targetPort: 80 + protocol: TCP + type: LoadBalancer +--- diff --git a/charts/demos/templates/032-webapp-ingress.yaml b/charts/demos/templates/032-webapp-ingress.yaml new file mode 100644 index 0000000..98c26b0 --- /dev/null +++ b/charts/demos/templates/032-webapp-ingress.yaml @@ -0,0 +1,21 @@ +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + labels: + app: webapp + name: appmod-webapp +spec: + ingressClassName: nginx + rules: + - host: webapp.apps.mesp.lsdopen.io + http: + paths: + - backend: + service: + name: webapp-service + port: + number: 80 + path: / + pathType: Prefix +--- diff --git a/charts/demos/templates/040-microservice-deployment.yaml b/charts/demos/templates/040-microservice-deployment.yaml new file mode 100644 index 0000000..2e62a94 --- /dev/null +++ b/charts/demos/templates/040-microservice-deployment.yaml @@ -0,0 +1,76 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: microservice-deployment + labels: + app: microservice +spec: + replicas: 0 + strategy: + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + type: RollingUpdate + selector: + matchLabels: + app: microservice + template: + metadata: + labels: + app: microservice + spec: + containers: + - name: microservice + # image: mkananizadeh/appmod-microservice + image: lsdtrip/appmod-microservice:0.1.0 + env: + - name: BOOTSTRAP_SERVERS + valueFrom: + 
secretKeyRef: + name: kafka-secrets + key: BOOTSTRAP_SERVERS + - name: SASL_USERNAME + valueFrom: + secretKeyRef: + name: kafka-secrets + key: CLIENT_KEY + - name: SASL_PASSWORD + valueFrom: + secretKeyRef: + name: kafka-secrets + key: CLIENT_SECRET + - name: SASL_MECHANISM + value: "PLAIN" + - name: SECURITY_PROTOCOL + value: "SASL_SSL" + - name: KSQLDB_API_KEY + valueFrom: + secretKeyRef: + name: kafka-secrets + key: KSQLDB_API_KEY + - name: KSQLDB_API_SECRET + valueFrom: + secretKeyRef: + name: kafka-secrets + key: KSQLDB_API_SECRET + - name: KSQLDB_APP_ENDPOINT + valueFrom: + secretKeyRef: + name: kafka-secrets + key: KSQLDB_APP_ENDPOINT + - name: SSL_CA_LOCATION + value: "/opt/microservice/certs/ca.crt" + - name: NODE_EXTRA_CA_CERTS + value: "/opt/microservice/certs/ca.crt" + ports: + - containerPort: 8001 + protocol: TCP + volumeMounts: + - mountPath: "/opt/microservice/certs" + name: confluent-ca + readOnly: true + volumes: + - name: confluent-ca + secret: + secretName: confluent-ca diff --git a/charts/demos/templates/041-microservice-service.yaml b/charts/demos/templates/041-microservice-service.yaml new file mode 100644 index 0000000..69c24e9 --- /dev/null +++ b/charts/demos/templates/041-microservice-service.yaml @@ -0,0 +1,17 @@ +--- +apiVersion: v1 +kind: Service +metadata: + name: microservice-service + labels: + app: microservice +spec: + selector: + app: microservice + ports: + - name: microservice-port + port: 8001 + targetPort: 8001 + protocol: TCP + type: LoadBalancer +--- \ No newline at end of file diff --git a/charts/demos/templates/050-webapp-deployment-mf2.yaml b/charts/demos/templates/050-webapp-deployment-mf2.yaml new file mode 100644 index 0000000..53b8fc0 --- /dev/null +++ b/charts/demos/templates/050-webapp-deployment-mf2.yaml @@ -0,0 +1,30 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: webapp-mf2-deployment + labels: + app: webapp +spec: + replicas: 0 + strategy: + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + 
type: RollingUpdate + selector: + matchLabels: + app: webapp + template: + metadata: + labels: + app: webapp + spec: + containers: + - name: webapp-mf2 + image: mkananizadeh/appmod-webapp-mf2 + ports: + - containerPort: 80 + protocol: TCP +--- + diff --git a/charts/demos/templates/060-webapp-deployment-mf3.yaml b/charts/demos/templates/060-webapp-deployment-mf3.yaml new file mode 100644 index 0000000..0e72845 --- /dev/null +++ b/charts/demos/templates/060-webapp-deployment-mf3.yaml @@ -0,0 +1,29 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: webapp-mf3-deployment + labels: + app: webapp +spec: + replicas: 0 + strategy: + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + type: RollingUpdate + selector: + matchLabels: + app: webapp + template: + metadata: + labels: + app: webapp + spec: + containers: + - name: webapp-mf3 + image: mkananizadeh/appmod-webapp-mf3 + ports: + - containerPort: 80 + protocol: TCP +---