Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ci: logcollection to OpenSearch in non-debug clusters #2080

Merged
merged 24 commits into from
Aug 21, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .github/actions/constellation_create/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,9 @@ inputs:
azureSNPEnforcementPolicy:
required: false
description: "Azure SNP enforcement policy."
test:
description: "The e2e test payload."
required: true

outputs:
kubeconfig:
Expand Down Expand Up @@ -131,6 +134,9 @@ runs:
--info logcollect.github.ref-name="${{ github.ref_name }}" \
--info logcollect.github.sha="${{ github.sha }}" \
--info logcollect.github.runner-os="${{ runner.os }}" \
--info logcollect.github.e2e-test-payload="${{ inputs.test }}" \
--info logcollect.github.is-debug-cluster=false \
--info logcollect.deployment-type="debugd" \
--verbosity=-1 \
--force
echo "::endgroup::"
Expand Down
73 changes: 73 additions & 0 deletions .github/actions/deploy_logcollection/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
# Composite GitHub Action that deploys the log-collection stack
# (Logstash + Filebeat via the elastic Helm charts) into an existing
# cluster so its logs are forwarded to OpenSearch.
name: Log Collection Deployment
description: Deploy log collection functionality to the cluster.

inputs:
  logstash-port:
    description: "The port of the logstash service."
    default: "5045"
  kubeconfig:
    description: "The kubeconfig of the cluster to deploy to."
    required: true
  opensearchUser:
    description: "The username of the opensearch cluster."
    required: true
  opensearchPwd:
    description: "The password of the opensearch cluster."
    required: true
  test:
    description: "The e2e test payload."
    required: true
  provider:
    description: "The CSP of the cluster."
    required: true
  isDebugImage:
    description: "Whether the cluster is a debug cluster / uses a debug image."
    required: true

runs:
  using: "composite"
  steps:
    # Runs the logcollector templater against the repo root; presumably this
    # writes the values.yml files consumed by the two deploy steps below —
    # verify against //hack/logcollector.
    # NOTE(review): opensearchUser/opensearchPwd/test are interpolated
    # unquoted into the shell line; values containing spaces or shell
    # metacharacters would be word-split or interpreted — confirm and quote.
    - name: Template Logcollection Helm Values
      id: template
      shell: bash
      run: |
        bazel run //hack/logcollector template -- \
          --dir $(realpath .) \
          --username ${{ inputs.opensearchUser }} \
          --password ${{ inputs.opensearchPwd }} \
          --port ${{ inputs.logstash-port }} \
          --fields github.actor="${{ github.triggering_actor }}" \
          --fields github.workflow="${{ github.workflow }}" \
          --fields github.run-id="${{ github.run_id }}" \
          --fields github.run-attempt="${{ github.run_attempt }}" \
          --fields github.ref-name="${{ github.ref_name }}" \
          --fields github.sha="${{ github.sha }}" \
          --fields github.runner-os="${{ runner.os }}" \
          --fields github.e2e-test-payload="${{ inputs.test }}" \
          --fields github.isDebugImage="${{ inputs.isDebugImage }}" \
          --fields github.e2e-test-provider="${{ inputs.provider }}" \
          --fields deployment-type="k8s"

    # Installs Logstash from the elastic Helm repo, using the kubeconfig of
    # the freshly created cluster and the templated ./logstash/values.yml.
    - name: Deploy Logstash
      id: deploy-logstash
      shell: bash
      working-directory: ./logstash
      env:
        KUBECONFIG: ${{ inputs.kubeconfig }}
      run: |
        helm repo add elastic https://helm.elastic.co
        helm repo update
        helm install logstash elastic/logstash \
          --wait --timeout=1200s --values values.yml

    # Installs Filebeat the same way, from ./filebeat/values.yml.
    - name: Deploy Filebeat
      id: deploy-filebeat
      shell: bash
      working-directory: ./filebeat
      env:
        KUBECONFIG: ${{ inputs.kubeconfig }}
      run: |
        helm repo add elastic https://helm.elastic.co
        helm repo update
        helm install filebeat elastic/filebeat \
          --wait --timeout=1200s --values values.yml
13 changes: 13 additions & 0 deletions .github/actions/e2e_test/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -248,6 +248,19 @@ runs:
cliVersion: ${{ inputs.cliVersion }}
azureSNPEnforcementPolicy: ${{ inputs.azureSNPEnforcementPolicy }}

- name: Deploy logcollection
id: deploy-logcollection
# TODO(msanft): Temporarily deploy in debug clusters too, to resolve the "missing logs" bug.
# see https://dev.azure.com/Edgeless/Edgeless/_workitems/edit/3227
# if: inputs.isDebugImage == 'false'
msanft marked this conversation as resolved.
Show resolved Hide resolved
uses: ./.github/actions/deploy_logcollection
with:
msanft marked this conversation as resolved.
Show resolved Hide resolved
kubeconfig: ${{ steps.constellation-create.outputs.kubeconfig }}
opensearchUser: ${{ inputs.awsOpenSearchUsers }}
opensearchPwd: ${{ inputs.awsOpenSearchPwd }}
test: ${{ inputs.test }}
provider: ${{ inputs.cloudProvider }}
isDebugImage: ${{ inputs.isDebugImage }}
#
# Test payloads
#
Expand Down
16 changes: 8 additions & 8 deletions .github/workflows/build-logcollector-images.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@ on:
branches:
- main
paths:
- "debugd/internal/debugd/logcollector/Makefile"
- "debugd/internal/debugd/logcollector/filebeat/**"
- "debugd/internal/debugd/logcollector/logstash/**"
- "debugd/filebeat/**"
- "debugd/logstash/**"
- "hack/logcollector/internal/templates/**"
- ".github/workflows/build-logcollector-images.yml"

jobs:
build-logcollector-images:
build-logcollector-debugd-images:
runs-on: ubuntu-22.04
permissions:
contents: read
Expand All @@ -24,16 +24,16 @@ jobs:
with:
ref: ${{ !github.event.pull_request.head.repo.fork && github.head_ref || '' }}

- name: Build and upload logstash container image
- name: Build and upload Logstash container image
uses: ./.github/actions/build_micro_service
with:
name: logstash-debugd
dockerfile: debugd/internal/debugd/logcollector/logstash/Dockerfile
dockerfile: debugd/logstash/Dockerfile
githubToken: ${{ secrets.GITHUB_TOKEN }}

- name: Build and upload filebeat container image
- name: Build and upload Filebeat container image
uses: ./.github/actions/build_micro_service
with:
name: filebeat-debugd
dockerfile: debugd/internal/debugd/logcollector/filebeat/Dockerfile
dockerfile: debugd/filebeat/Dockerfile
githubToken: ${{ secrets.GITHUB_TOKEN }}
5 changes: 4 additions & 1 deletion .github/workflows/e2e-test-daily.yml
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,9 @@ jobs:
cosignPassword: ${{ secrets.COSIGN_PASSWORD }}
cosignPrivateKey: ${{ secrets.COSIGN_PRIVATE_KEY }}
fetchMeasurements: ${{ matrix.refStream != 'ref/release/stream/stable/?' }}
awsOpenSearchDomain: ${{ secrets.AWS_OPENSEARCH_DOMAIN }}
awsOpenSearchUsers: ${{ secrets.AWS_OPENSEARCH_USER }}
awsOpenSearchPwd: ${{ secrets.AWS_OPENSEARCH_PWD }}
msanft marked this conversation as resolved.
Show resolved Hide resolved

- name: Always terminate cluster
if: always()
Expand Down Expand Up @@ -122,7 +125,7 @@ jobs:
test: ${{ matrix.test }}
kubernetesVersion: ${{ matrix.kubernetesVersion }}
provider: ${{ matrix.provider }}

e2e-mini:
name: Run miniconstellation E2E test
runs-on: ubuntu-22.04
Expand Down
3 changes: 3 additions & 0 deletions .github/workflows/e2e-upgrade.yml
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,9 @@ jobs:
azureIAMCreateCredentials: ${{ secrets.AZURE_E2E_IAM_CREDENTIALS }}
registry: ghcr.io
githubToken: ${{ secrets.GITHUB_TOKEN }}
awsOpenSearchDomain: ${{ secrets.AWS_OPENSEARCH_DOMAIN }}
awsOpenSearchUsers: ${{ secrets.AWS_OPENSEARCH_USER }}
awsOpenSearchPwd: ${{ secrets.AWS_OPENSEARCH_PWD }}

- name: Build CLI
uses: ./.github/actions/build_cli
Expand Down
15 changes: 12 additions & 3 deletions bazel/container/container.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
#!/usr/bin/env bash

function setup {
(stopBazelServer && sleep 1) || true

# Ensure that the cache directories exist, so they are not created by docker with root permissions.
mkdir -p "${HOME}/.cache/bazel"
mkdir -p "${HOME}/.cache/shared_bazel_repository_cache"
Expand All @@ -20,6 +22,12 @@ function startBazelServer {

setup

# In-container .bazelrc overwrite.
mkdir -p "/tmp/bazel-container"
cat << EOF > "/tmp/bazel-container/.bazelrc"
startup --output_user_root=/home/${USER}/.cache/bazel/_bazel_${USER}
EOF

local hostWorkspaceDir
hostWorkspaceDir="$(git rev-parse --show-toplevel)"
if [[ $? -ne 0 ]]; then
Expand All @@ -36,10 +44,11 @@ function startBazelServer {
--detach \
--name "${containerName}" \
-v "${hostWorkspaceDir}":/workspace \
-v "${HOME}/.cache/bazel":"/home/builder/.cache/bazel" \
-v "${HOME}/.cache/shared_bazel_repository_cache":"/home/builder/.cache/shared_bazel_repository_cache" \
-v "${HOME}/.cache/shared_bazel_action_cache":"/home/builder/.cache/shared_bazel_action_cache" \
-v "${HOME}/.cache/bazel":"${HOME}/.cache/bazel" \
-v "${HOME}/.cache/shared_bazel_repository_cache":"${HOME}/.cache/shared_bazel_repository_cache" \
-v "${HOME}/.cache/shared_bazel_action_cache":"${HOME}/.cache/shared_bazel_action_cache" \
-v "${HOME}/.docker/config.json":"/home/builder/.docker/config.json" \
-v "/tmp/bazel-container/.bazelrc":"/etc/bazel.bazelrc" \
--entrypoint=/bin/sleep \
"${containerImage}" \
infinity || return $?
Expand Down
12 changes: 12 additions & 0 deletions debugd/filebeat/BUILD.bazel
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Build rule for the Filebeat configuration assets embedded into Go code
# (see assets.go, which applies go:embed to these files).
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "filebeat",
    srcs = ["assets.go"],
    # Files made visible to the //go:embed directives in assets.go.
    embedsrcs = [
        "templates/filebeat.yml",
        "inputs.yml",
    ],
    importpath = "github.com/edgelesssys/constellation/v2/debugd/filebeat",
    visibility = ["//visibility:public"],
)
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@ FROM fedora:38@sha256:61f921e0c7b51e162e6f94b14ef4e6b0d38eac5987286fe4f52a2c1158

RUN dnf install -y https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-8.6.2-x86_64.rpm

COPY debugd/internal/debugd/logcollector/filebeat/filebeat.yml /usr/share/filebeat/filebeat.yml

COPY debugd/internal/debugd/logcollector/filebeat/inputs.yml /usr/share/filebeat/inputs.d/inputs.yml
COPY debugd/filebeat/inputs.yml /usr/share/filebeat/inputs.yml
COPY debugd/filebeat/templates/ /usr/share/filebeat/templates/

ENTRYPOINT ["/usr/share/filebeat/bin/filebeat", "-e", "--path.home", "/usr/share/filebeat", "--path.data", "/usr/share/filebeat/data"]
15 changes: 15 additions & 0 deletions debugd/filebeat/assets.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
/*
Copyright (c) Edgeless Systems GmbH

SPDX-License-Identifier: AGPL-3.0-only
*/

// Package filebeat embeds the Filebeat configuration files shipped with debugd.
package filebeat

import "embed"

// Assets are the exported Filebeat template files.
//
// The patterns pull in every top-level .yml file (e.g. inputs.yml) and the
// contents of the templates/ directory (e.g. templates/filebeat.yml, per the
// embedsrcs in BUILD.bazel).
//
//go:embed *.yml
//go:embed templates/*
var Assets embed.FS
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
output.logstash:
hosts: ["localhost:5044"]
hosts: ["{{ .LogstashHost }}"]

output.console:
enabled: false
Expand All @@ -12,7 +12,7 @@ logging:
filebeat.config:
inputs:
enabled: true
path: /usr/share/filebeat/inputs.d/*.yml
path: /usr/share/filebeat/inputs.yml
# reload.enabled: true
# reload.period: 10s

Expand Down
19 changes: 1 addition & 18 deletions debugd/internal/cdbg/cmd/deploy.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ import (
"net"
"path/filepath"
"strconv"
"strings"
"sync"
"time"

Expand Down Expand Up @@ -125,7 +124,7 @@ func deploy(cmd *cobra.Command, fileHandler file.Handler, constellationConfig *c
if err != nil {
return err
}
if err := checkInfoMap(info); err != nil {
if err := logcollector.FieldsFromMap(info).Check(); err != nil {
return err
}

Expand Down Expand Up @@ -281,22 +280,6 @@ func uploadFiles(ctx context.Context, client pb.DebugdClient, in deployOnEndpoin
return nil
}

// checkInfoMap validates user-supplied info key/value pairs. Keys carrying
// the logcollection prefix (as reported by logcollector.InfoFields) must use
// one of the allowed subkeys; all other keys are ignored. Returns an error
// on the first invalid subkey encountered.
func checkInfoMap(info map[string]string) error {
	logPrefix, logFields := logcollector.InfoFields()
	for k := range info {
		// Only keys under the logcollect prefix are subject to validation.
		if !strings.HasPrefix(k, logPrefix) {
			continue
		}
		subkey := strings.TrimPrefix(k, logPrefix)

		if _, ok := logFields[subkey]; !ok {
			// NOTE(review): k already includes logPrefix, so this message
			// prints the prefix twice — presumably should use subkey here;
			// verify intended message format.
			return fmt.Errorf("invalid subkey %q for info key %q", subkey, fmt.Sprintf("%s.%s", logPrefix, k))
		}
	}

	return nil
}

type fileTransferer interface {
SendFiles(stream filetransfer.SendFilesStream) error
SetFiles(files []filetransfer.FileStat)
Expand Down
73 changes: 60 additions & 13 deletions debugd/internal/debugd/logcollector/fields.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,22 +3,69 @@ Copyright (c) Edgeless Systems GmbH

SPDX-License-Identifier: AGPL-3.0-only
*/

package logcollector

// InfoFields are the fields that are allowed in the info map
// under the prefix "logcollect.".
func InfoFields() (string, map[string]struct{}) {
return "logcollect.", map[string]struct{}{
"admin": {}, // name of the person running the cdbg command
// THIS FILE IS A DUPLICATE OF hack/logcollector/fields/fields.go

import (
"fmt"
"strings"
)

var (
	// DebugdLogcollectPrefix is the prefix for all OpenSearch fields specified
	// by the user when starting through debugd.
	DebugdLogcollectPrefix = "logcollect."

	// AllowedFields are the fields that are allowed to be used in the
	// logcollection. Each key appears exactly once; the collapsed diff had
	// duplicated the github.* keys, which is a compile error in a Go map literal.
	AllowedFields = map[string]struct{}{
		"admin":            {}, // name of the person running the cdbg command
		"is_debug_cluster": {}, // whether the cluster is a debug cluster
		// GitHub workflow information, see
		// https://docs.github.com/en/actions/learn-github-actions/environment-variables#default-environment-variables
		"github.actor":            {},
		"github.workflow":         {},
		"github.run-id":           {},
		"github.run-attempt":      {},
		"github.ref-name":         {},
		"github.sha":              {},
		"github.runner-os":        {},
		"github.e2e-test-payload": {},
		"github.is-debug-cluster": {},
		// Cloud provider used in the e2e test. If deployed with debugd, this is
		// a duplicate as it's also available in the metadata. If deployed
		// through K8s in e2e tests with a stable image, this is where the cloud
		// provider is saved in.
		"github.e2e-test-provider": {},
		"deployment-type":          {}, // deployment type, e.g. "debugd", "k8s"
	}
)

// Fields is the set of OpenSearch fields attached to a log message,
// keyed by field name.
type Fields map[string]string

// FieldsFromMap converts a plain string map into Fields. The result shares
// the underlying map with m; no copy is made.
func FieldsFromMap(m map[string]string) Fields {
	fields := Fields(m)
	return fields
}

// Extend merges other into f in place, overwriting keys that already exist,
// and returns f for chaining.
func (f Fields) Extend(other Fields) Fields {
	for key, value := range other {
		f[key] = value
	}
	return f
}

// Check validates f against the allow list: for every field whose key starts
// with the debugd logcollect prefix, the remainder of the key (the subkey)
// must be present in AllowedFields. Keys without the prefix are ignored.
// It returns an error naming the first invalid subkey found, or nil.
func (f Fields) Check() error {
	for key := range f {
		if !strings.HasPrefix(key, DebugdLogcollectPrefix) {
			continue
		}
		subkey := strings.TrimPrefix(key, DebugdLogcollectPrefix)

		if _, ok := AllowedFields[subkey]; !ok {
			// key already carries the prefix, so report it as-is; the previous
			// message re-prepended DebugdLogcollectPrefix, yielding
			// "logcollect.logcollect.<subkey>".
			return fmt.Errorf("invalid subkey %q for info key %q", subkey, key)
		}
	}

	return nil
}
Loading
Loading