diff --git a/.github/scripts/k8s_wait_for_node.sh b/.github/scripts/k8s_wait_for_node.sh new file mode 100644 index 00000000..a428bc41 --- /dev/null +++ b/.github/scripts/k8s_wait_for_node.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +set -eE + +node="$1" + +fail () { + kubectl describe nodes + kubectl get node "${node}" + exit 1 +} +trap fail ERR + +echo "waiting for node: ${node}" +kubectl get node "${node}" +kubectl wait --for=condition=Ready --timeout=60s "node/${node}" \ No newline at end of file diff --git a/.github/workflows/plugin_aerospike.yml b/.github/workflows/plugin_aerospike.yml new file mode 100644 index 00000000..21681752 --- /dev/null +++ b/.github/workflows/plugin_aerospike.yml @@ -0,0 +1,70 @@ +name: aerospike +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "default" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump 
Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect \ No newline at end of file diff --git a/.github/workflows/plugin_apache_combined.yml b/.github/workflows/plugin_apache_combined.yml new file mode 100644 index 00000000..027a20dd --- /dev/null +++ b/.github/workflows/plugin_apache_combined.yml @@ -0,0 +1,70 @@ +name: apache_combined +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "default" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format 
}}/expect/expect.json output/expect + sudo cat output/expect \ No newline at end of file diff --git a/.github/workflows/plugin_apache_common.yml b/.github/workflows/plugin_apache_common.yml new file mode 100644 index 00000000..5cd0ee8e --- /dev/null +++ b/.github/workflows/plugin_apache_common.yml @@ -0,0 +1,70 @@ +name: apache_common +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "default" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect \ No newline at end of file diff --git a/.github/workflows/plugin_apache_http.yml b/.github/workflows/plugin_apache_http.yml new file mode 100644 
index 00000000..a70997e0 --- /dev/null +++ b/.github/workflows/plugin_apache_http.yml @@ -0,0 +1,71 @@ +name: apache_http +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "default" + - "observiq" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect diff --git a/.github/workflows/plugin_cassandra.yaml b/.github/workflows/plugin_cassandra.yaml new file mode 100644 index 00000000..eab293b9 --- /dev/null +++ b/.github/workflows/plugin_cassandra.yaml @@ -0,0 +1,70 @@ +name: cassandra +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + 
fail-fast: false + matrix: + format: + - "default" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect diff --git a/.github/workflows/plugin_codeigniter.yml b/.github/workflows/plugin_codeigniter.yml new file mode 100644 index 00000000..91e5b5c3 --- /dev/null +++ b/.github/workflows/plugin_codeigniter.yml @@ -0,0 +1,70 @@ +name: codeigniter +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "default" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library 
+ token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect diff --git a/.github/workflows/plugin_common_event_format.yml b/.github/workflows/plugin_common_event_format.yml new file mode 100644 index 00000000..1cafd0b3 --- /dev/null +++ b/.github/workflows/plugin_common_event_format.yml @@ -0,0 +1,70 @@ +name: common_event_format +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "default" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install 
Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect diff --git a/.github/workflows/plugin_couchdb.yml b/.github/workflows/plugin_couchdb.yml new file mode 100644 index 00000000..cc41f505 --- /dev/null +++ b/.github/workflows/plugin_couchdb.yml @@ -0,0 +1,70 @@ +name: couchdb +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "default" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name 
agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect diff --git a/.github/workflows/plugin_docker_swarm.yml b/.github/workflows/plugin_docker_swarm.yml new file mode 100644 index 00000000..26672a5b --- /dev/null +++ b/.github/workflows/plugin_docker_swarm.yml @@ -0,0 +1,70 @@ +name: docker_swarm +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "default" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + 
ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect diff --git a/.github/workflows/plugin_haproxy.yml b/.github/workflows/plugin_haproxy.yml new file mode 100644 index 00000000..70183cdc --- /dev/null +++ b/.github/workflows/plugin_haproxy.yml @@ -0,0 +1,71 @@ +name: haproxy +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "default" + - "observiq" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker 
logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect diff --git a/.github/workflows/plugin_kubernetes_container.yml b/.github/workflows/plugin_kubernetes_container.yml new file mode 100644 index 00000000..4e547186 --- /dev/null +++ b/.github/workflows/plugin_kubernetes_container.yml @@ -0,0 +1,200 @@ +name: kubernetes_container +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + k8s-version: + - "v1.23.0" + - "v1.22.0" + - "v1.21.0" + - "v1.20.0" + - "v1.19.0" + - "v1.18.0" + - "v1.17.0" + - "v1.16.0" + - "v1.15.0" + - "v1.14.0" + format: + - "docker" + - "containerd" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Install Minikube + run: | + sudo apt-get update >/dev/null + sudo apt-get install -y conntrack >/dev/null + curl -s -LO https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64 + sudo install minikube-linux-amd64 /usr/local/bin/minikube + + # Start Minikube with --driver=docker to run a k8s cluster within the actions + # environment.
This allows us to share configs, logs, and output with the container + - name: Start Minikube + timeout-minutes: 3 + run: | + minikube start \ + --driver=docker \ + --kubernetes-version=${{ matrix.k8s-version }} \ + --container-runtime=${{ matrix.format }} + + - name: Wait For Node Condition Ready + timeout-minutes: 3 + run: | + node=$(kubectl get node | grep -v NAME | awk '{print $1}') + bash ./.github/scripts/k8s_wait_for_node.sh $node + + - name: Generate Dockerfile + run: | + cat <<- EOF > Dockerfile + FROM ubuntu:20.04 + WORKDIR / + RUN apt-get update >/dev/null + RUN apt-get install -y curl >/dev/null + RUN mkdir /collector /storage /output + RUN curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + RUN chmod +x collector/collector + COPY log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/config/collector.yaml /input/config/collector.yaml + COPY plugins /plugins + EOF + + - name: Build Image + timeout-minutes: 1 + run: docker build . 
-t agent:latest + + - name: Push Image to Minikube + timeout-minutes: 2 + run: minikube image load agent:latest + + - name: Create Input Log ConfigMap + run: | + kubectl create configmap logs \ + --from-file=log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/logs/adservice-5657f795f5-ql97m_default_server-2180fc6125cb444bd32be19cfa73e71a25e5d6c98b59b5191ee51fc6ff6c6723.log \ + --from-file=log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/logs/example-json-logger-6b49dd876-xmr2j_default_logger-ebd483078a72e8c029757f27d872b238ac26e1d0c8c7d842415790c33cc24451.log \ + --from-file=log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/logs/productcatalogservice-7cdd54f7c7-nbsgt_default_server-0a3bc17f1a059f850fc35b8b947fcb2f1a7a726b2574f48c973015e49df47e59.log + + # local docker images can be used when imagePullPolicy: IfNotPresent + - name: Generate Deployment Manifest + run: | + cat <<- EOF > deploy.yaml + --- + kind: ServiceAccount + apiVersion: v1 + metadata: + name: stanza-metadata + namespace: default + --- + kind: ClusterRole + apiVersion: rbac.authorization.k8s.io/v1 + metadata: + name: stanza-metadata + namespace: default + rules: + - apiGroups: ["", "apps", "batch"] + resources: + - pods + - namespaces + - replicasets + - jobs + verbs: ["get", "list"] + --- + kind: ClusterRoleBinding + apiVersion: rbac.authorization.k8s.io/v1 + metadata: + name: stanza-metadata + roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: stanza-metadata + subjects: + - kind: ServiceAccount + name: stanza-metadata + namespace: default + --- + apiVersion: apps/v1 + kind: Deployment + metadata: + name: agent + labels: + app: agent + spec: + replicas: 1 + selector: + matchLabels: + app: agent + template: + metadata: + labels: + app: agent + spec: + serviceAccountName: stanza-metadata + containers: + - name: agent + image: agent:latest + imagePullPolicy: IfNotPresent + command: ["/collector/collector"] + args: ["--config", 
"/input/config/collector.yaml"] + volumeMounts: + - name: logs + mountPath: /var/log/containers/adservice-5657f795f5-ql97m_default_server-2180fc6125cb444bd32be19cfa73e71a25e5d6c98b59b5191ee51fc6ff6c6723.log + subPath: adservice-5657f795f5-ql97m_default_server-2180fc6125cb444bd32be19cfa73e71a25e5d6c98b59b5191ee51fc6ff6c6723.log + - name: logs + mountPath: /var/log/containers/example-json-logger-6b49dd876-xmr2j_default_logger-ebd483078a72e8c029757f27d872b238ac26e1d0c8c7d842415790c33cc24451.log + subPath: example-json-logger-6b49dd876-xmr2j_default_logger-ebd483078a72e8c029757f27d872b238ac26e1d0c8c7d842415790c33cc24451.log + - name: logs + mountPath: /var/log/containers/productcatalogservice-7cdd54f7c7-nbsgt_default_server-0a3bc17f1a059f850fc35b8b947fcb2f1a7a726b2574f48c973015e49df47e59.log + subPath: productcatalogservice-7cdd54f7c7-nbsgt_default_server-0a3bc17f1a059f850fc35b8b947fcb2f1a7a726b2574f48c973015e49df47e59.log + volumes: + - name: logs + configMap: + name: logs + EOF + + - name: Run Agent + timeout-minutes: 3 + run: | + kubectl apply -f deploy.yaml + sleep 10 && kubectl get pods + pod=$(kubectl get pods | grep agent | awk '{print $1}') + kubectl describe pod "${pod}" + kubectl logs "${pod}" + + - name: Wait For Agent + timeout-minutes: 3 + run: | + kubectl rollout status -w deployment/agent + + - name: Pause + run: sleep 10 + + # Generally we want to stop the agent before stopping it however kubernetes will + # purge the log if we kill the pod + - name: Dump Stanza Logs + run: kubectl logs deploy/agent + + - name: Get Output + run: | + mkdir -p output + pod=$(kubectl get pods | grep agent | awk '{print $1}') + kubectl cp "${pod}:/output/out" output/out + cat output/out + + - name: Get Expected Output + run: | + cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + cat output/expect diff --git a/.github/workflows/plugin_nginx.yml b/.github/workflows/plugin_nginx.yml new file mode 100644 index 00000000..2c7a0ab8 --- 
/dev/null +++ b/.github/workflows/plugin_nginx.yml @@ -0,0 +1,71 @@ +name: nginx +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "default" + - "observiq" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect diff --git a/.github/workflows/plugin_oracledb.yml b/.github/workflows/plugin_oracledb.yml new file mode 100644 index 00000000..5ee45df2 --- /dev/null +++ b/.github/workflows/plugin_oracledb.yml @@ -0,0 +1,72 @@ +name: oracledb +on: + pull_request: + push: + branches: + - master +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + format: + - "alert" + - 
"audit" + - "listener" + steps: + - name: Clone stanza-plugins + uses: actions/checkout@v2 + + - name: Clone Log Library + uses: actions/checkout@v2 + with: + repository: 'observIQ/log-library' + path: log-library + token: ${{ secrets.LOG_LIBRARY_ACCESS_TOKEN }} + ref: otel + + - name: Create Output Directory + run: mkdir output + + - name: Create Offset Database Directory + run: mkdir -p storage + + - name: Install Collector + run: | + mkdir -p collector + curl -s -o collector/collector https://storage.googleapis.com/observiq-cloud/observiq-collector/0.0.10/collector_linux_amd64 + chmod +x collector/collector + + - name: Run Agent + run: | + docker run -d \ + --name agent \ + -v $(pwd)/collector:/collector \ + -v $(pwd)/log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}:/input \ + -v $(pwd)/output:/output \ + -v $(pwd)/plugins:/plugins \ + -v $(pwd)/storage:/storage \ + --entrypoint sh \ + ubuntu:20.04 \ + -c '/collector/collector --config /input/config/collector.yaml' + + sleep 1 && docker logs agent + + - name: Pause + run: sleep 10 + + - name: Stop Stanza + run: docker kill agent + + - name: Dump Stanza Logs + run: docker logs agent + + - name: Get Output + run: | + sudo cat output/out + + - name: Get Expected Output + run: | + sudo cp log-library/cases/${GITHUB_WORKFLOW}/${{ matrix.format }}/expect/expect.json output/expect + sudo cat output/expect diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml deleted file mode 100644 index 9051e3a9..00000000 --- a/.github/workflows/validate.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: Test -on: - pull_request: - paths: - - "Makefile" - - ".github/**" - - "plugins/**" - - "schemas/**" - - "scripts/**" - - "test/**" - push: - paths: - - "Makefile" - - ".github/**" - - "plugins/**" - - "schemas/**" - - "scripts/**" - - "test/**" - branches: - - master -jobs: - integration: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-go@v1 - with: - go-version: '1.16' 
- - name: go test - run: make test \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index edcc018f..a135305b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,96 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [0.0.78] - Unreleased +## [1.0.0] Unreleased + +### Changed + +- `cisco_meraki`: parse src and dst fields into ip and port fields ([PR369](https://github.com/observIQ/stanza-plugins/pull/369)) + +## [0.0.88] Unreleased + +### Fixed + +- `haproxy`: Fixed typo with field name `query_parameter` ([PR368](https://github.com/observIQ/stanza-plugins/pull/368)) + +## [0.0.87] 2021-10-11 + +### Changed + +- `cisco_meraki`: Removed `key_value_parser` due to some log entries not following needed pattern ([PR357](https://github.com/observIQ/stanza-plugins/pull/357)) +- `cisco_catalyst`: Fix parsing error when log messages start with syslog priority ([PR358](https://github.com/observIQ/stanza-plugins/pull/358)) + +## [0.0.86] 2021-10-05 + +### Changed + +- `sonicwall`: Parse `src` and `dst` fields into ip, port, and interface fields: ([PR355](https://github.com/observIQ/stanza-plugins/pull/355)) +- `cisco_catalyst`: Renamed `facility` to `facility_text` ([PR354](https://github.com/observIQ/stanza-plugins/pull/354)) + +## [0.0.85] 2021-10-04 + +### Added + +- Added `cisco_catalyst` plugin ([351](https://github.com/observIQ/stanza-plugins/pull/351)) + +## [0.0.84] 2021-10-04 + +### Added + +- Added `sonicwall` plugin ([PR340](https://github.com/observIQ/stanza-plugins/pull/340)) + +### Changed + +- cisco_meraki: Remove custom regex parsers and use key_value_parser instead. ([PR349](https://github.com/observIQ/stanza-plugins/pull/349)) +- Update `codeigniter`, `common_event_format`, and `uwsgi` + - Remove Log Parser from title. 
+ - Update uwsgi field `headers` to `headers_count` + +## [0.0.83] 2021-09-29 + +### Added + +- Added `tcp` and `udp` plugin ([PR341](https://github.com/observIQ/stanza-plugins/pull/341)) +- Added `common_event_format` plugin ([328](https://github.com/observIQ/stanza-plugins/pull/328)) + +### Fixed + +- HAProxy: Handle issue where http logs might fail if extra field is present [PR346](https://github.com/observIQ/stanza-plugins/pull/346) + +## [0.0.82] 2021-09-28 + +### Fixed + +- OracleDB: Resolved parsing issue related to multiple audit log formats [PR343](https://github.com/observIQ/stanza-plugins/pull/343) + +## [0.0.81] 2021-09-28 + +- HAProxy: Resolved an issue where http logs using default format can fail to parse ([PR342](https://github.com/observIQ/stanza-plugins/pull/342)) + +## [0.0.80] 2021-09-23 + +### Fixed +- HAProxy: Promote frontend_port as a resource ([PR338](https://github.com/observIQ/stanza-plugins/pull/338)) +- Updated ubiquiti plugin ([337](https://github.com/observIQ/stanza-plugins/pull/337)) + - Updated `ac_lite_ap_parser` expression check to be more in line with expected format. + - Moved catchall from default to a route with pattern matching and set default to output in router. + - Added if checks to determine if fields exist before doing operation `promote_name`, `promote_device`, and `severity_parser`. This will stop errors in log file if fields do not exist.
+ +## [0.0.79] - 2021-09-22 + +### Added + +- Added haproxy plugin ([PR 335](https://github.com/observIQ/stanza-plugins/pull/335)) + +### Fixed + +- Oracle Database: ([PR 334](https://github.com/observIQ/stanza-plugins/pull/334)) + - Resolved issue where ipv6 values caused a parse failure + - Resolved issue where default listener path is not correct +- Oracle Database: ([PR 331](https://github.com/observIQ/stanza-plugins/pull/331)) + - Resolves issue where regex fails to parse audit file dbid + +## [0.0.78] - 2021-09-15 ### Changed diff --git a/README.md b/README.md index 87c5615f..e784ae1b 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,6 @@ stanza-plugins contains plugins for the [Stanza Log Agent](https://github.com/observIQ/stanza) -[![Status](https://github.com/observIQ/stanza-plugins/workflows/Test/badge.svg)](https://github.com/observIQ/stanza-plugins/Test) - ## Release Process All plugins in this repository are released together in a single operation. diff --git a/plugins/aerospike.yaml b/plugins/aerospike.yaml index 2009a9bb..70b8527d 100644 --- a/plugins/aerospike.yaml +++ b/plugins/aerospike.yaml @@ -1,5 +1,5 @@ # Plugin Info -version: 0.0.6 +version: 1.0.0 title: Aerospike description: Log parser for Aerospike parameters: @@ -30,7 +30,7 @@ pipeline: include: - {{ $general_log_path }} start_at: {{ $start_at }} - labels: + attributes: log_type: aerospike plugin_id: {{ .id }} output: aerospike_general_parser @@ -45,19 +45,19 @@ pipeline: parse_from: aerospike_severity mapping: info: detail - critical: 'failed assertion' + error2: 'failed assertion' output: config_filter - id: config_filter type: filter - expr: '$record.context == "config"' + expr: '$body.context == "config"' - id: namespace_router type: router routes: # Remove empty name space field - output: namespace_restructure - expr: '$record.namespace == ""' + expr: '$body.namespace == ""' # namespace not empty - output: {{.output}} expr: true diff --git a/plugins/amazon_eks.yaml 
b/plugins/amazon_eks.yaml index 702f5c4f..bd08f178 100644 --- a/plugins/amazon_eks.yaml +++ b/plugins/amazon_eks.yaml @@ -1,6 +1,6 @@ --- # Plugin Info -version: 0.0.3 +version: 1.0.0 title: Amazon EKS description: Log parser for Amazon EKS parameters: @@ -38,14 +38,14 @@ pipeline: include: - '{{ $container_log_path }}' start_at: '{{ $start_at }}' - labels: + attributes: plugin_id: '{{ .id }}' write_to: log # Filter out agent logs. Check if file_name field starts with stanza or bindplane-agent. - id: filename_filter type: filter - expr: '$labels.file_name != nil and ($labels.file_name contains "stanza" or $labels.file_name contains "bindplane-agent")' + expr: '$attributes["file.name"] != nil and ($attributes["file.name"] contains "stanza" or $attributes["file.name"] contains "bindplane-agent")' # Initial log entry should be safe to parse as JSON - id: container_json_parser @@ -58,7 +58,7 @@ pipeline: routes: # It appears to be JSON so send it to be parsed as JSON. - output: nested_json_parser - expr: '$record.log != nil and $record.log matches "^{.*}\\s*$"' + expr: '$body.log != nil and $body.log matches "^{.*}\\s*$"' # If log field doesn't appear to be JSON then, skip nested JSON parsers - output: container_regex_parser expr: true @@ -66,13 +66,13 @@ pipeline: # Attempt to parse nested JSON since the log appears to be JSON - id: nested_json_parser type: json_parser - parse_from: $record.log + parse_from: $body.log output: container_regex_parser # Log field has been parsed if possible and now we can parse the file name field for container information. 
- id: container_regex_parser type: regex_parser - parse_from: $labels.file_name + parse_from: $attributes["file.name"] regex: '^(?P[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*)_(?P[^_]+)_(?P.+)-(?P[a-z0-9]{64})\.log$' severity: parse_from: stream @@ -88,16 +88,16 @@ pipeline: type: restructure ops: - move: - from: '$record.pod_name' + from: '$body.pod_name' to: '$resource["k8s.pod.name"]' - move: - from: '$record.namespace' + from: '$body.namespace' to: '$resource["k8s.namespace.name"]' - move: - from: '$record.container_name' + from: '$body.container_name' to: '$resource["container.name"]' - move: - from: '$record.container_id' + from: '$body.container_id' to: '$resource["container.id"]' # Add kubernetes metadata @@ -112,31 +112,31 @@ pipeline: - add: field: "$resource['k8s.cluster.name']" value: "{{ $cluster_name }}" - output: add_labels_router + output: add_attributes_router # Add label log_type - - id: add_labels_router + - id: add_attributes_router type: router routes: - output: {{ .output }} - expr: '$labels["k8s_pod_label/component"] == "kube-controller-manager"' - labels: + expr: '$attributes["k8s_pod_label/component"] == "kube-controller-manager"' + attributes: log_type: 'eks.controller' - output: {{ .output }} - expr: '$labels["k8s_pod_label/component"] == "kube-scheduler"' - labels: + expr: '$attributes["k8s_pod_label/component"] == "kube-scheduler"' + attributes: log_type: 'eks.scheduler' - output: {{ .output }} - expr: '$labels["k8s_pod_label/component"] == "kube-apiserver"' - labels: + expr: '$attributes["k8s_pod_label/component"] == "kube-apiserver"' + attributes: log_type: 'eks.apiserver' - output: {{ .output }} - expr: '$labels["k8s_pod_label/component"] startsWith "kube-proxy"' - labels: + expr: '$attributes["k8s_pod_label/component"] startsWith "kube-proxy"' + attributes: log_type: 'eks.proxy' - output: {{ .output }} expr: true - labels: + attributes: log_type: 'eks.container' # Use journald to gather kubelet logs. 
Use provided path for journald if available otherwise use default locations. @@ -145,7 +145,7 @@ pipeline: # {{ if .kubelet_journald_log_path }} directory: '{{ .kubelet_journald_log_path }}' # {{ end }} - labels: + attributes: log_type: 'eks.kubelet' plugin_id: '{{ .id }}' output: kubelet_filter @@ -153,14 +153,14 @@ pipeline: # Only grab entry if it is the kubelet.service - id: kubelet_filter type: filter - expr: '$record._SYSTEMD_UNIT != "kubelet.service"' + expr: '$body._SYSTEMD_UNIT != "kubelet.service"' # Move hostname to k8s node resource - id: kubelet_resource type: restructure ops: - move: - from: "$record._HOSTNAME" + from: "$body._HOSTNAME" to: "$resource['host.name']" - move: from: "MESSAGE" @@ -174,7 +174,7 @@ pipeline: type: router routes: - output: message_regex_parser - expr: '$record.message matches "^\\w\\d{4}"' + expr: '$body.message matches "^\\w\\d{4}"' - output: {{ .output }} expr: true @@ -188,9 +188,9 @@ pipeline: mapping: debug: d info: i - warning: w + warn: w error: e - critical: c + error2: c timestamp: parse_from: timestamp layout: '%m%d %H:%M:%S.%s' diff --git a/plugins/apache_combined.yaml b/plugins/apache_combined.yaml index f2862785..de4904d8 100644 --- a/plugins/apache_combined.yaml +++ b/plugins/apache_combined.yaml @@ -1,4 +1,4 @@ -version: 0.0.1 +version: 1.0.0 title: Apache Combined description: Log parser for Apache combined format parameters: @@ -25,7 +25,7 @@ pipeline: include: - {{ $log_path }} start_at: {{ $start_at }} - labels: + attributes: log_type: 'apache_combined' plugin_id: {{ .id }} output: apache_combined_regex_parser @@ -42,7 +42,7 @@ pipeline: preserve_to: status mapping: info: 2xx - notice: 3xx - warning: 4xx + info2: 3xx + warn: 4xx error: 5xx output: {{ .output }} diff --git a/plugins/apache_common.yaml b/plugins/apache_common.yaml index b967d40c..169eb65e 100644 --- a/plugins/apache_common.yaml +++ b/plugins/apache_common.yaml @@ -1,4 +1,4 @@ -version: 0.0.1 +version: 1.0.0 title: Apache Common description: 
Log parser for Apache common format parameters: @@ -25,7 +25,7 @@ pipeline: include: - {{ $log_path }} start_at: {{ $start_at }} - labels: + attributes: log_type: 'apache_common' plugin_id: {{ .id }} output: apache_common_regex_parser @@ -42,13 +42,13 @@ pipeline: preserve_to: status mapping: info: 2xx - notice: 3xx - warning: 4xx + info2: 3xx + warn: 4xx error: 5xx output: {{ .output }} - id: access_protocol_parser type: regex_parser - parse_from: $record.protocol + parse_from: $body.protocol regex: '(?P[^/]*)/(?P.*)' output: {{ .output }} diff --git a/plugins/apache_http.yaml b/plugins/apache_http.yaml index 24566de8..ef7e72bb 100644 --- a/plugins/apache_http.yaml +++ b/plugins/apache_http.yaml @@ -1,4 +1,4 @@ -version: 0.0.8 +version: 1.0.0 title: Apache HTTP Server description: Log parser for Apache HTTP Server parameters: @@ -58,7 +58,7 @@ pipeline: include: - {{ $access_log_path }} start_at: {{ $start_at }} - labels: + attributes: log_type: 'apache_http.access' plugin_id: {{ .id }} output: '{{ if eq $log_format "default" }}access_regex_parser{{ else if eq $log_format "observiq" }}access_json_parser{{ end }}' @@ -74,7 +74,7 @@ pipeline: multiline: line_start_pattern: '\[(?P