Skip to content

Commit

Permalink
Merge branch 'develop' into csa-1.12.0-cdh-7.1.9
Browse files Browse the repository at this point in the history
# Conflicts:
#	flink-cyber/flink-commands/json-commands/pom.xml
  • Loading branch information
stas-panasiuk committed Oct 8, 2024
2 parents 629a2c1 + 0512dcd commit 402ae58
Show file tree
Hide file tree
Showing 29 changed files with 13,600 additions and 635 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build_and_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ jobs:
restore-keys: |
${{ runner.os }}-maven-
- name: Build and Test with Maven
run: mvn -P '!add-depebdencies-for-IDEA,!full-build,!include-front-end' -B package --file flink-cyber/pom.xml
run: mvn -P '!add-dependencies-for-IDEA,!full-build,!include-front-end' -B package --file flink-cyber/pom.xml

# Optional: Uploads the full dependency graph to GitHub to improve the quality of Dependabot alerts this repository can receive
- name: Update dependency graph
Expand Down
96 changes: 96 additions & 0 deletions .github/workflows/publish_release.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
# This workflow will build a Java project with Maven, and cache/restore any dependencies to improve the workflow execution time
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-java-with-maven

# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Publish Release

on:
  schedule:
    # At 0:00am each day on the default branch
    - cron: '0 0 * * *'
  workflow_dispatch:
    inputs:
      tag:
        description: 'Version tag'
        required: true
        type: string
      name:
        description: 'Release name'
        required: true
        type: string
      skip_tests:
        description: 'Skip tests'
        required: true
        default: false
        type: boolean

jobs:
  # checks if there's a commit in the last 24 hours
  check_date:
    runs-on: ubuntu-latest
    outputs:
      should_run: ${{ steps.should_run.outputs.should_run }}
    steps:
      - uses: actions/checkout@v2
      - name: print latest_commit
        run: echo ${{ github.sha }}

      - id: should_run
        continue-on-error: true
        name: check latest commit is less than a day
        # If no commit is newer than 24 hours, publish should_run=false so the
        # release job is skipped; otherwise cancel this run and wait for the
        # cancellation to complete.
        # Fix: `::set-output` has been disabled by GitHub Actions; write to
        # $GITHUB_OUTPUT instead, otherwise `should_run` is never set and the
        # nightly gate silently stops working.
        # NOTE(review): cancelling the run when recent commits DO exist looks
        # inverted relative to the usual "skip when idle" pattern — confirm intent.
        run: |
          test -z $(git rev-list --after="24 hours" ${{ github.sha }}) \
            && echo "should_run=false" >> "$GITHUB_OUTPUT" \
            || (gh run cancel ${{ github.run_id }} \
            && gh run watch ${{ github.run_id }})
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  publish_release:
    needs: check_date
    if: ${{ github.event_name == 'workflow_dispatch' || needs.check_date.outputs.should_run != 'false' }}
    runs-on: ubuntu-latest
    container: maven:3-eclipse-temurin-8
    steps:
      - uses: actions/checkout@v3

      - name: Cache local Maven repository
        uses: actions/cache/restore@v3
        with:
          path: /root/.m2/repository
          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-
      - name: Build and Test with Maven
        # Tests are skipped for nightly (scheduled) builds; manual dispatch
        # honours the skip_tests input.
        run: mvn -P '!add-dependencies-for-IDEA,full-build,include-front-end' -B package -DskipTests=${{ github.event_name == 'workflow_dispatch' && inputs.skip_tests || true }} --file flink-cyber/pom.xml

      - name: Release
        uses: softprops/action-gh-release@v2
        if: ${{ github.event_name == 'workflow_dispatch' }}
        with:
          tag_name: ${{ inputs.tag }}
          name: ${{ inputs.name }}
          draft: true
          generate_release_notes: true
          token: ${{ secrets.GITHUB_TOKEN }}
          files: |
            flink-cyber/cyber-csd/target/cyber-csd--*
            flink-cyber/cyber-csd/target/CYBERSEC-*
            flink-cyber/cyber-parcel/target/CYBERSEC-*
      - name: Get current date
        id: date
        run: echo "CURRENT_DATE=$(date +'%Y-%m-%d')" >> $GITHUB_ENV

      - name: Publish artifact
        uses: actions/upload-artifact@v4
        with:
          name: cybersec-${{ github.event_name == 'workflow_dispatch' && 'release' || 'nightly' }}-${{ env.CURRENT_DATE }}
          path: |
            flink-cyber/cyber-csd/target/cyber-csd--*
            flink-cyber/cyber-csd/target/CYBERSEC-*
            flink-cyber/cyber-parcel/target/CYBERSEC-*
7 changes: 6 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,12 @@ The Cyber Toolkit is flexible and configurable so the ingestion can be changed w
3. [Event Generation](flink-cyber/caracal-generator/README.md)

## Packaging
The Cybersec Toolkit includes a Cloudera Manager parcel and service for easier installation.
The Cybersec Toolkit includes a Cloudera Manager parcel and service for easier installation.

Artifacts are available for download on the [releases page](https://github.com/cloudera/cybersec/releases).
You can also find less stable but more up-to-date artifacts by selecting one of the successful runs on [this page](https://github.com/cloudera/cybersec/actions/workflows/publish_release.yml) and scrolling to the bottom of that run's page.

Or you can find artifacts after the build in the following directories:
1. [Parcel](flink-cyber/cyber-parcel)
2. [Cloudera Service](flink-cyber/cyber-csd)

Expand Down
6 changes: 6 additions & 0 deletions flink-cyber/cyber-services/cyber-service-common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,12 @@
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-core</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
Expand Down
7 changes: 6 additions & 1 deletion flink-cyber/cyber-services/cyber-worker-service/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,12 @@
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-core</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>

<build>
Expand Down
1 change: 1 addition & 0 deletions flink-cyber/flink-commands/json-commands/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>

<dependency>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -334,13 +334,17 @@ private void configure(StreamTableEnvironment tableEnv) {

protected final String buildInsertSql(String topic, MappingDto mappingDto, ResolvedSchema tableSchema) {
return String.join("\n",
getInsertSqlPrefix() + " " + mappingDto.getTableName() + "(" + getInsertColumns(mappingDto) + ") "
getInsertSqlPrefix() + " " + getTableName(topic, mappingDto) + "(" + getInsertColumns(mappingDto) + ") "
+ getInsertSqlSuffix(),
" SELECT " + getFromColumns(mappingDto, tableSchema),
" from " + KAFKA_TABLE,
String.format(" where `source`='%s'", topic));
}

// Resolves the table name used when building the insert SQL. The base
// implementation ignores the source topic and returns the configured mapping
// table name; subclasses may override to derive a per-topic name (e.g. the
// Kafka variant builds a "<topic>_tmpview" temporary-view name).
protected String getTableName(String source, MappingDto mappingDto) {
return mappingDto.getTableName();
}

// SQL keyword prefix for the generated statement. The base implementation
// inserts directly into the target table; subclasses may override (e.g. to
// emit "CREATE TEMPORARY VIEW " instead). Note the trailing space — callers
// concatenate the table name directly after this prefix.
protected String getInsertSqlPrefix() {
return "INSERT INTO ";
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ protected void executeInsert(StreamTableEnvironment tableEnv, Map<String, Mappin
mappingDto.getTableName(), "indexing-job", params);

//read from view and write to kafka sink
final Table table = tableEnv.from(mappingDto.getTableName());
final Table table = tableEnv.from(getTableName(topic, mappingDto));
final String schemaString = AvroSchemaUtil.convertToAvro(tablesConfig.get(mappingDto.getTableName()))
.toString();

Expand Down Expand Up @@ -87,6 +87,11 @@ protected FormatDescriptor getFormatDescriptor() {
return null;
}

@Override
protected String getTableName(String source, MappingDto mappingDto) {
// Derive a per-topic temporary-view name so each source topic gets its own
// view (this subclass creates temporary views rather than inserting directly).
return source.concat("_tmpview");
}

@Override
protected String getInsertSqlPrefix() {
return "CREATE TEMPORARY VIEW ";
Expand Down
8 changes: 7 additions & 1 deletion flink-cyber/flink-profiler/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,12 @@
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-common</artifactId>
<version>${flink.version}</version>
<scope>compile</scope>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
Expand All @@ -46,6 +51,7 @@
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-avro</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
Expand Down
11 changes: 6 additions & 5 deletions flink-cyber/flink-stellar/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -77,11 +77,6 @@
<groupId>org.adrianwalker</groupId>
<artifactId>multiline-string</artifactId>
</dependency>
<dependency>
<groupId>com.trendmicro</groupId>
<artifactId>tlsh</artifactId>
<version>3.7.1</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-math3</artifactId>
Expand Down Expand Up @@ -252,6 +247,12 @@
<version>${global_hamcrest_version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<version>${jupiter.junit.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
package org.apache.metron.stellar.common.utils.hashing.tlsh;

/**
 * Pre-computed 256x256 lookup table of TLSH "bit pairs" distances.
 *
 * <p>Each byte is treated as four base-4 digits. The distance between two
 * bytes is the sum of the absolute differences of their corresponding digits,
 * except that a digit difference of 3 is penalized double (counted as 6).
 * The full table is generated once at construction time so lookups are O(1).
 */
class BitPairsTable {

    // Table covers every unsigned byte value pair (0..255 x 0..255).
    private static final int TABLE_SIZE = 256;

    private final int[][] diffs;

    BitPairsTable() {
        this.diffs = buildTable();
    }

    // Fills the full distance table by computing the pair distance for every
    // (row, column) byte combination.
    private static int[][] buildTable() {
        final int[][] table = new int[TABLE_SIZE][TABLE_SIZE];
        for (int row = 0; row < TABLE_SIZE; row++) {
            for (int col = 0; col < TABLE_SIZE; col++) {
                table[row][col] = pairDistance(row, col);
            }
        }
        return table;
    }

    // Distance between two bytes: sum over their four base-4 digits of
    // |leftDigit - rightDigit|, with a maximal difference of 3 counted as 6.
    private static int pairDistance(int left, int right) {
        int total = 0;
        for (int digit = 0; digit < 4; digit++) {
            final int d = Math.abs((left & 0x3) - (right & 0x3));
            total += (d == 3) ? 6 : d;
            left >>= 2;
            right >>= 2;
        }
        return total;
    }

    /** Returns the pre-computed distance for the given byte-value pair. */
    public int getValue(int row, int column) {
        return diffs[row][column];
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package org.apache.metron.stellar.common.utils.hashing.tlsh;


import java.util.function.IntUnaryOperator;
import java.util.stream.IntStream;

public class SlidingWindow {
public static final int DEFAULT_SIZE = 5;
private final byte[] window;
private int byteCount = 0;

SlidingWindow() {
this.window = new byte[DEFAULT_SIZE];
}

public void put(final byte value) {
int cursor = byteCount % window.length;
window[cursor] = value;
byteCount++;
}

public int[] getWindow() {
final int startPosition = (byteCount - 1) % window.length;
final IntUnaryOperator reverseIterate = i -> i == 0 ? window.length - 1 : i - 1;
final IntUnaryOperator mapper = i -> window[i] & 0xFF;
return IntStream.iterate(startPosition, reverseIterate)
.limit(window.length)
.map(mapper)
.toArray();
}

public int getByteCount() {
return byteCount;
}

public int getWindowSize() {
return window.length;
}
}
Loading

0 comments on commit 402ae58

Please sign in to comment.