Merge branch 'master' into oracle-updates

acrylJonny authored Dec 10, 2024
2 parents d8fb4c8 + 1f389c1 commit 17c01b4
Showing 618 changed files with 141,707 additions and 7,515 deletions.
16 changes: 4 additions & 12 deletions .github/workflows/airflow-plugin.yml
@@ -34,29 +34,21 @@ jobs:
         include:
           # Note: this should be kept in sync with tox.ini.
           - python-version: "3.8"
-            extra_pip_requirements: "apache-airflow~=2.1.4"
-            extra_pip_extras: plugin-v1
-          - python-version: "3.8"
-            extra_pip_requirements: "apache-airflow~=2.2.4"
-            extra_pip_extras: plugin-v1
+            extra_pip_requirements: "apache-airflow~=2.3.4"
+            extra_pip_extras: test-airflow23
           - python-version: "3.10"
             extra_pip_requirements: "apache-airflow~=2.4.3"
-            extra_pip_extras: plugin-v2,test-airflow24
+            extra_pip_extras: test-airflow24
           - python-version: "3.10"
             extra_pip_requirements: "apache-airflow~=2.6.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-3.10.txt"
-            extra_pip_extras: plugin-v2
           - python-version: "3.10"
             extra_pip_requirements: "apache-airflow~=2.7.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt"
-            extra_pip_extras: plugin-v2
           - python-version: "3.10"
             extra_pip_requirements: "apache-airflow~=2.8.1 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.8.1/constraints-3.10.txt"
-            extra_pip_extras: plugin-v2
           - python-version: "3.11"
             extra_pip_requirements: "apache-airflow~=2.9.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.9.3/constraints-3.11.txt"
-            extra_pip_extras: plugin-v2
           - python-version: "3.11"
-            extra_pip_requirements: "apache-airflow~=2.10.2 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.11.txt"
-            extra_pip_extras: plugin-v2
+            extra_pip_requirements: "apache-airflow~=2.10.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.10.3/constraints-3.11.txt"
       fail-fast: false
     steps:
       - name: Set up JDK 17
4 changes: 4 additions & 0 deletions .github/workflows/build-and-test.yml
@@ -75,6 +75,8 @@ jobs:
           path: |
             ~/.cache/uv
           key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }}
+      - name: Install dependencies
+        run: ./metadata-ingestion/scripts/install_deps.sh
       - name: Set up JDK 17
         uses: actions/setup-java@v4
         with:
@@ -83,6 +85,7 @@ jobs:
       - uses: gradle/actions/setup-gradle@v3
       - name: Gradle build (and test) for NOT metadata ingestion
         if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }}
+        # datahub-schematron:cli excluded due to dependency on metadata-ingestion
         run: |
           ./gradlew build \
             -x :metadata-ingestion:build \
@@ -100,6 +103,7 @@ jobs:
             -x :metadata-ingestion-modules:gx-plugin:check \
             -x :datahub-frontend:build \
             -x :datahub-web-react:build \
+            -x :metadata-integration:java:datahub-schematron:cli:test \
             --parallel
       - name: Gradle build (and test) for frontend
         if: ${{ matrix.command == 'frontend' && needs.setup.outputs.frontend_change == 'true' }}
1 change: 1 addition & 0 deletions .github/workflows/check-datahub-jars.yml
@@ -40,4 +40,5 @@ jobs:
       - name: check ${{ matrix.command }} jar
         run: |
           ./gradlew :metadata-integration:java:${{ matrix.command }}:build --info
           ./gradlew :metadata-integration:java:${{ matrix.command }}:checkShadowJar
+          ./gradlew :metadata-integration:java:${{ matrix.command }}:javadoc
9 changes: 5 additions & 4 deletions .github/workflows/pr-labeler.yml
@@ -29,23 +29,24 @@ jobs:
             "swaroopjagadish",
             "treff7es",
             "yoonhyejin",
             "eboneil",
             "gabe-lyons",
             "hsheth2",
             "jjoyce0510",
             "maggiehays",
             "pedro93",
             "RyanHolstien",
             "sakethvarma397",
             "Kunal-kankriya",
             "purnimagarg1",
             "dushayntAW",
             "sagar-salvi-apptware",
             "kushagra-apptware",
             "Salman-Apptware",
             "mayurinehate",
             "noggi",
-            "skrydal"
+            "skrydal",
+            "kevinkarchacryl",
+            "sgomezvillamor",
+            "acrylJonny",
+            "chakru-r"
           ]'),
           github.actor
         )
13 changes: 11 additions & 2 deletions build.gradle
@@ -48,6 +48,7 @@ buildscript {
   // see also datahub-frontend/play.gradle
   ext.playVersion = '2.8.22'
   ext.playScalaVersion = '2.13'
+  ext.akkaVersion = '2.6.21' // 2.7.0+ has incompatible license
   ext.log4jVersion = '2.23.1'
   ext.slf4jVersion = '1.7.36'
   ext.logbackClassic = '1.4.14'
@@ -56,7 +57,7 @@ buildscript {
   ext.hazelcastVersion = '5.3.6'
   ext.ebeanVersion = '15.5.2'
   ext.googleJavaFormatVersion = '1.18.1'
-  ext.openLineageVersion = '1.19.0'
+  ext.openLineageVersion = '1.24.2'
   ext.logbackClassicJava8 = '1.2.12'

   ext.docker_registry = 'acryldata'
@@ -105,7 +106,14 @@ project.ext.spec = [
 ]

 project.ext.externalDependency = [
-    'akkaHttp': "com.typesafe.akka:akka-http-core_$playScalaVersion:10.2.10",
+    'akkaHttp': "com.typesafe.akka:akka-http-core_$playScalaVersion:10.2.10", // max version due to licensing
+    'akkaActor': "com.typesafe.akka:akka-actor_$playScalaVersion:$akkaVersion",
+    'akkaStream': "com.typesafe.akka:akka-stream_$playScalaVersion:$akkaVersion",
+    'akkaActorTyped': "com.typesafe.akka:akka-actor-typed_$playScalaVersion:$akkaVersion",
+    'akkaSlf4j': "com.typesafe.akka:akka-slf4j_$playScalaVersion:$akkaVersion",
+    'akkaJackson': "com.typesafe.akka:akka-serialization-jackson_$playScalaVersion:$akkaVersion",
+    'akkaParsing': "com.typesafe.akka:akka-parsing_$playScalaVersion:$akkaVersion",
+    'akkaProtobuf': "com.typesafe.akka:akka-protobuf-v3_$playScalaVersion:$akkaVersion",
     'antlr4Runtime': 'org.antlr:antlr4-runtime:4.9.3',
     'antlr4': 'org.antlr:antlr4:4.9.3',
     'assertJ': 'org.assertj:assertj-core:3.11.1',
@@ -350,6 +358,7 @@ allprojects {
       }
     }
   }
+
 }

 configure(subprojects.findAll {! it.name.startsWith('spark-lineage')}) {
5 changes: 3 additions & 2 deletions datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java
@@ -130,8 +130,6 @@ public Object perform(
     CallContext ctx = ctxResult.getFirst();
     Result result = (Result) ctxResult.getSecond();

-    setContextRedirectUrl(ctx);
-
     // Handle OIDC authentication errors.
     if (OidcResponseErrorHandler.isError(ctx)) {
       return OidcResponseErrorHandler.handleError(ctx);
@@ -192,6 +190,9 @@ private Pair<CallContext, Object> superPerform(
         }
       }

+      // Set the redirect url from cookie before creating action
+      setContextRedirectUrl(ctx);
+
       action = this.redirectToOriginallyRequestedUrl(ctx, defaultUrl);
     }
   } catch (RuntimeException var20) {
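The two hunks above reorder a single call: setContextRedirectUrl(ctx) moves out of the top of perform() and into superPerform(), immediately before redirectToOriginallyRequestedUrl(...). A minimal, self-contained sketch of why that ordering matters — the map-based cookie/session stand-ins below are illustrative only, not the pac4j API:

import java.util.HashMap;
import java.util.Map;

// Toy model of the OIDC callback: the originally requested URL lives in a
// cookie and must be copied into the session *before* the redirect action
// is built, or the user lands on the default URL instead of the deep link.
public class RedirectOrderingSketch {
  static final Map<String, String> cookie = new HashMap<>();
  static final Map<String, String> session = new HashMap<>();

  // Stand-in for setContextRedirectUrl(ctx): hydrate session from cookie.
  static void setContextRedirectUrl() {
    session.put("requestedUrl", cookie.get("redirect_url"));
  }

  // Stand-in for redirectToOriginallyRequestedUrl(ctx, defaultUrl).
  static String buildRedirectAction(String defaultUrl) {
    return session.getOrDefault("requestedUrl", defaultUrl);
  }

  public static void main(String[] args) {
    cookie.put("redirect_url", "/dataset/urn:li:dataset:abc");
    // Old order: the action is built before the session is hydrated -> "/".
    String before = buildRedirectAction("/");
    // New order: hydrate first, then build -> the original deep link.
    setContextRedirectUrl();
    String after = buildRedirectAction("/");
    System.out.println("before fix: " + before + ", after fix: " + after);
  }
}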
7 changes: 7 additions & 0 deletions datahub-frontend/play.gradle
@@ -55,6 +55,13 @@ dependencies {
   implementation externalDependency.antlr4Runtime
   implementation externalDependency.antlr4
   implementation externalDependency.akkaHttp
+  implementation externalDependency.akkaActor
+  implementation externalDependency.akkaStream
+  implementation externalDependency.akkaActorTyped
+  implementation externalDependency.akkaSlf4j
+  implementation externalDependency.akkaJackson
+  implementation externalDependency.akkaParsing
+  implementation externalDependency.akkaProtobuf

   implementation externalDependency.jerseyCore
   implementation externalDependency.jerseyGuava
1 change: 1 addition & 0 deletions datahub-graphql-core/build.gradle
@@ -3,6 +3,7 @@ plugins {
   id "io.github.kobylynskyi.graphql.codegen" version "4.1.1"
 }

+apply from: '../gradle/coverage/java-coverage.gradle'

 dependencies {
   implementation project(':metadata-service:restli-client-api')
@@ -63,6 +63,7 @@
 import com.linkedin.datahub.graphql.generated.Domain;
 import com.linkedin.datahub.graphql.generated.ERModelRelationship;
 import com.linkedin.datahub.graphql.generated.ERModelRelationshipProperties;
+import com.linkedin.datahub.graphql.generated.Entity;
 import com.linkedin.datahub.graphql.generated.EntityPath;
 import com.linkedin.datahub.graphql.generated.EntityRelationship;
 import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy;
@@ -312,6 +313,7 @@
 import com.linkedin.datahub.graphql.resolvers.type.HyperParameterValueTypeResolver;
 import com.linkedin.datahub.graphql.resolvers.type.PlatformSchemaUnionTypeResolver;
 import com.linkedin.datahub.graphql.resolvers.type.PropertyValueResolver;
+import com.linkedin.datahub.graphql.resolvers.type.ResolvedActorResolver;
 import com.linkedin.datahub.graphql.resolvers.type.ResultsTypeResolver;
 import com.linkedin.datahub.graphql.resolvers.type.TimeSeriesAspectInterfaceTypeResolver;
 import com.linkedin.datahub.graphql.resolvers.user.CreateNativeUserResetTokenResolver;
@@ -1730,12 +1732,22 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) {
         .type(
             "InstitutionalMemoryMetadata",
             typeWiring ->
-                typeWiring.dataFetcher(
-                    "author",
-                    new LoadableTypeResolver<>(
-                        corpUserType,
-                        (env) ->
-                            ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn())))
+                typeWiring
+                    .dataFetcher(
+                        "author",
+                        new LoadableTypeResolver<>(
+                            corpUserType,
+                            (env) ->
+                                ((InstitutionalMemoryMetadata) env.getSource())
+                                    .getAuthor()
+                                    .getUrn()))
+                    .dataFetcher(
+                        "actor",
+                        new EntityTypeResolver(
+                            this.entityTypes,
+                            (env) ->
+                                (Entity)
+                                    ((InstitutionalMemoryMetadata) env.getSource()).getActor())))
         .type(
             "DatasetStatsSummary",
             typeWiring ->
@@ -2242,6 +2254,7 @@ private void configureTypeResolvers(final RuntimeWiring.Builder builder) {
             "HyperParameterValueType",
             typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver()))
         .type("PropertyValue", typeWiring -> typeWiring.typeResolver(new PropertyValueResolver()))
+        .type("ResolvedActor", typeWiring -> typeWiring.typeResolver(new ResolvedActorResolver()))
         .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver()))
         .type(
             "TimeSeriesAspect",
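This wiring does two things: InstitutionalMemoryMetadata gains an actor field resolved through EntityTypeResolver, and the ResolvedActor union is bound to the new ResolvedActorResolver. The resolver class itself is not part of this diff; a plausible sketch of such a graphql-java union resolver, assuming CorpUser and CorpGroup as the member types:

import com.linkedin.datahub.graphql.generated.CorpGroup;
import com.linkedin.datahub.graphql.generated.CorpUser;
import graphql.TypeResolutionEnvironment;
import graphql.schema.GraphQLObjectType;
import graphql.schema.TypeResolver;

// graphql-java asks a TypeResolver to map the Java object backing a union
// (or interface) value to a concrete object type from the schema.
public class ResolvedActorResolverSketch implements TypeResolver {
  @Override
  public GraphQLObjectType getType(TypeResolutionEnvironment env) {
    Object source = env.getObject();
    if (source instanceof CorpUser) {
      return env.getSchema().getObjectType("CorpUser");
    } else if (source instanceof CorpGroup) {
      return env.getSchema().getObjectType("CorpGroup");
    }
    throw new RuntimeException("Unrecognized actor type: " + source);
  }
}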
@@ -84,8 +84,21 @@ private TimeSeriesChart getActiveUsersTimeSeriesChart(
       final DateTime end,
       final String title,
       final DateInterval interval) {
-    final DateRange dateRange =
-        new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis()));
+
+    final DateRange dateRange;
+
+    // adjust month to show 1st of month rather than last day of previous month
+    if (interval == DateInterval.MONTH) {
+      dateRange =
+          new DateRange(
+              String.valueOf(beginning.plusDays(1).getMillis()), // Shift start by 1 day
+              String.valueOf(end.plusDays(1).getMillis()) // Shift end by 1 day
+              );
+    } else {
+      // week display starting Sundays
+      dateRange =
+          new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis()));
+    }

     final List<NamedLine> timeSeriesLines =
@@ -96,6 +109,7 @@ private TimeSeriesChart getActiveUsersTimeSeriesChart(
             ImmutableMap.of(),
             Collections.emptyMap(),
             Optional.of("browserId"));
+
     return TimeSeriesChart.builder()
         .setTitle(title)
         .setDateRange(dateRange)
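The MONTH branch above shifts both epoch-millis boundaries forward by one day so monthly buckets are labeled with the 1st of the month rather than the last day of the previous month. A small Joda-Time check of that boundary arithmetic (the dates are arbitrary illustrations):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

// A month bucket computed as [last day of previous month, last day of month]
// renders ticks like "Oct 31"; shifting both ends by one day relabels the
// same-width bucket as [Nov 1, Dec 1].
public class MonthBoundaryShift {
  public static void main(String[] args) {
    DateTime beginning = new DateTime(2024, 10, 31, 0, 0, DateTimeZone.UTC);
    DateTime end = new DateTime(2024, 11, 30, 0, 0, DateTimeZone.UTC);
    System.out.println("unshifted: " + beginning.toLocalDate() + " .. " + end.toLocalDate());
    System.out.println("shifted:   " + beginning.plusDays(1).toLocalDate()
        + " .. " + end.plusDays(1).toLocalDate());
    // The resolver passes these as strings of epoch millis:
    System.out.println("start millis: " + String.valueOf(beginning.plusDays(1).getMillis()));
  }
}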
@@ -1,19 +1,14 @@
 package com.linkedin.datahub.graphql.analytics.service;

+import static com.linkedin.metadata.Constants.CORP_USER_EDITABLE_INFO_ASPECT_NAME;
+import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME;
 import static com.linkedin.metadata.Constants.CORP_USER_INFO_ASPECT_NAME;

 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.dashboard.DashboardInfo;
-import com.linkedin.datahub.graphql.generated.BarSegment;
-import com.linkedin.datahub.graphql.generated.Cell;
-import com.linkedin.datahub.graphql.generated.Entity;
-import com.linkedin.datahub.graphql.generated.EntityProfileParams;
-import com.linkedin.datahub.graphql.generated.LinkParams;
-import com.linkedin.datahub.graphql.generated.NamedBar;
-import com.linkedin.datahub.graphql.generated.Row;
-import com.linkedin.datahub.graphql.generated.SearchParams;
+import com.linkedin.datahub.graphql.generated.*;
 import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper;
 import com.linkedin.dataplatform.DataPlatformInfo;
 import com.linkedin.dataset.DatasetProperties;
@@ -22,6 +17,7 @@
 import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.glossary.GlossaryTermInfo;
+import com.linkedin.identity.CorpUserEditableInfo;
 import com.linkedin.identity.CorpUserInfo;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.key.GlossaryTermKey;
@@ -35,6 +31,7 @@
 import java.util.Set;
 import java.util.function.Function;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;
@@ -169,36 +166,79 @@ public static void convertToUserInfoRows(
     final Map<Urn, EntityResponse> gmsResponseByUser =
         entityClient.batchGetV2(
             opContext,
-            CORP_USER_INFO_ASPECT_NAME,
+            CORP_USER_ENTITY_NAME,
             userUrns,
-            ImmutableSet.of(CORP_USER_INFO_ASPECT_NAME));
-    final Map<Urn, CorpUserInfo> urnToCorpUserInfo =
+            ImmutableSet.of(CORP_USER_INFO_ASPECT_NAME, CORP_USER_EDITABLE_INFO_ASPECT_NAME));
+    final Stream<Map.Entry<Urn, EntityResponse>> entityStream =
         gmsResponseByUser.entrySet().stream()
             .filter(
                 entry ->
                     entry.getValue() != null
-                        && entry.getValue().getAspects().containsKey(CORP_USER_INFO_ASPECT_NAME))
-            .collect(
-                Collectors.toMap(
-                    Map.Entry::getKey,
-                    entry ->
+                        && (entry.getValue().getAspects().containsKey(CORP_USER_INFO_ASPECT_NAME)
+                            || entry
+                                .getValue()
+                                .getAspects()
+                                .containsKey(CORP_USER_EDITABLE_INFO_ASPECT_NAME)));
+    final Map<Urn, Pair<CorpUserInfo, CorpUserEditableInfo>> urnToCorpUserInfo =
+        entityStream.collect(
+            Collectors.toMap(
+                Map.Entry::getKey,
+                entry -> {
+                  CorpUserInfo userInfo = null;
+                  CorpUserEditableInfo editableInfo = null;
+                  try {
+                    userInfo =
                         new CorpUserInfo(
                             entry
                                 .getValue()
                                 .getAspects()
                                 .get(CORP_USER_INFO_ASPECT_NAME)
                                 .getValue()
-                                .data())));
+                                .data());
+                  } catch (Exception e) {
+                    // nothing to do
+                  }
+                  try {
+
+                    editableInfo =
+                        new CorpUserEditableInfo(
+                            entry
+                                .getValue()
+                                .getAspects()
+                                .get(CORP_USER_EDITABLE_INFO_ASPECT_NAME)
+                                .getValue()
+                                .data());
+                  } catch (Exception e) {
+                    // nothing to do
+                  }
+
+                  return Pair.of(userInfo, editableInfo);
+                }));
     // Populate a row with the user link, title, and email.
     rows.forEach(
         row -> {
           Urn urn = UrnUtils.getUrn(row.getCells().get(0).getValue());
           EntityResponse response = gmsResponseByUser.get(urn);
           String maybeDisplayName = response != null ? getUserName(response).orElse(null) : null;
-          String maybeEmail =
-              urnToCorpUserInfo.containsKey(urn) ? urnToCorpUserInfo.get(urn).getEmail() : null;
-          String maybeTitle =
-              urnToCorpUserInfo.containsKey(urn) ? urnToCorpUserInfo.get(urn).getTitle() : null;
+          String maybeEmail = null;
+          String maybeTitle = null;
+          if (urnToCorpUserInfo.containsKey(urn)) {
+            Pair<CorpUserInfo, CorpUserEditableInfo> pair = urnToCorpUserInfo.get(urn);
+            if (pair.getLeft() != null) {
+              CorpUserInfo userInfo = pair.getLeft();
+              maybeEmail = userInfo.getEmail();
+              maybeTitle = userInfo.getTitle();
+            }
+            if (pair.getRight() != null) {
+              CorpUserEditableInfo userInfo = pair.getRight();
+              if (maybeEmail == null) {
+                maybeEmail = userInfo.getEmail();
+              }
+              if (maybeTitle == null) {
+                maybeTitle = userInfo.getTitle();
+              }
+            }
+          }
           if (maybeDisplayName != null) {
             row.getCells().get(0).setValue(maybeDisplayName);
           }
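The rewritten convertToUserInfoRows fetches both the corpUserInfo and corpUserEditableInfo aspects and carries them as a Pair, preferring the primary aspect field by field and falling back to the editable one. The fallback logic in isolation — the record types below are simplified stand-ins for the real aspect classes:

import org.apache.commons.lang3.tuple.Pair;

// Field-level fallback: prefer CorpUserInfo values, fall back to
// CorpUserEditableInfo; either side of the pair may be null.
public class UserInfoFallbackSketch {
  record UserInfo(String email, String title) {}
  record EditableInfo(String email, String title) {}

  static String[] resolveEmailAndTitle(Pair<UserInfo, EditableInfo> pair) {
    String email = null;
    String title = null;
    if (pair.getLeft() != null) {
      email = pair.getLeft().email();
      title = pair.getLeft().title();
    }
    if (pair.getRight() != null) {
      if (email == null) {
        email = pair.getRight().email();
      }
      if (title == null) {
        title = pair.getRight().title();
      }
    }
    return new String[] {email, title};
  }

  public static void main(String[] args) {
    // Primary aspect is missing the email; the editable aspect fills it in.
    Pair<UserInfo, EditableInfo> pair =
        Pair.of(new UserInfo(null, "Engineer"), new EditableInfo("jdoe@example.com", "Staff"));
    String[] resolved = resolveEmailAndTitle(pair);
    System.out.println(resolved[0] + " / " + resolved[1]); // jdoe@example.com / Engineer
  }
}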