diff --git a/kafka/fixtures/kafka_consumer.sql b/kafka/fixtures/kafka_consumer.sql
index 44d7246..09e13b1 100644
--- a/kafka/fixtures/kafka_consumer.sql
+++ b/kafka/fixtures/kafka_consumer.sql
@@ -60,12 +60,10 @@ CREATE TABLE click_analytics (
     `device` LowCardinality(String),
     `country` LowCardinality(String),
     `region` String,
-    `city` String,
-    `timestamp` Date DEFAULT toDate(now())
+    `city` String
 ) ENGINE = MergeTree()
-PARTITION BY toYYYYMM(timestamp)
-ORDER BY (code, timestamp, browser, os, device, country);
+ORDER BY (code, browser, os, device, country);
 
 -- Create a Materialized View named 'click_analytics_consumer' to transfer data from the Kafka table to the ClickHouse table
 -- The view selects all columns from 'eurl_kafka' and inserts them into the 'click_analytics' table
 
diff --git a/lib/services/redisPublicGenerate.ts b/lib/services/redisPublicGenerate.ts
index 2b1b956..0f0bb69 100644
--- a/lib/services/redisPublicGenerate.ts
+++ b/lib/services/redisPublicGenerate.ts
@@ -48,24 +48,8 @@ const checkIfShortCodePublic = (shortCode: string): boolean => {
 const publishUserAgent = async (req: NextRequest, code: string) => {
   const userAgent = userAgentAnlytics(req);
-  const tmp_test_data = {
-city : req.geo!.city,
-country : req.geo!.country,
-region : req.geo!.region,
-lat : req.geo!.latitude,
-long :req.geo!.longitude
-  }
-
-  await pubSubRedis.lpush(
-    "user_location",
-    JSON.stringify({
-      code,
-      ...tmp_test_data,
-    })
-  );
-
-  await pubSubRedis.lpush(
-    "user_analytics",
+  await pubSubRedis.publish(
+    "user_anlytics",
     JSON.stringify({
       code,
       ...userAgent,