2 changes: 2 additions & 0 deletions .env
@@ -32,6 +32,8 @@ SYSTEM_STORAGE_DATABASE_SLAVE_CONNECT=${SYSTEM_STORAGE_DATABASE_CONNECT}
SYSTEM_STORAGE_COLLECTION_CACHE_INTERVAL=30s
SYSTEM_STORAGE_SOURCE_UPDATE_TTL=60s

SYSTEM_STATISTIC_CONNECT=clickhouse://clickhouse-server:9000/stats?debug=false

MESSANGER_EMAIL_MAILER=smtp
MESSANGER_EMAIL_URL=smtp.gmail.com
MESSANGER_EMAIL_PORT=587
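The new SYSTEM_STATISTIC_CONNECT value is a plain URL-style DSN, so its parts can be inspected before it is handed to a driver. A minimal sketch, not part of this PR, using only the Go standard library:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// The DSN added to .env above: the scheme selects the driver, host:port the
	// ClickHouse server, the path the database, and the query string the options.
	dsn := "clickhouse://clickhouse-server:9000/stats?debug=false"

	u, err := url.Parse(dsn)
	if err != nil {
		panic(err)
	}
	fmt.Println(u.Scheme)               // clickhouse
	fmt.Println(u.Host)                 // clickhouse-server:9000
	fmt.Println(u.Path)                 // /stats
	fmt.Println(u.Query().Get("debug")) // false
}
```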
3 changes: 0 additions & 3 deletions .gitmodules
@@ -1,6 +1,3 @@
[submodule "submodules/blaze-api"]
path = submodules/blaze-api
url = https://github.com/geniusrabbit/blaze-api
[submodule "submodules/archivarius"]
path = submodules/archivarius
url = https://github.com/geniusrabbit/archivarius
3 changes: 2 additions & 1 deletion Makefile
@@ -3,7 +3,7 @@ include .env
export

export GOPRIVATE="github.com/geniusrabbit/*"
APP_BUILD_TAGS ?= postgres,migrate,redis,jaeger
APP_BUILD_TAGS ?= postgres,clickhouse,migrate,redis,jaeger
# doctl kubernetes cluster kubeconfig save use_your_cluster_name

include deploy/build.mk
@@ -70,6 +70,7 @@ build-docker-dev: build ## Build docker image for development
.PHONY: run
run: build-docker-dev ## Run API service by docker-compose
@echo "Run API service http://localhost:${DOCKER_SERVER_HTTP_PORT}"
$(DOCKER_COMPOSE) up api

.PHONY: stop
stop: ## Stop all services
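The extra clickhouse entry in APP_BUILD_TAGS suggests the ClickHouse backend is compiled in through Go build constraints. A rough sketch of how such a tag-gated file commonly looks; the package name and driver import are illustrative assumptions, not taken from this repository:

```go
//go:build clickhouse

// Compiled only when the binary is built with a -tags list containing "clickhouse",
// e.g. as the Makefile does: go build -tags "postgres,clickhouse,migrate,redis,jaeger" ./cmd/api
package database

import (
	// Hypothetical driver import: its init registers a "clickhouse" driver for database/sql.
	_ "github.com/ClickHouse/clickhouse-go/v2"
)
```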
20 changes: 7 additions & 13 deletions cmd/api/appcontext/config.go
@@ -47,6 +47,11 @@ type storageConfig struct {
SlaveConnect string `json:"slave_connect" yaml:"slave_connect" env:"SYSTEM_STORAGE_DATABASE_SLAVE_CONNECT"`
}

// StatisticConfig contains statistic configuration options
type statisticConfig struct {
Connect string `json:"connect" yaml:"connect" env:"SYSTEM_STATISTIC_CONNECT"`
}

type socialAuthProviderEndpoint struct {
AuthURL string `json:"auth_url" yaml:"auth_url" env:"AUTH_URL"`
DeviceAuthURL string `json:"device_auth_url" yaml:"device_auth_url" env:"DEVICE_AUTH_URL"`
@@ -161,17 +166,8 @@ type permissionConfig struct {
}

type systemConfig struct {
Storage storageConfig `json:"storage" yaml:"storage"`
}

// BillingConfig contains billing configuration options
type billingConfig struct {
Connect string `json:"connect" yaml:"connect" env:"BILLING_CONNECT"`
}

// StatisticConfig contains statistic configuration options
type statisticConfig struct {
Connect string `json:"connect" yaml:"connect" env:"STATISTIC_CONNECT"`
Storage storageConfig `json:"storage" yaml:"storage"`
Statistic statisticConfig `json:"statistic" yaml:"statistic"`
}

// MessangerConfig contains email configuration options for messanger
Expand Down Expand Up @@ -213,8 +209,6 @@ type ConfigType struct {
OAuth2 oauth2Config `json:"oauth2" yaml:"oauth2"`
Messanger messangerConfig `json:"messanger" yaml:"messanger"`
Permissions permissionConfig `json:"permissions" yaml:"permissions"`
Billing billingConfig `json:"billing" yaml:"billing"`
Statistic statisticConfig `json:"statistic" yaml:"statistic"`
}

// String implementation of Stringer interface
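After this change the statistics DSN is nested under the system section instead of sitting at the top level next to billing. A compressed sketch of the resulting shape (field names and env tags copied from the diff; the env/yaml loader used by the project is assumed, not shown):

```go
package appcontext

type storageConfig struct {
	// Other storage fields omitted; see the hunk above.
	SlaveConnect string `json:"slave_connect" yaml:"slave_connect" env:"SYSTEM_STORAGE_DATABASE_SLAVE_CONNECT"`
}

type statisticConfig struct {
	Connect string `json:"connect" yaml:"connect" env:"SYSTEM_STATISTIC_CONNECT"`
}

type systemConfig struct {
	Storage   storageConfig   `json:"storage" yaml:"storage"`
	Statistic statisticConfig `json:"statistic" yaml:"statistic"`
}
```

Callers therefore read conf.System.Statistic.Connect rather than the removed top-level conf.Statistic.Connect, which matches the change in cmd/api/main.go below.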
21 changes: 5 additions & 16 deletions cmd/api/main.go
@@ -35,14 +35,12 @@ import (
"github.com/geniusrabbit/blaze-api/repository/historylog/middleware/gormlog"
"github.com/geniusrabbit/blaze-api/repository/socialauth/delivery/rest"

statsClient "github.com/geniusrabbit/archivarius/client"
billingClient "github.com/geniusrabbit/billing/client"

"github.com/sspserver/api/cmd/api/appcontext"
"github.com/sspserver/api/cmd/api/appinit"
"github.com/sspserver/api/cmd/api/server"
rtbsourceuc "github.com/sspserver/api/internal/repository/rtbsource/usecase"
"github.com/sspserver/api/internal/repository/statistic"
statisticrc "github.com/sspserver/api/internal/repository/statistic/repository"
statisticuc "github.com/sspserver/api/internal/repository/statistic/usecase"
"github.com/sspserver/api/internal/server/graphql"
"github.com/sspserver/api/internal/server/graphql/resolvers"
"github.com/sspserver/api/private/emails"
@@ -179,23 +177,14 @@ func main() {

messangerWrap := messangerWrapper(messangerObj)

// Establish connection to Billing
billingCl, err := billingClient.ConnectAPI(ctx, conf.Billing.Connect)
fatalError(err, "connect to billing")
defer func() {
if err := billingCl.Close(); err != nil {
loggerObj.Error("Close billing connection", zap.Error(err))
}
}()

// Establish connection to Statistic
statisticCl, err := statsClient.ConnectAPI(ctx, conf.Statistic.Connect)
statDatabase, err := database.Connect(ctx, conf.System.Statistic.Connect, conf.IsDebug())
fatalError(err, "connect to statistic")

rtbSourceUsecase := rtbsourceuc.New()

statisticUsecase := statistic.NewUsecase(
statistic.NewRepository(statisticCl))
statisticUsecase := statisticuc.NewUsecase(
statisticrc.NewRepository(statDatabase))

// Prepare context
ctx = ctxlogger.WithLogger(ctx, loggerObj)
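The archivarius and billing API clients are dropped here; statistics now go through the service's own database layer. The project's database.Connect helper is not shown in this diff, so purely as an illustration, here is how the same DSN could be opened with database/sql and a ClickHouse driver (driver choice and DSN literal are assumptions):

```go
package main

import (
	"context"
	"database/sql"
	"log"

	// Assumed driver; the repository's database.Connect may use a different one.
	_ "github.com/ClickHouse/clickhouse-go/v2"
)

func main() {
	ctx := context.Background()

	// Same DSN the API receives via SYSTEM_STATISTIC_CONNECT.
	db, err := sql.Open("clickhouse", "clickhouse://clickhouse-server:9000/stats?debug=false")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := db.PingContext(ctx); err != nil {
		log.Fatal(err)
	}
	log.Println("statistics storage is reachable")
}
```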
29 changes: 17 additions & 12 deletions deploy/develop/clickhouse/migrations.sh
@@ -1,14 +1,19 @@
#!/bin/bash
FILE=/state/clickhouse-migrations
if [[ -e $FILE ]]; then
    echo 'Already applied!'
else
    for file in archivarius_migrations/*.up.sql; do
        if [ -n "$file" ] && [ -e "$file" ]; then
            echo "$file"
        fi
        clickhouse-client --host clickhouse-server --queries-file $file
    done
fi

touch /state/clickhouse-migrations
apply_migrations() {
    FILE=/state/clickhouse-$2
    if [[ -e $FILE ]]; then
        echo 'Already applied!'
    else
        # Keep the glob outside the quotes so it expands to the individual *.up.sql files
        for file in "$1"/*.up.sql; do
            if [ -n "$file" ] && [ -e "$file" ]; then
                echo "$file"
            fi
            clickhouse-client --host clickhouse-server --queries-file "$file"
        done
    fi
    touch /state/clickhouse-$2
}

apply_migrations /migrations migrations
apply_migrations /migrations-gen migrations-gen
3 changes: 2 additions & 1 deletion deploy/develop/docker-compose-common.yml
@@ -48,7 +48,8 @@ services:
condition: service_started
volumes:
- "./clickhouse/migrations.sh:/migrations.sh:ro"
- "../migrations/stats:/archivarius_migrations:ro"
- "../migrations/stats:/migrations:ro"
- "../migrations/stats-gen:/migrations-gen:ro"
- "state:/state"

database:
166 changes: 166 additions & 0 deletions deploy/migrations/stats-gen/gen.py
@@ -0,0 +1,166 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import random
import uuid
from datetime import datetime, timedelta


def random_ipv6():
    return ":".join(format(random.randint(0, 0xffff), '04x') for _ in range(8))

def sql_str(val):
    return "'" + str(val) + "'"

def random_category_ids():
    cnt = random.randint(1, 3)
    arr = [random.randint(1, 100) for _ in range(cnt)]
    return "[" + ",".join(str(x) for x in arr) + "]"

num_rows = 20000
# Data for the previous 3 months up to today
start_date = datetime.now() - timedelta(days=90)

for _ in range(num_rows):
    random_seconds = random.randint(0, 90 * 24 * 3600)
    timemark_dt = start_date + timedelta(seconds=random_seconds)
    timemark = timemark_dt.strftime('%Y-%m-%d %H:%M:%S')
    datehourmark = timemark_dt.replace(minute=0, second=0, microsecond=0).strftime('%Y-%m-%d %H:%M:%S')
    datemark = timemark_dt.strftime('%Y-%m-%d')

    sign = 1
    delay = random.randint(0, 1000000)
    duration = random.randint(0, 1000000)
    event_val = random.choice(["impression", "click", "view", "direct", "src.win", "src.bid", "src.skip", "src.nobid", "src.fail"])
    status = random.randint(0, 5)
    auc_id = f"UUIDStringToNum('{uuid.uuid4()}')"
    auc_type = random.choice([0, 1, 2, 3])
    imp_id = f"UUIDStringToNum('{uuid.uuid4()}')"
    impad_id = f"UUIDStringToNum('{uuid.uuid4()}')"
    extauc_id = "extauc_" + str(random.randint(1, 1000))
    extimp_id = "extimp_" + str(random.randint(1, 1000))
    source_id = random.randint(1, 10)
    network = "network_" + str(random.randint(1, 10))
    platform_type = random.choice([0, 1, 2, 3, 4])
    domain = "domain_" + str(random.randint(1, 10))
    app_id = random.randint(1, 10)
    zone_id = random.randint(1, 10)
    format_id = random.randint(1, 10)
    ad_w = random.randint(100, 1920)
    ad_h = random.randint(100, 1080)
    src_url = "http://src.url/" + str(random.randint(1, 1000))
    win_url = "http://win.url/" + str(random.randint(1, 1000))
    url_val = "http://target.url/" + str(random.randint(1, 1000))
    pricing_model = random.randint(0, 3)
    purchase_view_price = random.randint(1, 100) * 1000000000
    purchase_click_price = random.randint(1, 100) * 1000000000
    potential_view_price = random.randint(1, 100) * 1000000000
    potential_click_price = random.randint(1, 100) * 1000000000
    view_price = random.randint(1, 100) * 1000000000
    click_price = random.randint(1, 100) * 1000000000
    ud_id = "ud_" + str(random.randint(1, 1000))
    uu_id = f"UUIDStringToNum('{uuid.uuid4()}')"
    sess_id = f"UUIDStringToNum('{uuid.uuid4()}')"
    fingerprint = "fp_" + str(random.randint(1, 1000))
    etag = "etag_" + str(random.randint(1, 1000))
    carrier_id = random.randint(1, 10)
    country = random.choice(["RU", "US", "GB", "CN", "DE"])
    latitude = round(random.uniform(-90, 90), 6)
    longitude = round(random.uniform(-180, 180), 6)
    language = "en_US"
    ip_val = random_ipv6()
    ref_val = "http://ref.url/" + str(random.randint(1, 1000))
    page_url = "http://page.url/" + str(random.randint(1, 1000))
    ua = "Mozilla/5.0"
    device_id = random.randint(1, 10)
    device_type = random.randint(0, 5)
    os_id = random.randint(1, 10)
    browser_id = random.randint(1, 10)
    category_ids = random_category_ids()
    adblock = random.randint(0, 1)
    private_val = random.randint(0, 1)
    robot = random.randint(0, 1)
    proxy = random.randint(0, 1)
    backup = random.randint(0, 1)
    x_val = random.randint(0, 1920)
    y_val = random.randint(0, 1080)
    w_val = random.randint(100, 1920)
    h_val = random.randint(100, 1080)
    subid1 = "subid1_" + str(random.randint(1, 1000))
    subid2 = "subid2_" + str(random.randint(1, 1000))
    subid3 = "subid3_" + str(random.randint(1, 1000))
    subid4 = "subid4_" + str(random.randint(1, 1000))
    subid5 = "subid5_" + str(random.randint(1, 1000))
    created_at = timemark

    values = [
        sign,
        sql_str(timemark),
        sql_str(datehourmark),
        sql_str(datemark),
        delay,
        duration,
        sql_str(event_val),
        status,
        auc_id,
        auc_type,
        imp_id,
        impad_id,
        sql_str(extauc_id),
        sql_str(extimp_id),
        source_id,
        sql_str(network),
        platform_type,
        sql_str(domain),
        app_id,
        zone_id,
        format_id,
        ad_w,
        ad_h,
        sql_str(src_url),
        sql_str(win_url),
        sql_str(url_val),
        pricing_model,
        purchase_view_price,
        purchase_click_price,
        potential_view_price,
        potential_click_price,
        view_price,
        click_price,
        sql_str(ud_id),
        uu_id,
        sess_id,
        sql_str(fingerprint),
        sql_str(etag),
        carrier_id,
        sql_str(country),
        latitude,
        longitude,
        sql_str(language),
        sql_str(ip_val),
        sql_str(ref_val),
        sql_str(page_url),
        sql_str(ua),
        device_id,
        device_type,
        os_id,
        browser_id,
        category_ids,
        adblock,
        private_val,
        robot,
        proxy,
        backup,
        x_val,
        y_val,
        w_val,
        h_val,
        sql_str(subid1),
        sql_str(subid2),
        sql_str(subid3),
        sql_str(subid4),
        sql_str(subid5),
        sql_str(created_at)
    ]

    values_str = ", ".join(str(v) for v in values)
    print(f"INSERT INTO stats.events_local VALUES ({values_str});")