Merge branch 'main' into COST-4313-subscription-name-null
bacciotti committed Jan 15, 2024
2 parents 88f2960 + 03d5006 commit 2891109
Showing 35 changed files with 949 additions and 284 deletions.
2 changes: 1 addition & 1 deletion .baseimagedigest
@@ -1,2 +1,2 @@
 sha256:87bcbfedfd70e67aab3875fff103bade460aeff510033ebb36b7efa009ab6639
-12b3e537205aec0066fc6c0735d15674dc741de7910a5520757762908c98f5fa -
+5869bead574dd90c800c62a1aafb8ef0516ed1e8b23874b8ed4c7755968614ee -
2 changes: 1 addition & 1 deletion .coveragerc
@@ -11,7 +11,7 @@ omit =
     *manage.py
     *celery.py
     *configurator.py
-    *database.py
+    database.py
     *feature_flags.py
     *probe_server.py
     *settings.py
19 changes: 13 additions & 6 deletions .github/workflows/ci.yml
@@ -57,7 +57,7 @@ jobs:

       - name: Get changed files
         id: changed-files
-        uses: tj-actions/changed-files@v34.0.2
+        uses: tj-actions/changed-files@v41.0.0
         with:
           files_from_source_file: docker-files.txt
@@ -120,12 +120,12 @@ jobs:

       - name: Get changed files
         id: changed-files
-        uses: tj-actions/changed-files@v34.0.2
+        uses: tj-actions/changed-files@v41.0.0
         with:
           files: |
-            db_functions/
-            koku/
-            .github/postgres
+            db_functions/**
+            koku/**
+            .github/postgres/**
             .github/scripts/check_migrations.sh
             .github/workflows/ci.yml
             Pipfile.lock
@@ -134,10 +134,17 @@

       - name: Check files or fork
         id: check-files-or-fork
         run: |
-          if [ ! -z "${{ steps.changed-files.outputs.all_changed_and_modified_files }}" ] || [ "${{ github.event.pull_request.head.repo.full_name }}" != "project-koku/koku" ]; then
+          if [ "${{ steps.changed-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event.pull_request.head.repo.full_name }}" != "project-koku/koku" ]; then
             echo "run_tests=true" >> $GITHUB_OUTPUT
           fi
+      - name: Show changed files
+        run: |
+          echo "Changed files:"
+          for file in ${{ steps.changed-files.outputs.all_changed_and_modified_files }}; do
+            echo "  $file"
+          done
   units:
     name: Units - ${{ matrix.python-version }}
     needs: changed-files
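Note on this workflow change: in tj-actions/changed-files v41 the `files` entries are treated as glob patterns, which is why bare directories like `koku/` become `koku/**`, and the gate now reads the action's `any_modified` output (the strings "true"/"false") instead of testing whether the whitespace-joined file list happens to be non-empty. A minimal sketch of the same gate outside Actions, with both `${{ ... }}` expressions replaced by hypothetical shell variables:

```bash
#!/usr/bin/env bash
# Hypothetical stand-ins for the two workflow expressions:
any_modified="true"           # steps.changed-files.outputs.any_modified
head_repo="contributor/koku"  # github.event.pull_request.head.repo.full_name

# Fall back to stdout when not running under GitHub Actions.
GITHUB_OUTPUT="${GITHUB_OUTPUT:-/dev/stdout}"

# Run tests when files changed, or unconditionally for fork PRs.
if [ "$any_modified" == "true" ] || [ "$head_repo" != "project-koku/koku" ]; then
    echo "run_tests=true" >> "$GITHUB_OUTPUT"
fi
```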
1 change: 1 addition & 0 deletions .shellcheckrc
@@ -0,0 +1 @@
+disable=SC1090,SC1091,SC2164
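For reference, the three disabled checks: SC1090 and SC1091 fire when shellcheck cannot follow a `source`d file (a non-constant path, or one outside its search path), which this repo hits with lines like `source "$DEV_SCRIPTS_PATH"/common/logging.sh`; SC2164 asks that every `cd` handle failure. A one-line sketch of what SC2164 would otherwise require:

```bash
# Without the directive, shellcheck wants cd failures handled explicitly:
cd "$NISE_DATA_PATH" || exit 1   # SC2164-compliant form
```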
6 changes: 3 additions & 3 deletions Pipfile.lock

Some generated files are not rendered by default.

97 changes: 51 additions & 46 deletions dev/scripts/load_test_customer_data.sh
@@ -19,7 +19,7 @@
 #

 usage() {
-    log-info "Usage: `basename $0` <command>"
+    log-info "Usage: $(basename "$0") <command>"
     log-info ""
     log-info "[source name]:"
     log-info "\t AWS build and populate test customer data for AWS"
@@ -44,8 +44,8 @@ NISE="$(which nise)"
 NISE_DATA_PATH="${DEV_SCRIPTS_PATH}/../../testing"

 # import common functions
-source $DEV_SCRIPTS_PATH/common/logging.sh
-source $DEV_SCRIPTS_PATH/common/utils.sh
+source "$DEV_SCRIPTS_PATH"/common/logging.sh
+source "$DEV_SCRIPTS_PATH"/common/utils.sh

 trap handle_errors ERR
@@ -70,7 +70,8 @@ export PGPASSWORD="${DATABASE_PASSWORD}"
 export PGPORT="${POSTGRES_SQL_SERVICE_PORT}"
 export PGHOST="${POSTGRES_SQL_SERVICE_HOST}"
 export PGUSER="${DATABASE_USER}"
-export OS="$(uname)"
+OS="$(uname)"
+export OS

 export S3_ACCESS_KEY="${S3_ACCESS_KEY}"
 export S3_SECRET_KEY="${S3_SECRET}"
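Splitting `export OS="$(uname)"` into assignment plus `export` is the standard fix for SC2155: combining a declaration with a command substitution discards the command's exit status, so `set -e` or an `ERR` trap (which this script installs) can never see a failure. A small demonstration:

```bash
#!/usr/bin/env bash
export A="$(false)"   # SC2155: $? is 0 -- export succeeded, the failure is lost
echo "combined: $?"   # prints 0

B="$(false)"          # plain assignment preserves the substitution's status
echo "separate: $?"   # prints 1
export B
```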
@@ -88,9 +89,6 @@ END_DATE=${3:-$(date +'%Y-%m-%d')} # defaults to today
 log-debug "START_DATE=${START_DATE}"
 log-debug "END_DATE=${END_DATE}"

-# this is the default that's in koku.masu.config
-DATA_DIR=/var/tmp/masu
-

 check-api-status() {
     # API status validation.
@@ -102,7 +100,7 @@ check-api-status() {
     local _status_url=$2

     log-info "Checking that $_server_name is up and running..."
-    CHECK=$(curl --connect-timeout 20 -s -w "%{http_code}\n" -L ${_status_url} -o /dev/null)
+    CHECK=$(curl --connect-timeout 20 -s -w "%{http_code}\n" -L "${_status_url}" -o /dev/null)
     if [[ $CHECK != 200 ]];then
         log-err "$_server_name is not available at: $_status_url"
         log-err "exiting..."
@@ -118,16 +116,16 @@ add_cost_models() {
     # 1 - api_provider.name; this needs to match the source_name in test_customer.yaml
     # 2 - cost model json filename; this needs to be a file in $DEV_SCRIPTS_PATH
     #
-    UUID=$(psql $DATABASE_NAME --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$1'" | head -1 | sed -e 's/^[ \t]*//')
-    if [[ ! -z $UUID ]]; then
-        COST_MODEL_JSON=$(cat "$DEV_SCRIPTS_PATH/cost_models/$2" | sed -e "s/PROVIDER_UUID/$UUID/g")
+    UUID=$(psql "$DATABASE_NAME" --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$1'" | head -1 | sed -e 's/^[ \t]*//')
+    if [[ -n $UUID ]]; then
+        COST_MODEL_JSON=$(< "$DEV_SCRIPTS_PATH/cost_models/$2" sed -e "s/PROVIDER_UUID/$UUID/g")

         log-info "creating cost model, source_name: $1, uuid: $UUID"
         RESPONSE=$(curl -s -w "%{http_code}\n" \
             --header "Content-Type: application/json" \
             --request POST \
             --data "$COST_MODEL_JSON" \
-            ${KOKU_URL_PREFIX}/v1/cost-models/)
+            "${KOKU_URL_PREFIX}"/v1/cost-models/)
         STATUS_CODE=${RESPONSE: -3}
         DATA=${RESPONSE:: -3}
@@ -137,10 +135,10 @@ add_cost_models() {
         if [[ $STATUS_CODE != 201 ]]; then
             # logging warning if resource already exists
             if [[ $DATA =~ "already associated" && $STATUS_CODE == 400 ]]; then
-                log-warn $DATA
+                log-warn "$DATA"
             else
                 log-err "HTTP STATUS: $STATUS"
-                log-err $DATA
+                log-err "$DATA"
             fi
         fi
     else
@@ -155,18 +153,18 @@ trigger_download() {
     local _download_types=("$@")
     for download_type in "${_download_types[@]}"; do
-        UUID=$(psql $DATABASE_NAME --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$download_type'" | head -1 | sed -e 's/^[ \t]*//')
-        if [[ ! -z $UUID ]]; then
+        UUID=$(psql "$DATABASE_NAME" --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$download_type'" | head -1 | sed -e 's/^[ \t]*//')
+        if [[ -n $UUID ]]; then
             log-info "Triggering download for, source_name: $download_type, uuid: $UUID"
-            RESPONSE=$(curl -s -w "%{http_code}\n" ${MASU_URL_PREFIX}/v1/download/?provider_uuid=$UUID)
+            RESPONSE=$(curl -s -w "%{http_code}\n" "${MASU_URL_PREFIX}"/v1/download/?provider_uuid="$UUID")
             STATUS_CODE=${RESPONSE: -3}
             DATA=${RESPONSE:: -3}

             log-debug "status: $STATUS_CODE"
             log-debug "body: $DATA"

             if [[ $STATUS_CODE != 200 ]];then
-                log-err $DATA
+                log-err "$DATA"
             fi

         else
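The quoting added around the curl URLs matters more than it looks: `?` and `*` are glob characters, so an unquoted `.../download/?provider_uuid=$UUID` can be rewritten by a matching filename, and an unquoted `$UUID` would word-split if it ever contained whitespace (SC2086). A simplified sketch of the glob failure mode:

```bash
#!/usr/bin/env bash
cd "$(mktemp -d)" || exit 1
touch 'download.x'   # a file the pattern can match
url='download.?'     # stands in for .../download/?provider_uuid=...
echo $url            # unquoted: glob expands -> download.x
echo "$url"          # quoted: stays literal  -> download.?
```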
@@ -181,28 +179,31 @@ trigger_ocp_ingest() {
     # 1 - the source name. If the source does not exist, ingestion is skipped.
     # 2 - payload name to be ingested.
     #
-    UUID=$(psql $DATABASE_NAME --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$1'" | head -1 | sed -e 's/^[ \t]*//')
-    if [[ ! -z $UUID ]]; then
+    local formatted_start_date
+    local formatted_end_date
+
+    UUID=$(psql "$DATABASE_NAME" --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$1'" | head -1 | sed -e 's/^[ \t]*//')
+    if [[ -n $UUID ]]; then
         if [[ $OS = "Darwin" ]]; then
-            local formatted_start_date=$(date -j -f "%Y-%m-%d" "$START_DATE" +'%Y_%m')
-            local formatted_end_date=$(date -j -f "%Y-%m-%d" "$END_DATE" +'%Y_%m')
+            formatted_start_date=$(date -j -f "%Y-%m-%d" "$START_DATE" +'%Y_%m')
+            formatted_end_date=$(date -j -f "%Y-%m-%d" "$END_DATE" +'%Y_%m')
         else
-            local tmp_start="$START_DATE"
-            local formatted_start_date=$(date -d "$START_DATE" +'%Y_%m')
-            local formatted_end_date=$(date -d "$END_DATE" +'%Y_%m')
+            formatted_start_date=$(date -d "$START_DATE" +'%Y_%m')
+            formatted_end_date=$(date -d "$END_DATE" +'%Y_%m')
         fi
         while [ ! "$formatted_start_date" \> "$formatted_end_date" ]; do
             local payload_name="$2.$formatted_start_date.tar.gz"
             log-info "Triggering ingest for, source_name: $1, uuid: $UUID, payload_name: $payload_name"
-            RESPONSE=$(curl -s -w "%{http_code}\n" ${MASU_URL_PREFIX}/v1/ingest_ocp_payload/?payload_name=$payload_name)
+            RESPONSE=$(curl -s -w "%{http_code}\n" "${MASU_URL_PREFIX}"/v1/ingest_ocp_payload/?payload_name="$payload_name")
             STATUS_CODE=${RESPONSE: -3}
             DATA=${RESPONSE:: -3}

             log-debug "status: $STATUS_CODE"
             log-debug "body: $DATA"

             if [[ $STATUS_CODE != 202 ]];then
-                log-err $DATA
+                log-err "$DATA"
             fi
             if [[ $OS = "Darwin" ]]; then
                 formatted_start_date=$(date -j -v+1m -f "%Y_%m" "$formatted_start_date" +'%Y_%m')
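Two things happen in this hunk: the `local x=$(cmd)` declarations are split so the command's exit status is not masked (SC2155, same reasoning as the `export OS` change above), and the dead `tmp_start` variable is dropped. The surrounding month loop compares `YYYY_MM` strings lexicographically with `[ ! a \> b ]` and advances via BSD `date -j -v+1m` on Darwin or GNU `date -d` elsewhere. A GNU-only sketch of the loop shape, with hypothetical bounds:

```bash
#!/usr/bin/env bash
start="2023_11"; end="2024_01"        # hypothetical YYYY_MM bounds
while [ ! "$start" \> "$end" ]; do    # lexicographic "<=" via negated ">"
    echo "payload.$start.tar.gz"
    # GNU date: advance one month (Darwin uses: date -j -v+1m -f "%Y_%m" ...)
    start=$(date -d "${start//_/-}-01 +1 month" +'%Y_%m')
done
```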
@@ -221,18 +222,18 @@ render_yaml_files() {
     local _yaml_files=("$@")
     RENDERED_YAML=()
     for fname in "${_yaml_files[@]}"; do
-        OUT=$(dirname $YAML_PATH/$fname)/rendered_$(basename $YAML_PATH/$fname)
+        OUT=$(dirname "$YAML_PATH"/"$fname")/rendered_$(basename "$YAML_PATH"/"$fname")
         log-debug "rendering ${fname} to ${OUT}"
-        python $DEV_SCRIPTS_PATH/render_nise_yamls.py -f $YAML_PATH/$fname -o $OUT -s "$START_DATE" -e "$END_DATE"
-        RENDERED_YAML+="$OUT "
+        python "$DEV_SCRIPTS_PATH"/render_nise_yamls.py -f "$YAML_PATH/$fname" -o "$OUT" -s "$START_DATE" -e "$END_DATE"
+        RENDERED_YAML+=("$OUT ")
    done
 }

 cleanup_rendered_files(){
     local _yaml_files=("$@")
-    for fname in ${_yaml_files[@]}; do
+    for fname in "${_yaml_files[@]}"; do
         log-debug "removing ${fname}..."
-        rm $fname
+        rm "$fname"
     done
 }
@@ -241,22 +242,22 @@ enable_ocp_tags() {
     RESPONSE=$(curl -s -w "%{http_code}\n" --header "Content-Type: application/json" \
         --request POST \
         --data '{"schema": "org1234567","action": "create","tag_keys": ["environment", "app", "version", "storageclass", "application", "instance-type"], "provider_type": "ocp"}' \
-        ${MASU_URL_PREFIX}/v1/enabled_tags/)
+        "${MASU_URL_PREFIX}"/v1/enabled_tags/)
     STATUS_CODE=${RESPONSE: -3}
     DATA=${RESPONSE:: -3}

     log-debug "status: $STATUS_CODE"
     log-debug "body: $DATA"

     if [[ $STATUS_CODE != 200 ]];then
-        log-err $DATA
+        log-err "$DATA"
     fi
 }

 nise_report(){
     # wrapper function to run nise cli
-    log-debug "RUNNING - $NISE report $@"
-    $NISE report $@
+    log-debug "RUNNING - $NISE report $*"
+    $NISE report "$@"
 }
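The `nise_report` change picks the right expansion for each spot: inside quotes, `"$*"` joins all arguments into one space-separated string (right for a log line), while `"$@"` re-expands them one word per original argument (right for invoking `$NISE` without re-splitting or re-globbing). A quick contrast:

```bash
demo() {
    echo "log line: $*"       # one joined string
    printf 'arg: %s\n' "$@"   # one line per original argument
}
demo ocp --start-date "2024-01-01 00:00"   # the quoted space survives "$@" only
```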

# AWS customer data
@@ -270,7 +271,8 @@ build_aws_data() {

     local _download_types=("Test AWS Source")
     local _ocp_ingest_name="Test OCP on AWS"
-    local _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"
+    local _ocp_payload
+    _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"

     log-info "Rendering ${_source_name} YAML files..."
     render_yaml_files "${_yaml_files[@]}"
@@ -284,8 +286,8 @@
     cleanup_rendered_files "${_rendered_yaml_files[@]}"

     log-info "Adding ${_source_name} cost models..."
-    add_cost_models 'Test OCP on AWS' openshift_on_aws_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
-    add_cost_models 'Test AWS Source' aws_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
+    add_cost_models 'Test OCP on AWS' openshift_on_aws_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"
+    add_cost_models 'Test AWS Source' aws_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"

     log-info "Trigger downloads..."
     trigger_download "${_download_types[@]}"
@@ -305,7 +307,8 @@ build_azure_data() {

     local _download_types=("Test Azure Source" "Test Azure v2 Source")
     local _ocp_ingest_name="Test OCP on Azure"
-    local _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"
+    local _ocp_payload
+    _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"

     log-info "Rendering ${_source_name} YAML files..."
     render_yaml_files "${_yaml_files[@]}"
@@ -320,7 +323,7 @@
     cleanup_rendered_files "${_rendered_yaml_files[@]}"

     log-info "Adding ${_source_name} cost models..."
-    add_cost_models 'Test Azure Source' azure_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
+    add_cost_models 'Test Azure Source' azure_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"

     log-info "Trigger downloads..."
     trigger_download "${_download_types[@]}"
@@ -340,7 +343,8 @@ build_gcp_data() {

     local _download_types=("Test GCP Source" "Test OCPGCP Source")
     local _ocp_ingest_name="Test OCP on GCP"
-    local _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"
+    local _ocp_payload
+    _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"

     log-info "Rendering ${_source_name} YAML files..."
     render_yaml_files "${_yaml_files[@]}"
@@ -355,7 +359,7 @@
     cleanup_rendered_files "${_rendered_yaml_files[@]}"

     log-info "Adding ${_source_name} cost models..."
-    add_cost_models 'Test GCP Source' gcp_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
+    add_cost_models 'Test GCP Source' gcp_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"

     log-info "Trigger downloads..."
     trigger_download "${_download_types[@]}"
@@ -368,7 +372,8 @@ build_onprem_data() {
     local _yaml_files=("ocp/ocp_on_premise.yml")
     local _rendered_yaml_files=("$YAML_PATH/ocp/rendered_ocp_on_premise.yml")
     local _ocp_ingest_name="Test OCP on Premises"
-    local _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"
+    local _ocp_payload
+    _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"

     log-info "Rendering ${_source_name} YAML files..."
     render_yaml_files "${_yaml_files[@]}"
@@ -381,7 +386,7 @@
     cleanup_rendered_files "${_rendered_yaml_files[@]}"

     log-info "Adding ${_source_name} cost models..."
-    add_cost_models 'Test OCP on Premises' openshift_on_prem_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
+    add_cost_models 'Test OCP on Premises' openshift_on_prem_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"

     log-info "Trigger downloads..."
     trigger_download "${_download_types[@]}"
@@ -407,7 +412,7 @@ build_oci_data() {
     cleanup_rendered_files "${_rendered_yaml_files[@]}"

     log-info "Adding ${_source_name} cost models..."
-    add_cost_models 'Test OCI Source' oci_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
+    add_cost_models 'Test OCI Source' oci_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"

     log-info "Trigger downloads..."
     trigger_download "${_download_types[@]}"
@@ -422,7 +427,7 @@ build_all(){
 }

 # ---execute---
-provider_arg=`echo ${1} |tr [a-z] [A-Z]`
+provider_arg=$(echo "${1}" | tr '[:lower:]' '[:upper:]')

 case ${provider_arg} in
     "AWS")
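Finally, the argument-parsing line: unquoted `tr [a-z] [A-Z]` hands glob patterns to the shell first, so a single-character filename in the working directory silently rewrites the arguments (SC2060), and ranges like `a-z` are locale-dependent; the quoted POSIX classes avoid both. A repro of the glob hazard:

```bash
#!/usr/bin/env bash
cd "$(mktemp -d)" || exit 1
touch z                                  # a one-letter filename in the CWD
echo aws | tr [a-z] [A-Z]                # [a-z] glob-expands to "z": prints "aws",
                                         # so case ${provider_arg} never matches "AWS"
echo aws | tr '[:lower:]' '[:upper:]'    # quoted classes: prints "AWS"
```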