Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ This project provides a scalable API backend using FastAPI and PostgreSQL, featu
### 1. Clone & Setup Environment

```bash
git clone <repo-url>
git clone https://github.com/goldlabelapps/python.git
cd python
cp .env.sample .env # Add your Postgres credentials and settings
python -m venv venv
Expand Down
2 changes: 1 addition & 1 deletion app/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Python - FastAPI, Postgres, tsvector"""

# Current Version
__version__ = "2.2.0"
__version__ = "2.2.1"
4 changes: 4 additions & 0 deletions app/api/orders/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
"""Order routes."""  # was "Prospect Routes" — copy-paste from another package

from .orders import router as orders_router
# from .flag import router as flag_router
72 changes: 72 additions & 0 deletions app/api/orders/orders.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
from app import __version__
import os
from app.utils.make_meta import make_meta
from fastapi import APIRouter, Query, Path, Body, HTTPException
from app.utils.db import get_db_connection

router = APIRouter()
base_url = os.getenv("BASE_URL", "http://localhost:8000")




# GET /orders: paginated, filtered, ordered listing of the orders table.
@router.get("/orders")
def get_orders(
    page: int = Query(1, ge=1, description="Page number (1-based)"),
    limit: int = Query(100, ge=1, le=500, description="Records per page (default 100, max 500)"),
    search: str = Query(None, description="Search string (case-insensitive, partial match)"),
    hideflagged: bool = Query(False, description="If true, flagged records are excluded")
) -> dict:
    """Return one page of non-hidden orders plus meta and pagination info.

    Fixes over the previous version:
    - ``search`` was accepted but never applied; it now filters
      case-insensitively on name, sku and description (text columns that
      exist in the orders table per the importer's column list).
    - Added a deterministic ORDER BY: PostgreSQL gives no guaranteed row
      order without one, so OFFSET/LIMIT pages could repeat or skip rows.
    """
    meta = make_meta("success", "Read paginated orders")
    conn_gen = get_db_connection()
    conn = next(conn_gen)  # get_db_connection yields a connection
    cur = conn.cursor()
    offset = (page - 1) * limit
    data = []
    total = 0
    try:
        # Build WHERE clause; user input only ever travels via params,
        # never interpolated into SQL text.
        where_clauses = ["hide IS NOT TRUE"]
        params = []
        if hideflagged:
            where_clauses.append("flag IS NOT TRUE")
        if search:
            # Previously declared but ignored; match the existing text columns.
            where_clauses.append("(name ILIKE %s OR sku ILIKE %s OR description ILIKE %s)")
            pattern = f"%{search}%"
            params.extend([pattern, pattern, pattern])
        where_sql = " AND ".join(where_clauses)

        # Count query (drives the pagination totals).
        cur.execute(f'SELECT COUNT(*) FROM orders WHERE {where_sql};', params)
        count_row = cur.fetchone() if cur.description is not None else None
        total = count_row[0] if count_row is not None else 0

        # Data query; ORDER BY sku keeps pages stable across requests.
        data_query = f'''
            SELECT * FROM orders
            WHERE {where_sql}
            ORDER BY sku
            OFFSET %s LIMIT %s;
        '''
        cur.execute(data_query, params + [offset, limit])
        if cur.description is not None:
            columns = [desc[0] for desc in cur.description]
            data = [dict(zip(columns, row)) for row in cur.fetchall()]
    except Exception as e:
        # Surface the failure in meta instead of a 500; response shape stays stable.
        data = []
        total = 0
        meta = make_meta("error", f"Failed to read orders: {str(e)}")
    finally:
        cur.close()
        # NOTE(review): the generator from get_db_connection() is never resumed
        # or closed, so any cleanup it has after its yield will not run — verify.
        conn.close()
    return {
        "meta": meta,
        "pagination": {
            "page": page,
            "limit": limit,
            "total": total,
            # Ceiling division: number of pages needed to cover `total` rows.
            "pages": (total // limit) + (1 if total % limit else 0)
        },
        "data": data,
    }

21 changes: 21 additions & 0 deletions app/api/orders/sql/check_orders_table.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
"""
Script to check the number of rows in the orders table and print a sample row.
"""
from app.utils.db import get_db_connection_direct

def check_orders_table():
    """Print the orders table row count and, if non-empty, one sample row."""
    conn = get_db_connection_direct()
    with conn, conn.cursor() as cur:
        cur.execute("SELECT COUNT(*) FROM orders;")
        total_rows = cur.fetchone()[0]
        print(f"orders table row count: {total_rows}")
        if total_rows > 0:
            cur.execute("SELECT * FROM orders LIMIT 1;")
            sample = cur.fetchone()
            print("Sample row:")
            print(sample)
        print("Check complete.")


if __name__ == "__main__":
    check_orders_table()
90 changes: 90 additions & 0 deletions app/api/orders/sql/import_magento_products_to_orders.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
"""
Script to import data from magento_products.csv into the orders table.
"""
import csv
from datetime import datetime
from app.utils.db import get_db_connection_direct

# Path to the Magento product-export CSV, relative to the repository root.
CSV_PATH = "app/static/csv/magento_products.csv"

# List of columns in the orders table (must match the table definition).
# Order matters: values are positional in the INSERT built from this list.
ORDERS_COLUMNS = [
    "sku","store_view_code","attribute_set_code","product_type","categories","product_websites","name","description","short_description","weight","product_online","tax_class_name","visibility","price","special_price","special_price_from_date","special_price_to_date","url_key","meta_title","meta_keywords","meta_description","base_image","base_image_label","small_image","small_image_label","thumbnail_image","thumbnail_image_label","swatch_image","swatch_image_label","created_at","updated_at","new_from_date","new_to_date","display_product_options_in","map_price","msrp_price","map_enabled","gift_message_available","custom_design","custom_design_from","custom_design_to","custom_layout_update","page_layout","product_options_container","msrp_display_actual_price_type","country_of_manufacture","additional_attributes","qty","out_of_stock_qty","use_config_min_qty","is_qty_decimal","allow_backorders","use_config_backorders","min_cart_qty","use_config_min_sale_qty","max_cart_qty","use_config_max_sale_qty","is_in_stock","notify_on_stock_below","use_config_notify_stock_qty","manage_stock","use_config_manage_stock","use_config_qty_increments","qty_increments","use_config_enable_qty_inc","enable_qty_increments","is_decimal_divided","website_id","related_skus","related_position","crosssell_skus","crosssell_position","upsell_skus","upsell_position","additional_images","additional_image_labels","hide_from_product_page","custom_options","bundle_price_type","bundle_sku_type","bundle_price_view","bundle_weight_type","bundle_values","bundle_shipment_type","associated_skus","downloadable_links","downloadable_samples","configurable_variations","configurable_variation_labels","hide","flag"
]


# Columns stored as booleans in the orders table.
_BOOL_COLS = frozenset([
    "hide", "flag", "product_online", "use_config_min_qty", "is_qty_decimal",
    "allow_backorders", "use_config_backorders", "use_config_min_sale_qty",
    "use_config_max_sale_qty", "is_in_stock", "notify_on_stock_below",
    "use_config_notify_stock_qty", "manage_stock", "use_config_manage_stock",
    "use_config_qty_increments", "use_config_enable_qty_inc",
    "enable_qty_increments", "is_decimal_divided",
])

# Columns stored as numerics in the orders table.
_NUM_COLS = frozenset([
    "weight", "price", "special_price", "map_price", "msrp_price", "qty",
    "out_of_stock_qty", "min_cart_qty", "max_cart_qty", "qty_increments",
])


def _unique_sku(original_sku, seen_skus):
    """Return a SKU unique within this batch, suffixing _1, _2, ... on duplicates."""
    new_sku = original_sku
    suffix = 1
    while new_sku in seen_skus:
        new_sku = f"{original_sku}_{suffix}"
        suffix += 1
    if new_sku != original_sku:
        print(f"Duplicate SKU found: {original_sku}, changed to {new_sku}")
    seen_skus.add(new_sku)
    return new_sku


def _coerce(col, val):
    """Coerce a CSV value to the column's SQL type; None when unparseable."""
    if val is None:
        return None
    if col in _BOOL_COLS:
        return str(val).lower() in ("1", "true", "t", "yes")
    if col in _NUM_COLS:
        try:
            return float(val)
        except ValueError:
            return None
    if col.endswith("_date") or col.endswith("_at") or col in ("custom_design_from", "custom_design_to"):
        # Magento exports use m/d/yy; also accept ISO dates.
        for fmt in ("%m/%d/%y", "%Y-%m-%d"):
            try:
                return datetime.strptime(val, fmt)
            except (ValueError, TypeError):
                continue
        return None
    return val


def import_csv_to_orders():
    """Truncate the orders table and re-import it from CSV_PATH.

    Each row gets a batch-unique SKU, empty strings become NULL, and
    boolean/numeric/date columns are coerced before insertion.

    Fix: each INSERT now runs under a SAVEPOINT. Previously one failed
    INSERT aborted the whole PostgreSQL transaction, so every later row
    failed with "current transaction is aborted" and nothing was imported.
    """
    conn = get_db_connection_direct()
    inserted = 0
    total = 0
    # Hoist the invariant SQL out of the per-row loop.
    placeholders = ','.join(['%s'] * len(ORDERS_COLUMNS))
    insert_sql = f"INSERT INTO orders ({', '.join(ORDERS_COLUMNS)}) VALUES ({placeholders})"
    with conn:
        with conn.cursor() as cur:
            print("Clearing orders table before import...")
            cur.execute("TRUNCATE TABLE orders;")
            print("orders table cleared.")
            seen_skus = set()
            with open(CSV_PATH, newline='', encoding='utf-8') as csvfile:
                reader = csv.DictReader(csvfile)
                for idx, row in enumerate(reader):
                    row["sku"] = _unique_sku(row.get("sku"), seen_skus)
                    total += 1
                    # Add hide and flag fields if not present.
                    row.setdefault("hide", False)
                    row.setdefault("flag", False)
                    # Empty strings become NULL, then per-column type coercion.
                    values = [
                        _coerce(col, row.get(col) if row.get(col) != '' else None)
                        for col in ORDERS_COLUMNS
                    ]
                    if idx == 0:
                        # Debug aid: show the first parsed row and its values.
                        print("First CSV row:", row)
                        print("First values list:", values)
                    # SAVEPOINT so a single bad row can't poison the transaction.
                    cur.execute("SAVEPOINT row_import;")
                    try:
                        cur.execute(insert_sql, values)
                        cur.execute("RELEASE SAVEPOINT row_import;")
                        inserted += 1
                    except Exception as e:
                        cur.execute("ROLLBACK TO SAVEPOINT row_import;")
                        print(f"Error inserting row {idx+1} (sku={row.get('sku')}): {e}")
    try:
        # `with conn:` already committed; this confirms / surfaces late errors.
        conn.commit()
        print("Database commit successful.")
    except Exception as e:
        print(f"Error during commit: {e}")
    print(f"Total rows processed: {total}")
    print(f"Total rows attempted to insert: {inserted}")
    if inserted == 0:
        print("No rows were inserted. Please check for errors above or review the data and schema alignment.")
    print("Data import attempt complete.")


if __name__ == "__main__":
    import_csv_to_orders()
Loading
Loading