Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
-- Credit Limit Alerts by Organization (80%+)
-- Uses backend_griffin_market tables with shared/non-shared pool logic
-- See shared/enterprise-token-pools.md for complete documentation

-- Shared pool orgs (most enterprise): one token pool per group (ltid),
-- consumption drawn from it by many users.
-- NOTE(review): the join is on consumable_id only; if one consumable_id can
-- appear on several purchase rows, sum(p.quantity) is inflated by the join
-- fan-out -- confirm consumable_id uniquely identifies a purchase row.
with shared_pool as (
    select
        p.ltid as group_id,
        p.organization_id as org,
        'shared' as pool_type,
        sum(p.quantity) as total_purchased,
        -- sum() already skips nulls from unmatched left-join rows; coalesce
        -- covers the nothing-consumed (all-null) case
        coalesce(sum(c.quantity), 0) as total_consumed,
        -- distinct consumers drawing on this shared pool
        count(distinct c.consumed_by_ltid) as users_at_risk,
        timestamp_millis(max(p.created_at_ms)) as last_purchase_ts
    from `ltx-dwh-prod-processed`.base.backend_griffin_market_purchased_consumables p
    left join `ltx-dwh-prod-processed`.base.backend_griffin_market_consumed_consumables c
        on p.consumable_id = c.consumable_id
    where p.is_shared_token_pool = true
        and p.revoked_at_ms is null  -- ignore revoked grants
    group by 1, 2
),

-- Non-shared pool users (McCann_NY, Cylndr SSO, code users with orgs):
-- each row is a single user's personal pool, hence users_at_risk = 1.
non_shared_pool as (
    select
        p.ltid as group_id,
        p.organization_id as org,
        'non_shared' as pool_type,
        sum(p.quantity) as total_purchased,
        coalesce(sum(c.quantity), 0) as total_consumed,
        1 as users_at_risk,
        timestamp_millis(max(p.created_at_ms)) as last_purchase_ts
    from `ltx-dwh-prod-processed`.base.backend_griffin_market_purchased_consumables p
    left join `ltx-dwh-prod-processed`.base.backend_griffin_market_consumed_consumables c
        on p.consumable_id = c.consumable_id
    where p.is_shared_token_pool = false
        and p.revoked_at_ms is null
        and p.organization_id is not null -- Only enterprise users, exclude self-serve code users
    group by 1, 2
),

-- Combine both pool types (rows are disjoint by is_shared_token_pool,
-- so union all is correct and avoids a pointless dedup)
combined_pools as (
    select * from shared_pool
    union all
    select * from non_shared_pool
),

-- Add enterprise classification and calculate usage metrics
enterprise_orgs as (
    select
        *,
        case
            -- McCann split: McCann orgs get their own ent_type and are
            -- therefore excluded from both arrays in the final select
            when org = 'McCann_NY' then 'McCann_NY'
            when org like '%McCann%' then 'McCann_Paris'

            -- Contracted enterprise. ('McCann_Paris' removed from this list:
            -- it was unreachable -- any McCann org is already classified by
            -- the like-branch above.)
            when org in (
                'Indegene',
                'HearWell_BeWell',
                'Novig',
                'Cylndr Studios',
                'Miroma',
                'Deriv'
            ) then 'Enterprise'

            -- Enterprise pilot (all others)
            else 'Enterprise Pilot'
        end as ent_type,

        -- Remaining balance and percent consumed (safe_divide -> null on
        -- zero purchases instead of a division error)
        total_purchased - total_consumed as current_balance,
        safe_divide(total_consumed, total_purchased) * 100 as pct_used
    from combined_pools
    -- internal / placeholder orgs are never alert-worthy
    where org not in ('Lightricks', 'Popular Pays', 'None')
),

-- Keep only pools at or above the alert threshold
alerts as (
    select
        ent_type,
        org,
        pool_type,
        users_at_risk,
        total_purchased,
        current_balance,
        total_consumed,
        round(pct_used, 1) as avg_pct_used,
        last_purchase_ts
    from enterprise_orgs
    where pct_used >= 80 -- At or above 80% usage
),

-- Roll the at-risk pools up to one row per org.
-- NOTE(review): only pools already >= 80% reach this CTE, so
-- org_avg_pct_used averages the at-risk pools only, not the whole org --
-- confirm that is the intended reading of "org average".
org_summary as (
    select
        ent_type,
        org as enterprise_org,
        -- deterministic pick when an org has both pool types
        -- ('shared' sorts after 'non_shared')
        max(pool_type) as pool_type,
        sum(users_at_risk) as users_at_risk,
        sum(total_purchased) as org_total_purchased,
        sum(current_balance) as org_current_balance,
        sum(total_consumed) as org_consumed,
        round(safe_divide(sum(total_consumed), sum(total_purchased)) * 100, 1) as org_avg_pct_used
    from alerts
    group by ent_type, enterprise_org
)

-- One-row report: summary counts plus per-type detail arrays, worst first.
select
    date_sub(current_date(), interval 1 day) as report_date,  -- report labeled "through yesterday"

    -- Summary counts (McCann_NY / McCann_Paris ent_types fall in neither
    -- bucket and so are counted in neither)
    countif(ent_type = 'Enterprise') as total_enterprise_orgs_near_limit,
    countif(ent_type = 'Enterprise Pilot') as total_enterprise_pilot_orgs_near_limit,

    -- Enterprise orgs list with details
    array_agg(
        if(ent_type = 'Enterprise',
            struct(
                enterprise_org as org,
                pool_type,
                users_at_risk,
                org_total_purchased as total_purchased,
                org_current_balance as current_balance,
                org_consumed as consumed,
                org_avg_pct_used as avg_pct_used
            ),
            null)
        ignore nulls  -- non-Enterprise rows become null and are dropped here
        order by org_avg_pct_used desc
    ) as enterprise_orgs_at_risk,

    -- Enterprise Pilot orgs list with details
    array_agg(
        if(ent_type = 'Enterprise Pilot',
            struct(
                enterprise_org as org,
                pool_type,
                users_at_risk,
                org_total_purchased as total_purchased,
                org_current_balance as current_balance,
                org_consumed as consumed,
                org_avg_pct_used as avg_pct_used
            ),
            null)
        ignore nulls
        order by org_avg_pct_used desc
    ) as enterprise_pilot_orgs_at_risk

from org_summary
Original file line number Diff line number Diff line change
@@ -0,0 +1,177 @@
-- Credit Limit Alerts by Organization (80%+) - Active in Last Week
-- Uses backend_griffin_market tables with shared/non-shared pool logic
-- See shared/enterprise-token-pools.md for complete documentation

-- Shared pool orgs (most enterprise): one token pool per group (ltid),
-- consumption drawn from it by many users.
-- NOTE(review): the join is on consumable_id only; if one consumable_id can
-- appear on several purchase rows, sum(p.quantity) is inflated by the join
-- fan-out -- confirm consumable_id uniquely identifies a purchase row.
with shared_pool as (
    select
        p.ltid as group_id,
        p.organization_id as org,
        'shared' as pool_type,
        sum(p.quantity) as total_purchased,
        -- sum() already skips nulls from unmatched left-join rows; coalesce
        -- covers the nothing-consumed (all-null) case
        coalesce(sum(c.quantity), 0) as total_consumed,
        -- distinct consumers drawing on this shared pool
        count(distinct c.consumed_by_ltid) as users_at_risk,
        timestamp_millis(max(p.created_at_ms)) as last_purchase_ts
    from `ltx-dwh-prod-processed`.base.backend_griffin_market_purchased_consumables p
    left join `ltx-dwh-prod-processed`.base.backend_griffin_market_consumed_consumables c
        on p.consumable_id = c.consumable_id
    where p.is_shared_token_pool = true
        and p.revoked_at_ms is null  -- ignore revoked grants
    group by 1, 2
),

-- Non-shared pool users (McCann_NY, Cylndr SSO, code users with orgs):
-- each row is a single user's personal pool, hence users_at_risk = 1.
non_shared_pool as (
    select
        p.ltid as group_id,
        p.organization_id as org,
        'non_shared' as pool_type,
        sum(p.quantity) as total_purchased,
        coalesce(sum(c.quantity), 0) as total_consumed,
        1 as users_at_risk,
        timestamp_millis(max(p.created_at_ms)) as last_purchase_ts
    from `ltx-dwh-prod-processed`.base.backend_griffin_market_purchased_consumables p
    left join `ltx-dwh-prod-processed`.base.backend_griffin_market_consumed_consumables c
        on p.consumable_id = c.consumable_id
    where p.is_shared_token_pool = false
        and p.revoked_at_ms is null
        and p.organization_id is not null -- Only enterprise users, exclude self-serve code users
    group by 1, 2
),

-- Combine both pool types (rows are disjoint by is_shared_token_pool,
-- so union all is correct and avoids a pointless dedup)
combined_pools as (
    select * from shared_pool
    union all
    select * from non_shared_pool
),

-- Add enterprise classification and calculate usage metrics
enterprise_orgs as (
    select
        *,
        case
            -- McCann split: McCann orgs get their own ent_type and are
            -- therefore excluded from both arrays in the final select
            when org = 'McCann_NY' then 'McCann_NY'
            when org like '%McCann%' then 'McCann_Paris'

            -- Contracted enterprise. ('McCann_Paris' removed from this list:
            -- it was unreachable -- any McCann org is already classified by
            -- the like-branch above.)
            when org in (
                'Indegene',
                'HearWell_BeWell',
                'Novig',
                'Cylndr Studios',
                'Miroma',
                'Deriv'
            ) then 'Enterprise'

            -- Enterprise pilot (all others)
            else 'Enterprise Pilot'
        end as ent_type,

        -- Remaining balance and percent consumed (safe_divide -> null on
        -- zero purchases instead of a division error)
        total_purchased - total_consumed as current_balance,
        safe_divide(total_consumed, total_purchased) * 100 as pct_used
    from combined_pools
    -- internal / placeholder orgs are never alert-worthy
    where org not in ('Lightricks', 'Popular Pays', 'None')
),

-- Keep only pools at or above the alert threshold
alerts as (
    select
        ent_type,
        org,
        pool_type,
        users_at_risk,
        total_purchased,
        current_balance,
        total_consumed,
        round(pct_used, 1) as avg_pct_used,
        last_purchase_ts
    from enterprise_orgs
    where pct_used >= 80 -- At or above 80% usage
),

-- Enterprise users mapped to a McCann-normalized org name; feeds the
-- last-week activity filter below. coalesce precedence: name at purchase
-- time, then current enterprise, then organization name.
ent_users_list as (
    select distinct
        lt_id,
        case
            when coalesce(enterprise_name_at_purchase, current_enterprise_name, organization_name) = 'McCann_NY' then 'McCann_NY'
            when coalesce(enterprise_name_at_purchase, current_enterprise_name, organization_name) like '%McCann%' then 'McCann_Paris'
            else coalesce(enterprise_name_at_purchase, current_enterprise_name, organization_name)
        end as org
    from `ltx-dwh-prod-processed`.web.ltxstudio_users
    -- NOTE(review): if all three name columns are null, NOT IN evaluates to
    -- null and the row is dropped -- such users can never mark an org as
    -- active; confirm that is intended
    where coalesce(enterprise_name_at_purchase, current_enterprise_name, organization_name) not in ('Lightricks', 'Popular Pays', 'None')
),

-- Orgs with generation activity in last 7 days
active_orgs_last_week as (
    select distinct
        e.org
    from `ltx-dwh-prod-processed`.web.ltxstudio_user_all_actions a
    inner join ent_users_list e on a.lt_id = e.lt_id
    where date(a.action_ts) >= date_sub(current_date(), interval 7 day)
        and a.action_category = 'generations'
),

-- Roll the at-risk pools up to one row per org, keeping only orgs active in
-- the last week.
-- NOTE(review): alerts.org is the raw organization_id while
-- active_orgs_last_week.org is McCann-NORMALIZED ('McCann_NY'/'McCann_Paris');
-- a McCann org whose raw id is not literally one of those strings can never
-- match this join -- confirm against real organization_id values.
-- NOTE(review): only pools already >= 80% reach this CTE, so
-- org_avg_pct_used averages the at-risk pools only, not the whole org.
org_summary as (
    select
        a.ent_type,
        a.org as enterprise_org,
        -- deterministic pick when an org has both pool types
        -- ('shared' sorts after 'non_shared')
        max(a.pool_type) as pool_type,
        sum(a.users_at_risk) as users_at_risk,
        sum(a.total_purchased) as org_total_purchased,
        sum(a.current_balance) as org_current_balance,
        sum(a.total_consumed) as org_consumed,
        round(safe_divide(sum(a.total_consumed), sum(a.total_purchased)) * 100, 1) as org_avg_pct_used
    from alerts a
    inner join active_orgs_last_week aw
        on a.org = aw.org
    group by a.ent_type, a.org
)

-- One-row report: summary counts plus per-type detail arrays, worst first.
select
    date_sub(current_date(), interval 1 day) as report_date,  -- report labeled "through yesterday"

    -- Summary counts (McCann_NY / McCann_Paris ent_types fall in neither
    -- bucket and so are counted in neither)
    countif(ent_type = 'Enterprise') as total_enterprise_orgs_near_limit,
    countif(ent_type = 'Enterprise Pilot') as total_enterprise_pilot_orgs_near_limit,

    -- Enterprise orgs list with details
    array_agg(
        if(ent_type = 'Enterprise',
            struct(
                enterprise_org as org,
                pool_type,
                users_at_risk,
                org_total_purchased as total_purchased,
                org_current_balance as current_balance,
                org_consumed as consumed,
                org_avg_pct_used as avg_pct_used
            ),
            null)
        ignore nulls  -- non-Enterprise rows become null and are dropped here
        order by org_avg_pct_used desc
    ) as enterprise_orgs_at_risk,

    -- Enterprise Pilot orgs list with details
    array_agg(
        if(ent_type = 'Enterprise Pilot',
            struct(
                enterprise_org as org,
                pool_type,
                users_at_risk,
                org_total_purchased as total_purchased,
                org_current_balance as current_balance,
                org_consumed as consumed,
                org_avg_pct_used as avg_pct_used
            ),
            null)
        ignore nulls
        order by org_avg_pct_used desc
    ) as enterprise_pilot_orgs_at_risk

from org_summary
1 change: 1 addition & 0 deletions CLAUDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ Every agent reads from these files. They are the single source of truth — do n
| `shared/bq-schema.md` | BQ tables, columns, joins, segmentation queries | Writing ANY SQL |
| `shared/event-registry.yaml` | Known events per feature, types, status | Referencing ANY event |
| `shared/metric-standards.md` | How every metric is calculated (with SQL) | Defining ANY metric |
| `shared/enterprise-token-pools.md` | Token pool logic, shared vs non-shared, credit limits | Enterprise token/billing analysis |
| `shared/gpu-cost-query-templates.md` | 11 GPU cost queries (DoD, WoW, anomaly detection, breakdowns) | Analyzing GPU/infrastructure costs |
| `shared/gpu-cost-analysis-patterns.md` | Cost analysis workflows, benchmarks, investigation playbooks | Interpreting GPU cost data |

Expand Down
Loading