Skip to content

Commit 2ea0e49

Browse files
mdesmet authored and claude committed
fix: migrate PR #283 — Snowflake column lowercasing, warehouse-advisor SQL fixes, finops e2e tests
- snowflake.ts: normalize column names to lowercase at driver level (rawColumns for value lookup, columns.map(toLowerCase) for keys) - warehouse-advisor: remove warehouse_size from SNOWFLAKE_LOAD_SQL and SNOWFLAKE_SIZING_SQL; source size via SHOW WAREHOUSES with silent fallback; run load+sizing queries in parallel via Promise.all - unused-resources: convert {days}/{limit} string interpolation to parameterized binds (?, ?) for all Snowflake/BigQuery/Databricks queries - tags.ts: remove non-existent 'comment' column from TAGS query - drivers-snowflake-e2e.test.ts: update column assertions to lowercase - Add finops-databricks-e2e.test.ts and finops-snowflake-e2e.test.ts (12/12 and 17/17 pass respectively) - schema-finops-dbt.test.ts: add regression tests for issue #203 Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
1 parent 30fe57d commit 2ea0e49

File tree

8 files changed

+732
-47
lines changed

8 files changed

+732
-47
lines changed

packages/drivers/src/snowflake.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,9 +41,10 @@ export async function connect(config: ConnectionConfig): Promise<Connector> {
4141
if (!rows || rows.length === 0) {
4242
return resolve({ columns: [], rows: [] })
4343
}
44-
const columns = Object.keys(rows[0])
44+
const rawColumns = Object.keys(rows[0])
45+
const columns = rawColumns.map((col) => col.toLowerCase())
4546
const mapped = rows.map((row) =>
46-
columns.map((col) => row[col]),
47+
rawColumns.map((col) => row[col]),
4748
)
4849
resolve({ columns, rows: mapped })
4950
},

packages/opencode/src/altimate/native/finops/unused-resources.ts

Lines changed: 14 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -32,10 +32,10 @@ WHERE active_bytes > 0
3232
FROM SNOWFLAKE.ACCOUNT_USAGE.ACCESS_HISTORY ah,
3333
LATERAL FLATTEN(input => ah.base_objects_accessed) f
3434
WHERE f.value:"objectName"::string = table_catalog || '.' || table_schema || '.' || table_name
35-
AND ah.query_start_time >= DATEADD('day', -{days}, CURRENT_TIMESTAMP())
35+
AND ah.query_start_time >= DATEADD('day', ?, CURRENT_TIMESTAMP())
3636
)
3737
ORDER BY size_bytes DESC NULLS LAST
38-
LIMIT {limit}
38+
LIMIT ?
3939
`
4040

4141
const SNOWFLAKE_UNUSED_TABLES_SIMPLE_SQL = `
@@ -51,9 +51,9 @@ FROM SNOWFLAKE.ACCOUNT_USAGE.TABLE_STORAGE_METRICS
5151
WHERE active_bytes > 0
5252
AND table_catalog NOT IN ('SNOWFLAKE')
5353
AND table_schema NOT IN ('INFORMATION_SCHEMA')
54-
AND last_altered < DATEADD('day', -{days}, CURRENT_TIMESTAMP())
54+
AND last_altered < DATEADD('day', ?, CURRENT_TIMESTAMP())
5555
ORDER BY size_bytes DESC NULLS LAST
56-
LIMIT {limit}
56+
LIMIT ?
5757
`
5858

5959
const SNOWFLAKE_IDLE_WAREHOUSES_SQL = `
@@ -68,7 +68,7 @@ SELECT
6868
WHEN name NOT IN (
6969
SELECT DISTINCT warehouse_name
7070
FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY
71-
WHERE start_time >= DATEADD('day', -{days}, CURRENT_TIMESTAMP())
71+
WHERE start_time >= DATEADD('day', ?, CURRENT_TIMESTAMP())
7272
) THEN TRUE
7373
ELSE FALSE
7474
END as is_idle
@@ -92,9 +92,9 @@ SELECT
9292
creation_time as created
9393
FROM \`region-US.INFORMATION_SCHEMA.TABLE_STORAGE\`
9494
WHERE NOT deleted
95-
AND last_modified_time < UNIX_MILLIS(TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL {days} DAY))
95+
AND last_modified_time < UNIX_MILLIS(TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL ? DAY))
9696
ORDER BY size_bytes DESC
97-
LIMIT {limit}
97+
LIMIT ?
9898
`
9999

100100
// ---------------------------------------------------------------------------
@@ -111,9 +111,9 @@ SELECT
111111
last_altered,
112112
created
113113
FROM system.information_schema.tables
114-
WHERE last_altered < DATE_SUB(CURRENT_DATE(), {days})
114+
WHERE last_altered < DATE_SUB(CURRENT_DATE(), ?)
115115
ORDER BY last_altered ASC
116-
LIMIT {limit}
116+
LIMIT ?
117117
`
118118

119119
// ---------------------------------------------------------------------------
@@ -165,17 +165,11 @@ export async function findUnusedResources(params: UnusedResourcesParams): Promis
165165
if (whType === "snowflake") {
166166
// Try ACCESS_HISTORY first, fall back to simple query
167167
try {
168-
const sql = SNOWFLAKE_UNUSED_TABLES_SQL
169-
.replace("{days}", String(days))
170-
.replace("{limit}", String(limit))
171-
const result = await connector.execute(sql, limit)
168+
const result = await connector.execute(SNOWFLAKE_UNUSED_TABLES_SQL, limit, [-days, limit])
172169
unusedTables = rowsToRecords(result)
173170
} catch {
174171
try {
175-
const sql = SNOWFLAKE_UNUSED_TABLES_SIMPLE_SQL
176-
.replace("{days}", String(days))
177-
.replace("{limit}", String(limit))
178-
const result = await connector.execute(sql, limit)
172+
const result = await connector.execute(SNOWFLAKE_UNUSED_TABLES_SIMPLE_SQL, limit, [-days, limit])
179173
unusedTables = rowsToRecords(result)
180174
} catch (e) {
181175
errors.push(`Could not query unused tables: ${e}`)
@@ -184,29 +178,22 @@ export async function findUnusedResources(params: UnusedResourcesParams): Promis
184178

185179
// Idle warehouses
186180
try {
187-
const sql = SNOWFLAKE_IDLE_WAREHOUSES_SQL.replace("{days}", String(days))
188-
const result = await connector.execute(sql, 1000)
181+
const result = await connector.execute(SNOWFLAKE_IDLE_WAREHOUSES_SQL, 1000, [-days])
189182
const all = rowsToRecords(result)
190183
idleWarehouses = all.filter((w) => w.is_idle)
191184
} catch (e) {
192185
errors.push(`Could not query idle warehouses: ${e}`)
193186
}
194187
} else if (whType === "bigquery") {
195188
try {
196-
const sql = BIGQUERY_UNUSED_TABLES_SQL
197-
.replace("{days}", String(days))
198-
.replace("{limit}", String(limit))
199-
const result = await connector.execute(sql, limit)
189+
const result = await connector.execute(BIGQUERY_UNUSED_TABLES_SQL, limit, [days, limit])
200190
unusedTables = rowsToRecords(result)
201191
} catch (e) {
202192
errors.push(`Could not query unused tables: ${e}`)
203193
}
204194
} else if (whType === "databricks") {
205195
try {
206-
const sql = DATABRICKS_UNUSED_TABLES_SQL
207-
.replace(/{days}/g, String(days))
208-
.replace("{limit}", String(limit))
209-
const result = await connector.execute(sql, limit)
196+
const result = await connector.execute(DATABRICKS_UNUSED_TABLES_SQL, limit, [days, limit])
210197
unusedTables = rowsToRecords(result)
211198
} catch (e) {
212199
errors.push(`Could not query unused tables: ${e}`)

packages/opencode/src/altimate/native/finops/warehouse-advisor.ts

Lines changed: 38 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -17,21 +17,19 @@ import type {
1717
const SNOWFLAKE_LOAD_SQL = `
1818
SELECT
1919
warehouse_name,
20-
warehouse_size,
2120
AVG(avg_running) as avg_concurrency,
2221
AVG(avg_queued_load) as avg_queue_load,
2322
MAX(avg_queued_load) as peak_queue_load,
2423
COUNT(*) as sample_count
2524
FROM SNOWFLAKE.ACCOUNT_USAGE.WAREHOUSE_LOAD_HISTORY
2625
WHERE start_time >= DATEADD('day', -{days}, CURRENT_TIMESTAMP())
27-
GROUP BY warehouse_name, warehouse_size
26+
GROUP BY warehouse_name
2827
ORDER BY avg_queue_load DESC
2928
`
3029

3130
const SNOWFLAKE_SIZING_SQL = `
3231
SELECT
3332
warehouse_name,
34-
warehouse_size,
3533
COUNT(*) as query_count,
3634
AVG(total_elapsed_time) / 1000.0 as avg_time_sec,
3735
PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY total_elapsed_time) / 1000.0 as p95_time_sec,
@@ -40,10 +38,14 @@ SELECT
4038
FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY
4139
WHERE start_time >= DATEADD('day', -{days}, CURRENT_TIMESTAMP())
4240
AND execution_status = 'SUCCESS'
43-
GROUP BY warehouse_name, warehouse_size
41+
GROUP BY warehouse_name
4442
ORDER BY total_credits DESC
4543
`
4644

45+
// SHOW WAREHOUSES: fast metadata query — no ACCOUNT_USAGE latency, no credits consumed.
46+
// Used to look up current warehouse sizes for recommendations.
47+
const SNOWFLAKE_SHOW_WAREHOUSES = "SHOW WAREHOUSES"
48+
4749
// ---------------------------------------------------------------------------
4850
// BigQuery SQL templates
4951
// ---------------------------------------------------------------------------
@@ -150,13 +152,15 @@ function rowsToRecords(result: { columns: string[]; rows: any[][] }): Record<str
150152
}
151153

152154
function generateSizingRecommendations(
153-
loadData: Record<string, unknown>[], sizingData: Record<string, unknown>[],
155+
loadData: Record<string, unknown>[],
156+
sizingData: Record<string, unknown>[],
157+
sizeByWarehouse: Map<string, string>,
154158
): Record<string, unknown>[] {
155159
const recs: Record<string, unknown>[] = []
156160

157161
for (const wh of loadData) {
158162
const name = String(wh.warehouse_name || "unknown")
159-
const size = String(wh.warehouse_size || "unknown")
163+
const size = sizeByWarehouse.get(name) ?? String(wh.warehouse_size || "unknown")
160164
const avgQueue = Number(wh.avg_queue_load || 0)
161165
const peakQueue = Number(wh.peak_queue_load || 0)
162166
const avgConcurrency = Number(wh.avg_concurrency || 0)
@@ -230,12 +234,35 @@ export async function adviseWarehouse(params: WarehouseAdvisorParams): Promise<W
230234

231235
try {
232236
const connector = await Registry.get(params.warehouse)
233-
const loadResult = await connector.execute(loadSql, 1000)
234-
const sizingResult = await connector.execute(sizingSql, 1000)
237+
238+
// Run load and sizing queries in parallel
239+
const [loadResult, sizingResult] = await Promise.all([
240+
connector.execute(loadSql, 1000),
241+
connector.execute(sizingSql, 1000),
242+
])
243+
244+
// Build warehouse name → size map from SHOW WAREHOUSES (Snowflake only).
245+
// Failures (e.g. insufficient privileges) are silently ignored — recommendations
246+
// still work but show "unknown" for size.
247+
const sizeByWarehouse = new Map<string, string>()
248+
if (whType === "snowflake") {
249+
try {
250+
const showResult = await connector.execute(SNOWFLAKE_SHOW_WAREHOUSES, 1000)
251+
const nameIdx = showResult.columns.indexOf("name")
252+
const sizeIdx = showResult.columns.indexOf("size")
253+
for (const row of showResult.rows) {
254+
if (nameIdx >= 0 && sizeIdx >= 0) {
255+
sizeByWarehouse.set(String(row[nameIdx]), String(row[sizeIdx]))
256+
}
257+
}
258+
} catch {
259+
// ignore — size will fall back to "unknown"
260+
}
261+
}
235262

236263
const loadData = rowsToRecords(loadResult)
237264
const sizingData = rowsToRecords(sizingResult)
238-
const recommendations = generateSizingRecommendations(loadData, sizingData)
265+
const recommendations = generateSizingRecommendations(loadData, sizingData, sizeByWarehouse)
239266

240267
return {
241268
success: true,
@@ -260,10 +287,12 @@ export async function adviseWarehouse(params: WarehouseAdvisorParams): Promise<W
260287
export const SQL_TEMPLATES = {
261288
SNOWFLAKE_LOAD_SQL,
262289
SNOWFLAKE_SIZING_SQL,
290+
SNOWFLAKE_SHOW_WAREHOUSES,
263291
BIGQUERY_LOAD_SQL,
264292
BIGQUERY_SIZING_SQL,
265293
DATABRICKS_LOAD_SQL,
266294
DATABRICKS_SIZING_SQL,
267295
buildLoadSql,
268296
buildSizingSql,
297+
generateSizingRecommendations,
269298
}

packages/opencode/src/altimate/native/schema/tags.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,6 @@ SELECT
3737
tag_schema,
3838
tag_name,
3939
tag_owner,
40-
comment,
4140
created
4241
FROM SNOWFLAKE.ACCOUNT_USAGE.TAGS
4342
WHERE deleted IS NULL

packages/opencode/test/altimate/drivers-snowflake-e2e.test.ts

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ describe.skipIf(!HAS_SNOWFLAKE)("Snowflake Driver E2E", () => {
4949
describe("Password Auth", () => {
5050
test("connects successfully with password", async () => {
5151
const result = await connector.execute("SELECT CURRENT_USER() AS u")
52-
expect(result.columns).toEqual(["U"])
52+
expect(result.columns).toEqual(["u"])
5353
expect(result.rows.length).toBe(1)
5454
expect(typeof result.rows[0][0]).toBe("string")
5555
})
@@ -76,7 +76,7 @@ describe.skipIf(!HAS_SNOWFLAKE)("Snowflake Driver E2E", () => {
7676
describe("Query Execution", () => {
7777
test("SELECT literal integer", async () => {
7878
const r = await connector.execute("SELECT 1 AS n")
79-
expect(r.columns).toEqual(["N"])
79+
expect(r.columns).toEqual(["n"])
8080
expect(r.rows).toEqual([[1]])
8181
expect(r.row_count).toBe(1)
8282
expect(r.truncated).toBe(false)
@@ -103,7 +103,7 @@ describe.skipIf(!HAS_SNOWFLAKE)("Snowflake Driver E2E", () => {
103103
const r = await connector.execute(
104104
"SELECT 1 AS a, 'b' AS b, TRUE AS c, NULL AS d",
105105
)
106-
expect(r.columns).toEqual(["A", "B", "C", "D"])
106+
expect(r.columns).toEqual(["a", "b", "c", "d"])
107107
expect(r.rows[0][0]).toBe(1)
108108
expect(r.rows[0][1]).toBe("b")
109109
expect(r.rows[0][2]).toBe(true)
@@ -203,7 +203,7 @@ describe.skipIf(!HAS_SNOWFLAKE)("Snowflake Driver E2E", () => {
203203
"SELECT * FROM _altimate_sf_e2e_test ORDER BY id",
204204
)
205205
expect(r.row_count).toBe(3)
206-
expect(r.columns).toEqual(["ID", "NAME", "ACTIVE"])
206+
expect(r.columns).toEqual(["id", "name", "active"])
207207
})
208208

209209
test("UPDATE row", async () => {
@@ -239,15 +239,15 @@ describe.skipIf(!HAS_SNOWFLAKE)("Snowflake Driver E2E", () => {
239239
const r = await connector.execute(
240240
"SELECT ARRAY_CONSTRUCT(1, 2, 3) AS arr, OBJECT_CONSTRUCT('key', 'value') AS obj",
241241
)
242-
expect(r.columns).toEqual(["ARR", "OBJ"])
242+
expect(r.columns).toEqual(["arr", "obj"])
243243
expect(r.rows.length).toBe(1)
244244
})
245245

246246
test("DATE / TIME / TIMESTAMP", async () => {
247247
const r = await connector.execute(
248248
"SELECT CURRENT_DATE() AS d, CURRENT_TIME() AS t, CURRENT_TIMESTAMP() AS ts",
249249
)
250-
expect(r.columns).toEqual(["D", "T", "TS"])
250+
expect(r.columns).toEqual(["d", "t", "ts"])
251251
})
252252

253253
test("BOOLEAN", async () => {
@@ -360,7 +360,7 @@ describe.skipIf(!HAS_SNOWFLAKE)("Snowflake Driver E2E", () => {
360360
undefined,
361361
["alice"],
362362
)
363-
expect(result.columns).toEqual(["NAME"])
363+
expect(result.columns).toEqual(["name"])
364364
expect(result.rows).toHaveLength(1)
365365
expect(result.rows[0][0]).toBe("alice")
366366
})
@@ -437,7 +437,7 @@ describe.skipIf(!HAS_SNOWFLAKE)("Snowflake Driver E2E", () => {
437437

438438
test("scalar bind — SELECT ? returns the bound value", async () => {
439439
const result = await connector.execute("SELECT ? AS val", undefined, [42])
440-
expect(result.columns).toEqual(["VAL"])
440+
expect(result.columns).toEqual(["val"])
441441
expect(result.rows[0][0]).toBe(42)
442442
})
443443

0 commit comments

Comments (0)