AshenH commited on
Commit
2e1969a
·
verified ·
1 Parent(s): d861fbf

Update tools/sql_tool.py

Browse files
Files changed (1) hide show
  1. tools/sql_tool.py +132 -429
tools/sql_tool.py CHANGED
@@ -1,440 +1,143 @@
1
- # space/tools/sql_tool.py
2
  import os
3
  import re
4
- import json
5
- import logging
6
- import pandas as pd
7
- from typing import Optional
8
- from utils.config import AppConfig
9
- from utils.tracing import Tracer
10
 
11
- logger = logging.getLogger(__name__)
12
 
13
- RESERVED_MD_WORKSPACE_NAMES = {"", "workspace", "default"}
14
- MAX_QUERY_LENGTH = 50000
15
- MAX_RESULT_ROWS = 100000
16
-
17
-
18
- class SQLToolError(Exception):
19
- """Custom exception for SQL tool errors."""
20
- pass
21
 
 
 
 
 
22
 
23
  class SQLTool:
24
  """
25
- SQL execution tool supporting BigQuery and MotherDuck backends.
26
- Includes input validation, error handling, and secure query execution.
27
  """
28
-
29
- def __init__(self, cfg: AppConfig, tracer: Tracer):
30
- self.cfg = cfg
31
- self.tracer = tracer
32
- self.backend = cfg.sql_backend
33
- self.client = None
34
-
35
- logger.info(f"Initializing SQLTool with backend: {self.backend}")
36
-
37
- try:
38
- if self.backend == "bigquery":
39
- self._init_bigquery()
40
- elif self.backend == "motherduck":
41
- self._init_motherduck()
42
- else:
43
- raise SQLToolError(f"Unknown SQL backend: {self.backend}")
44
-
45
- logger.info(f"SQLTool initialized successfully with {self.backend}")
46
-
47
- except Exception as e:
48
- logger.error(f"Failed to initialize SQLTool: {e}")
49
- raise SQLToolError(f"SQL backend initialization failed: {e}") from e
50
-
51
- def _init_bigquery(self):
52
- """Initialize BigQuery client with service account credentials."""
53
- try:
54
- from google.cloud import bigquery
55
- from google.oauth2 import service_account
56
-
57
- key_json = os.getenv("GCP_SERVICE_ACCOUNT_JSON")
58
- if not key_json:
59
- raise SQLToolError(
60
- "Missing GCP_SERVICE_ACCOUNT_JSON environment variable. "
61
- "Please configure BigQuery credentials."
62
- )
63
-
64
- # Parse credentials
65
- try:
66
- if key_json.strip().startswith("{"):
67
- info = json.loads(key_json)
68
- else:
69
- # Assume it's a file path
70
- with open(key_json, 'r') as f:
71
- info = json.load(f)
72
- except json.JSONDecodeError as e:
73
- raise SQLToolError(f"Invalid JSON in GCP_SERVICE_ACCOUNT_JSON: {e}")
74
- except FileNotFoundError:
75
- raise SQLToolError(f"GCP service account file not found: {key_json}")
76
-
77
- # Validate required fields
78
- required_fields = ["type", "project_id", "private_key", "client_email"]
79
- missing = [f for f in required_fields if f not in info]
80
- if missing:
81
- raise SQLToolError(
82
- f"GCP service account JSON missing required fields: {missing}"
83
- )
84
-
85
- creds = service_account.Credentials.from_service_account_info(info)
86
- project = self.cfg.gcp_project or info.get("project_id")
87
-
88
- if not project:
89
- raise SQLToolError("GCP project ID not specified in config or credentials")
90
-
91
- self.client = bigquery.Client(credentials=creds, project=project)
92
- logger.info(f"BigQuery client initialized for project: {project}")
93
-
94
- except ImportError as e:
95
- raise SQLToolError(
96
- "BigQuery dependencies not installed. "
97
- "Install with: pip install google-cloud-bigquery"
98
- ) from e
99
-
100
- def _init_motherduck(self):
101
- """Initialize MotherDuck/DuckDB client with version validation."""
102
- try:
103
- import duckdb
104
-
105
- # Version compatibility check - be more flexible
106
- version = duckdb.__version__
107
- logger.info(f"DuckDB version: {version}")
108
-
109
- # Warn if not on recommended version, but don't fail
110
- if not version.startswith("1.3"):
111
- logger.warning(
112
- f"DuckDB {version} detected. Recommended: 1.3.x for MotherDuck compatibility. "
113
- "Some features may not work as expected."
114
- )
115
-
116
- # Get configuration
117
- token = (self.cfg.motherduck_token or os.getenv("MOTHERDUCK_TOKEN") or "").strip()
118
- if not token:
119
- raise SQLToolError(
120
- "Missing MOTHERDUCK_TOKEN. "
121
- "Get your token from: https://motherduck.com/docs/key-tasks/authenticating-to-motherduck"
122
- )
123
-
124
- db_name = (self.cfg.motherduck_db or "workspace").strip()
125
- allow_create = os.getenv("ALLOW_CREATE_DB", "true").lower() == "true"
126
-
127
- # Connect based on database name
128
- if db_name in RESERVED_MD_WORKSPACE_NAMES:
129
- # Workspace mode - no specific database context
130
- connection_string = f"md:?motherduck_token={token}"
131
- logger.info("Connecting to MotherDuck workspace")
132
- self.client = duckdb.connect(connection_string)
133
- else:
134
- # Try connecting to specific database
135
- try:
136
- connection_string = f"md:{db_name}?motherduck_token={token}"
137
- logger.info(f"Connecting to MotherDuck database: {db_name}")
138
- self.client = duckdb.connect(connection_string)
139
- except Exception as db_err:
140
- logger.warning(f"Direct connection to '{db_name}' failed: {db_err}")
141
-
142
- # Fallback: connect to workspace and setup database
143
- connection_string = f"md:?motherduck_token={token}"
144
- self.client = duckdb.connect(connection_string)
145
- self._ensure_db_context(db_name, allow_create)
146
-
147
- # Test connection
148
- try:
149
- self.client.execute("SELECT 1").fetchone()
150
- logger.info("MotherDuck connection test successful")
151
- except Exception as e:
152
- raise SQLToolError(f"MotherDuck connection test failed: {e}")
153
-
154
- except ImportError as e:
155
- raise SQLToolError(
156
- "DuckDB not installed. Install with: pip install duckdb"
157
- ) from e
158
-
159
- def _ensure_db_context(self, db_name: str, allow_create: bool):
160
- """
161
- Ensure database context is set for MotherDuck.
162
- Creates database if it doesn't exist and allow_create is True.
163
- """
164
- if db_name in RESERVED_MD_WORKSPACE_NAMES:
165
- return
166
-
167
- safe_name = self._quote_ident(db_name)
168
-
169
- # Try to USE the database first
170
- try:
171
- self.client.execute(f"USE {safe_name};")
172
- logger.info(f"Using existing database: {db_name}")
173
- return
174
- except Exception as use_err:
175
- logger.info(f"Database '{db_name}' not found: {use_err}")
176
-
177
- if not allow_create:
178
- raise SQLToolError(
179
- f"Database '{db_name}' does not exist and ALLOW_CREATE_DB is disabled. "
180
- f"Either create the database manually or set ALLOW_CREATE_DB=true"
181
- )
182
-
183
- # Attempt to create and use the database
184
- try:
185
- logger.info(f"Creating database: {db_name}")
186
- self.client.execute(f"CREATE DATABASE IF NOT EXISTS {safe_name};")
187
- self.client.execute(f"USE {safe_name};")
188
- logger.info(f"Database '{db_name}' created and selected")
189
- except Exception as create_err:
190
- raise SQLToolError(
191
- f"Failed to create database '{db_name}': {create_err}"
192
- ) from create_err
193
-
194
- @staticmethod
195
- def _quote_ident(name: str) -> str:
196
- """
197
- Safely quote SQL identifiers.
198
- Replaces non-alphanumeric characters with underscores.
199
- """
200
- if not name:
201
- return "unnamed"
202
-
203
- # Remove dangerous characters
204
- safe = re.sub(r"[^a-zA-Z0-9_]", "_", name)
205
-
206
- # Ensure it doesn't start with a number
207
- if safe[0].isdigit():
208
- safe = "_" + safe
209
-
210
- return safe
211
-
212
- def _validate_sql(self, sql: str) -> tuple[bool, str]:
213
- """
214
- Validate SQL query for basic safety.
215
- Returns (is_valid, error_message).
216
- """
217
- if not sql or not sql.strip():
218
- return False, "Empty SQL query"
219
-
220
- if len(sql) > MAX_QUERY_LENGTH:
221
- return False, f"Query too long (max {MAX_QUERY_LENGTH} characters)"
222
-
223
- # Dangerous patterns check
224
- sql_lower = sql.lower()
225
-
226
- # Block multiple statements (simple check)
227
- if sql.count(';') > 1:
228
- return False, "Multiple SQL statements not allowed"
229
-
230
- # Block dangerous keywords in non-SELECT queries
231
- dangerous_patterns = [
232
- (r'\bdrop\s+table\b', "DROP TABLE"),
233
- (r'\bdrop\s+database\b', "DROP DATABASE"),
234
- (r'\bdelete\s+from\b', "DELETE FROM"),
235
- (r'\btruncate\b', "TRUNCATE"),
236
- (r'\bexec\s*\(', "EXEC"),
237
- (r'\bexecute\s*\(', "EXECUTE"),
238
- ]
239
-
240
- for pattern, name in dangerous_patterns:
241
- if re.search(pattern, sql_lower):
242
- logger.warning(f"Blocked query with {name} pattern")
243
- return False, f"Query contains blocked operation: {name}"
244
-
245
- return True, ""
246
-
247
- def _nl_to_sql(self, message: str) -> str:
248
- """
249
- Convert natural language to SQL query.
250
-
251
- IMPORTANT: This is a simple heuristic template system.
252
- For production, either:
253
- 1. Replace table/column names with your actual schema, OR
254
- 2. Integrate a proper NL2SQL model (e.g., T5, CodeGen, GPT), OR
255
- 3. Have users write SQL directly
256
-
257
- To customize: Set these environment variables or edit the code:
258
- - SQL_DEFAULT_SCHEMA (default: "analytics")
259
- - SQL_DEFAULT_TABLE (default: "fact_table")
260
- """
261
- m = message.lower()
262
-
263
- # Get configurable defaults
264
- default_schema = os.getenv("SQL_DEFAULT_SCHEMA", "analytics")
265
- default_table = os.getenv("SQL_DEFAULT_TABLE", "fact_table")
266
- full_table = f"{default_schema}.{default_table}"
267
-
268
- # If it's already SQL, return as-is (after validation)
269
- if re.match(r'^\s*select\s', m, re.IGNORECASE):
270
- return message.strip()
271
-
272
- # Special keyword: show tables/schemas
273
- if any(keyword in m for keyword in ["show tables", "list tables", "available tables", "what tables"]):
274
- return """
275
- SELECT table_schema, table_name, table_type
276
- FROM information_schema.tables
277
- WHERE table_schema NOT IN ('information_schema', 'pg_catalog')
278
- ORDER BY table_schema, table_name
279
- LIMIT 100;
280
- """
281
-
282
- if any(keyword in m for keyword in ["show schemas", "list schemas", "available schemas"]):
283
- return """
284
- SELECT DISTINCT table_schema
285
- FROM information_schema.tables
286
- WHERE table_schema NOT IN ('information_schema', 'pg_catalog')
287
- ORDER BY table_schema;
288
- """
289
-
290
- if "show columns" in m or "describe table" in m or "table structure" in m:
291
- # Try to extract table name from message
292
- return f"""
293
- SELECT column_name, data_type, is_nullable
294
- FROM information_schema.columns
295
- WHERE table_schema = '{default_schema}'
296
- ORDER BY ordinal_position
297
- LIMIT 100;
298
- """
299
-
300
- # Template-based generation (customize for your schema)
301
- if "avg" in m or "average" in m:
302
- if "by month" in m or "monthly" in m:
303
- return f"""
304
- SELECT
305
- DATE_TRUNC('month', date_col) AS month,
306
- AVG(metric_col) AS avg_metric
307
- FROM {full_table}
308
- GROUP BY 1
309
- ORDER BY 1 DESC
310
- LIMIT 100;
311
- """
312
-
313
- if "top" in m:
314
- # Extract number if present
315
- match = re.search(r'top\s+(\d+)', m)
316
- limit = match.group(1) if match else "10"
317
- return f"""
318
- SELECT *
319
- FROM {full_table}
320
- ORDER BY metric_col DESC
321
- LIMIT {limit};
322
- """
323
-
324
- if "count" in m:
325
- return f"""
326
- SELECT
327
- category_col,
328
- COUNT(*) AS count
329
- FROM {full_table}
330
- GROUP BY 1
331
- ORDER BY 2 DESC
332
- LIMIT 100;
333
- """
334
-
335
- # Default fallback - show available tables instead of failing
336
- logger.warning(
337
- f"Could not generate specific SQL for query: '{message}'. "
338
- f"Returning list of available tables. "
339
- f"Configure SQL_DEFAULT_SCHEMA and SQL_DEFAULT_TABLE or write SQL directly."
340
- )
341
- return """
342
- SELECT
343
- table_schema,
344
- table_name,
345
- table_type,
346
- 'Run: SELECT * FROM ' || table_schema || '.' || table_name || ' LIMIT 5' as example_query
347
- FROM information_schema.tables
348
- WHERE table_schema NOT IN ('information_schema', 'pg_catalog')
349
- ORDER BY table_schema, table_name
350
- LIMIT 50;
351
- """
352
-
353
- def run(self, message: str) -> pd.DataFrame:
354
  """
355
- Execute SQL query from natural language or SQL statement.
356
-
357
- Args:
358
- message: Natural language query or SQL statement
359
-
360
- Returns:
361
- DataFrame with query results
362
-
363
- Raises:
364
- SQLToolError: If query execution fails
365
  """
366
- try:
367
- # Convert to SQL
368
- sql = self._nl_to_sql(message)
369
- logger.info(f"Generated SQL query (first 200 chars): {sql[:200]}")
370
-
371
- # Validate SQL
372
- is_valid, error_msg = self._validate_sql(sql)
373
- if not is_valid:
374
- raise SQLToolError(f"Invalid SQL query: {error_msg}")
375
-
376
- # Log query attempt
377
- self.tracer.trace_event("sql_query", {
378
- "sql": sql[:1000], # Limit logged SQL length
379
- "backend": self.backend,
380
- "message": message[:500]
381
- })
382
-
383
- # Execute based on backend
384
- if self.backend == "bigquery":
385
- result = self._execute_bigquery(sql)
386
- else: # motherduck
387
- result = self._execute_duckdb(sql)
388
-
389
- # Validate result
390
- if not isinstance(result, pd.DataFrame):
391
- raise SQLToolError("Query did not return a DataFrame")
392
-
393
- # Check result size
394
- if len(result) > MAX_RESULT_ROWS:
395
- logger.warning(f"Result truncated from {len(result)} to {MAX_RESULT_ROWS} rows")
396
- result = result.head(MAX_RESULT_ROWS)
397
-
398
- logger.info(f"Query successful: {len(result)} rows, {len(result.columns)} columns")
399
- self.tracer.trace_event("sql_success", {
400
- "rows": len(result),
401
- "columns": len(result.columns)
402
- })
403
-
404
- return result
405
-
406
- except SQLToolError:
407
- raise
408
- except Exception as e:
409
- error_msg = f"SQL execution failed: {str(e)}"
410
- logger.error(error_msg)
411
- self.tracer.trace_event("sql_error", {"error": error_msg})
412
- raise SQLToolError(error_msg) from e
413
-
414
- def _execute_bigquery(self, sql: str) -> pd.DataFrame:
415
- """Execute query on BigQuery."""
416
- try:
417
- query_job = self.client.query(sql)
418
- df = query_job.to_dataframe()
419
- return df
420
- except Exception as e:
421
- raise SQLToolError(f"BigQuery execution error: {str(e)}") from e
422
-
423
- def _execute_duckdb(self, sql: str) -> pd.DataFrame:
424
- """Execute query on DuckDB/MotherDuck."""
425
- try:
426
- result = self.client.execute(sql)
427
- df = result.fetch_df()
428
- return df
429
- except Exception as e:
430
- raise SQLToolError(f"DuckDB execution error: {str(e)}") from e
431
-
432
- def test_connection(self) -> bool:
433
- """Test database connection."""
434
- try:
435
- test_query = "SELECT 1 AS test"
436
- result = self.run(test_query)
437
- return len(result) == 1 and result.iloc[0, 0] == 1
438
- except Exception as e:
439
- logger.error(f"Connection test failed: {e}")
440
- return False
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # tools/sql_tool.py
2
  import os
3
  import re
4
+ import duckdb
5
+ from typing import Optional, Tuple
 
 
 
 
6
 
7
+ DUCKDB_PATH = os.getenv("DUCKDB_PATH", "alm.duckdb")
8
 
9
+ # Defaults point to your real table; can be overridden via Space secrets
10
+ DEFAULT_SCHEMA = os.getenv("SQL_DEFAULT_SCHEMA", "main")
11
+ DEFAULT_TABLE = os.getenv("SQL_DEFAULT_TABLE", "masterdataset_v")
 
 
 
 
 
12
 
13
+ def _full_table(schema: Optional[str] = None, table: Optional[str] = None) -> str:
14
+ schema = schema or DEFAULT_SCHEMA
15
+ table = table or DEFAULT_TABLE
16
+ return f"{schema}.{table}"
17
 
18
class SQLTool:
    """
    Minimal NL→SQL helper wired to main.masterdataset_v with a DuckDB runner.

    Translates a small set of common natural-language requests into SQL
    against the configured DuckDB database and executes them. Supports use
    as a context manager so the connection is closed deterministically.
    """

    def __init__(self, db_path: Optional[str] = None):
        """Open a DuckDB connection to *db_path* (default: DUCKDB_PATH)."""
        self.db_path = db_path or DUCKDB_PATH
        self.con = duckdb.connect(self.db_path)

    def close(self) -> None:
        """Close the underlying connection; safe to call more than once."""
        if self.con is not None:
            self.con.close()
            self.con = None

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        self.close()
        return False

    def run_sql(self, sql: str):
        """Execute *sql* and return the result as a pandas DataFrame."""
        return self.con.execute(sql).df()

    # -------------------------
    # NL → SQL
    # -------------------------
    def _nl_to_sql(self, message: str, schema: Optional[str] = None, table: Optional[str] = None) -> Tuple[str, str]:
        """
        Returns (sql, rationale). Very small template library covering your common queries.
        Falls back to a row sample when neither a template nor any filter matches.
        """
        full_table = _full_table(schema, table)
        m = message.strip().lower()

        def has_any(txt: str, words) -> bool:
            # True when any of the synonyms appears in the text.
            return any(w in txt for w in words)

        # Extract an explicit "top N" limit, if present.
        limit = None
        m_top = re.search(r"\btop\s+(\d+)", m)
        if m_top:
            limit = int(m_top.group(1))

        # 1) Top N FDs by Portfolio_value
        if has_any(m, ["fd", "fixed deposit", "deposits"]) and has_any(m, ["top", "largest", "biggest"]) and has_any(m, ["portfolio value", "portfolio_value"]):
            n = limit or 10
            sql = f"""
                SELECT contract_number, Portfolio_value, Interest_rate, currency, segments
                FROM {full_table}
                WHERE lower(product) = 'fd'
                ORDER BY Portfolio_value DESC
                LIMIT {n};
            """
            why = f"Top {n} fixed deposits by Portfolio_value from {full_table}"
            return sql, why

        # 2) Top N Assets by Portfolio_value
        if has_any(m, ["asset", "loan", "advances"]) and has_any(m, ["top", "largest", "biggest"]) and has_any(m, ["portfolio value", "portfolio_value"]):
            n = limit or 10
            sql = f"""
                SELECT contract_number, Portfolio_value, Interest_rate, currency, segments
                FROM {full_table}
                WHERE lower(product) = 'assets'
                ORDER BY Portfolio_value DESC
                LIMIT {n};
            """
            why = f"Top {n} assets by Portfolio_value from {full_table}"
            return sql, why

        # 3) Aggregate (SUM/AVG) by segment or currency
        if has_any(m, ["sum", "total", "avg", "average"]) and has_any(m, ["segment", "currency"]):
            agg = "SUM" if has_any(m, ["sum", "total"]) else "AVG"
            dim = "segments" if "segment" in m else "currency"
            sql = f"""
                SELECT {dim}, {agg}(Portfolio_value) AS {agg.lower()}_Portfolio_value
                FROM {full_table}
                GROUP BY 1
                ORDER BY 2 DESC;
            """
            why = f"{agg} Portfolio_value grouped by {dim} from {full_table}"
            return sql, why

        # 4) Filter by product, currency, or segment
        product = None
        if "fd" in m or "deposit" in m:
            product = "fd"
        elif "asset" in m or "loan" in m or "advance" in m:
            product = "assets"

        filters = []
        why_parts = [f"Filtered rows from {full_table}"]

        if product:
            filters.append(f"AND lower(product) = '{product}'")
            why_parts.append(f"product = {product}")

        # currency filter like: "in lkr", "currency usd"
        # (regex pins the value to exactly 3 letters, so interpolation is safe)
        cur_match = re.search(r"\b(currency|in)\s+([a-z]{3})\b", m)
        if cur_match:
            cur = cur_match.group(2).upper()
            filters.append(f"AND upper(currency) = '{cur}'")
            why_parts.append(f"currency = {cur}")

        # segment filter like: "segment retail" or "for corporate"
        # (charset excludes quotes, so the LIKE pattern cannot break out)
        seg_match = re.search(r"(segment|for)\s+([a-z0-9_\- ]+)", m)
        if seg_match:
            seg = seg_match.group(2).strip()
            if seg:
                filters.append(f"AND lower(segments) LIKE '%{seg.lower()}%'")
                why_parts.append(f"segments like '{seg}'")

        # Only use the filter template when at least one filter actually
        # matched; previously `if fallback_sql:` was always true, which made
        # the sample-rows fallback below unreachable.
        if filters:
            parts = [f"SELECT * FROM {full_table} WHERE 1=1"] + filters
            if limit:
                parts.append(f"LIMIT {limit}")
            return " ".join(parts) + ";", "; ".join(why_parts)

        # 5) Super fallback: show sample rows (honor an explicit "top N")
        return (
            f"SELECT * FROM {full_table} LIMIT {limit or 20};",
            f"Default sample from {full_table}",
        )

    # Public helpers
    def query_from_nl(self, message: str):
        """Translate *message* to SQL, run it, and return (df, sql, rationale)."""
        sql, why = self._nl_to_sql(message)
        df = self.run_sql(sql)
        return df, sql, why

    def table_exists(self, schema: Optional[str] = None, table: Optional[str] = None) -> bool:
        """Return True if ``schema.table`` exists in the connected database.

        Uses a parameterized query so schema/table values are never spliced
        into the SQL text (avoids injection via env/config values).
        """
        schema = schema or DEFAULT_SCHEMA
        table = table or DEFAULT_TABLE
        q = (
            "SELECT COUNT(*) AS n FROM information_schema.tables "
            "WHERE table_schema = ? AND table_name = ?;"
        )
        n = self.con.execute(q, [schema, table]).fetchone()[0]
        return n > 0
+ return n > 0