sqlbase.py
# -*- coding: utf-8 -*-
"""
Base class for producing SQL for topics and messages.

------------------------------------------------------------------------------
This file is part of grepros - grep for ROS bag files and live topics.
Released under the BSD License.

@author      Erki Suurjaak
@created     03.01.2022
@modified    28.06.2023
------------------------------------------------------------------------------
"""

import json
import re

import yaml

from ... import api
from ... common import ConsolePrinter, ellipsize, ensure_namespace, import_item, merge_dicts


class SqlMixin(object):
    """
    Base class for producing SQL for topics and messages.

    Can load additional SQL dialects or additional options for existing dialects
    from a YAML/JSON file.
    """

    ## Default SQL dialect used if dialect not specified
    DEFAULT_DIALECT = "sqlite"

    ## Default values for constructor arguments
    DEFAULT_ARGS = dict(META=False, WRITE_OPTIONS={}, MATCH_WRAPPER=None, VERBOSE=False)

    def __init__(self, args=None, **kwargs):
        """
        @param   args                 arguments as namespace or dictionary, case-insensitive
        @param   args.write_options   ```
                                      {"dialect": SQL dialect if not default,
                                       "nesting": true|false to create nested type tables}
                                      ```
        @param   kwargs               any and all arguments as keyword overrides, case-insensitive
        """
        self._args      = ensure_namespace(args, SqlMixin.DEFAULT_ARGS, **kwargs)
        self._topics    = {}  # {(topic, typename, typehash): {name, table_name, view_name, sql, ..}}
        self._types     = {}  # {(typename, typehash): {type, table_name, sql, ..}}
        self._schema    = {}  # {(typename, typehash): {cols}}
        self._sql_cache = {}  # {table: "INSERT INTO table VALUES (%s, ..)"}
        self._dialect   = self._args.WRITE_OPTIONS.get("dialect", self.DEFAULT_DIALECT)
        self._nesting   = self._args.WRITE_OPTIONS.get("nesting")

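    # Illustrative sketch (assumed usage, not from the original docs): a concrete
    # writer or caller would pass dialect options via WRITE_OPTIONS, e.g.
    #   SqlMixin(dict(WRITE_OPTIONS={"dialect": "postgres", "nesting": True}))
    # Omitting "dialect" falls back to DEFAULT_DIALECT ("sqlite").
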
    def validate(self):
        """
        Returns whether arguments are valid.

        Verifies that "dialect-file" is valid and "dialect" contains a supported value, if any.
        """
        return all([self._validate_dialect_file(), self._validate_dialect()])


    def _validate_dialect_file(self):
        """Returns whether "dialect-file" is valid in args.WRITE_OPTIONS."""
        ok = True
        if self._args.WRITE_OPTIONS.get("dialect-file"):
            filename = self._args.WRITE_OPTIONS["dialect-file"]
            try:
                with open(filename) as f:
                    dialects = yaml.safe_load(f.read())
                if any(not isinstance(v, dict) for v in dialects.values()):
                    raise Exception("Each dialect must be a dictionary.")
                for opts in dialects.values():
                    for k, v in list(opts.get("adapters", {}).items()):
                        try: opts["adapters"][k] = import_item(v)
                        except ImportError:
                            ok = False
                            ConsolePrinter.error("Error loading adapter %r for %r "
                                                 "in SQL dialect file %r.", v, k, filename)
                merge_dicts(self.DIALECTS, dialects)
            except Exception as e:
                ok = False
                ConsolePrinter.error("Error reading SQL dialect file %r: %s", filename, e)

        # Populate ROS type aliases like "byte" and "char"
        for opts in self.DIALECTS.values() if ok else ():
            for rostype in list(opts.get("types", {})):
                alias = api.get_type_alias(rostype)
                if alias:
                    opts["types"][alias] = opts["types"][rostype]
                if alias and rostype + "[]" in opts["types"]:
                    opts["types"][alias + "[]"] = opts["types"][rostype + "[]"]

        return ok

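    # Illustrative sketch of a dialect file accepted above (assumed example, not a file
    # shipped with grepros): each top-level key is a dialect name, each value a dictionary
    # of options merged into DIALECTS, e.g.
    #
    #   postgres:                  # extend an existing dialect
    #     maxlen_entity: 63
    #   mydialect:                 # or define a new dialect from scratch
    #     table_template: "CREATE TABLE IF NOT EXISTS {table} ({cols});"
    #     types:    {"int32": "INTEGER", "float64": "DOUBLE"}
    #     adapters: {"uint8[]": "mypackage.to_blob"}   # hypothetical import path
    #     posarg:   "?"
    #
    # Adapter values are resolved with common.import_item() and must be importable callables.
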
    def _validate_dialect(self):
        """Returns whether "dialect" is valid in args.WRITE_OPTIONS."""
        ok = True
        if "dialect" in self._args.WRITE_OPTIONS \
        and self._args.WRITE_OPTIONS["dialect"] not in tuple(filter(bool, self.DIALECTS)):
            ok = False
            ConsolePrinter.error("Unknown dialect for SQL: %r. "
                                 "Choose one of {%s}.",
                                 self._args.WRITE_OPTIONS["dialect"],
                                 "|".join(sorted(filter(bool, self.DIALECTS))))
        return ok


    def close(self):
        """Clears data structures."""
        self._topics.clear()
        self._types.clear()
        self._schema.clear()
        self._sql_cache.clear()

    def _make_topic_data(self, topic, msg, exclude_cols=()):
        """
        Returns full data dictionary for topic, including view name and SQL.

        @param   exclude_cols  list of column names to exclude from view SELECT, if any
        @return                {"name": topic name, "type": message type name as "pkg/Cls",
                                "table_name": message type table name, "view_name": topic view name,
                                "md5": message type definition MD5 hash, "sql": "CREATE VIEW .."}
        """
        with api.TypeMeta.make(msg, topic) as m:
            typename, typehash, typekey = (m.typename, m.typehash, m.typekey)

        table_name = self._types[typekey]["table_name"]
        pkgname, clsname = typename.split("/", 1)
        nameargs = {"topic": topic, "type": typename, "hash": typehash,
                    "package": pkgname, "class": clsname}
        view_name = self._make_entity_name("view", nameargs)

        sqlargs = dict(nameargs, view=quote(view_name), table=quote(table_name, force=True),
                       topic=repr(topic), cols="*")
        if exclude_cols:
            exclude_cols = [x[0] if isinstance(x, (list, tuple)) else x for x in exclude_cols]
            select_cols = [c for c in self._schema[typekey] if c not in exclude_cols]
            sqlargs["cols"] = ", ".join(quote(c) for c in select_cols)
        sql = self._get_dialect_option("view_template").strip().format(**sqlargs)

        return {"name": topic, "type": typename, "md5": typehash,
                "sql": sql, "table_name": table_name, "view_name": view_name}

    def _make_type_data(self, msg, extra_cols=(), rootmsg=None):
        """
        Returns full data dictionary for message type, including table name and SQL.

        @param   rootmsg     top message this message is nested under, if any
        @param   extra_cols  additional table columns, as [(column name, column def)]
        @return              {"type": message type name as "pkg/Cls",
                              "table_name": message type table name,
                              "definition": message type definition,
                              "cols": [(column name, column type)],
                              "md5": message type definition MD5 hash, "sql": "CREATE TABLE .."}
        """
        rootmsg = rootmsg or msg
        with api.TypeMeta.make(msg, root=rootmsg) as m:
            typename, typehash = (m.typename, m.typehash)

        cols = []
        scalars = set(x for x in self._get_dialect_option("types") if x == api.scalar(x))
        for path, value, subtype in api.iter_message_fields(msg, scalars=scalars):
            coltype = self._make_column_type(subtype)
            cols += [(".".join(path), coltype)]
        cols.extend(extra_cols or [])
        cols = list(zip(self._make_column_names([c for c, _ in cols]), [t for _, t in cols]))
        namewidth = 2 + max(len(n) for n, _ in cols)
        coldefs = ["%s %s" % (quote(n).ljust(namewidth), t) for n, t in cols]

        pkgname, clsname = typename.split("/", 1)
        nameargs = {"type": typename, "hash": typehash, "package": pkgname, "class": clsname}
        table_name = self._make_entity_name("table", nameargs)

        sqlargs = dict(nameargs, table=quote(table_name), cols="\n %s\n" % ",\n ".join(coldefs))
        sql = self._get_dialect_option("table_template").strip().format(**sqlargs)
        return {"type": typename, "md5": typehash,
                "definition": api.TypeMeta.make(msg).definition,
                "table_name": table_name, "cols": cols, "sql": sql}

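    # Illustrative sketch (assumed example, not from the original docs): with the
    # "postgres" dialect and a std_msgs/Bool message, no extra columns, the returned
    # "sql" is roughly:
    #
    #   CREATE TABLE IF NOT EXISTS "std_msgs/Bool" (
    #    "data" BOOLEAN
    #   );
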
    def _make_topic_insert_sql(self, topic, msg):
        """Returns ("INSERT ..", [args]) for inserting into topics-table."""
        POSARG = self._get_dialect_option("posarg")
        topickey = api.TypeMeta.make(msg, topic).topickey
        tdata = self._topics[topickey]

        sql = self._get_dialect_option("insert_topic").strip().replace("%s", POSARG)
        args = [tdata[k] for k in ("name", "type", "md5", "table_name", "view_name")]
        return sql, args


    def _make_type_insert_sql(self, msg):
        """Returns ("INSERT ..", [args]) for inserting into types-table."""
        POSARG = self._get_dialect_option("posarg")
        typekey = api.TypeMeta.make(msg).typekey
        tdata = self._types[typekey]

        sql = self._get_dialect_option("insert_type").strip().replace("%s", POSARG)
        args = [tdata[k] for k in ("type", "definition", "md5", "table_name")]
        return sql, args

    def _make_message_insert_sql(self, topic, msg, extra_cols=()):
        """
        Returns ("INSERT ..", [args]) for inserting into message type table.

        @param   extra_cols  list of additional table columns, as [(name, value)]
        """
        typekey = api.TypeMeta.make(msg, topic).typekey
        table_name = self._types[typekey]["table_name"]
        sql, cols, args = self._sql_cache.get(table_name), [], []

        scalars = set(x for x in self._get_dialect_option("types") if x == api.scalar(x))
        for p, v, t in api.iter_message_fields(msg, scalars=scalars):
            if not sql: cols.append(".".join(p))
            args.append(self._make_column_value(v, t))
        args = tuple(args) + tuple(v for _, v in extra_cols)

        if not sql:
            POSARG = self._get_dialect_option("posarg")
            if extra_cols: cols.extend(c for c, _ in extra_cols)
            sql = "INSERT INTO %s (%s) VALUES (%s)" % \
                  (quote(table_name), ", ".join(map(quote, cols)),
                   ", ".join([POSARG] * len(args)))
            self._sql_cache[table_name] = sql

        return sql, args

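    # Illustrative sketch (assumed example): for a std_msgs/Bool message under the
    # default "sqlite" dialect (posarg "?"), with its type table already registered
    # in self._types and no extra columns, this yields roughly:
    #   sql  == 'INSERT INTO "std_msgs/Bool" ("data") VALUES (?)'
    #   args == (True, )   # value of the "data" field
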
    def _make_update_sql(self, table, values, where=()):
        """Returns ("UPDATE ..", [args])."""
        POSARG = self._get_dialect_option("posarg")
        sql, args, sets, filters = "UPDATE %s SET " % quote(table), [], [], []
        for lst, vals in [(sets, values), (filters, where)]:
            for k, v in vals.items() if isinstance(vals, dict) else vals:
                lst.append("%s = %s" % (quote(k), POSARG))
                args.append(self._make_column_value(v))
        sql += ", ".join(sets) + (" WHERE " if filters else "") + " AND ".join(filters)
        return sql, args

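    # Illustrative sketch (assumed example), with the default "sqlite" dialect (posarg "?"):
    #   self._make_update_sql("types", {"nested_tables": "{}"}, {"id": 3})
    # returns roughly:
    #   ('UPDATE types SET nested_tables = ? WHERE id = ?', ['{}', 3])
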
    def _make_entity_name(self, category, args):
        """
        Returns valid unique name for table/view.

        @param   args  format arguments for table/view name template
        """
        name = self._get_dialect_option("%s_name_template" % category).format(**args)
        existing = set(sum(([x["table_name"], x.get("view_name")]
                            for dct in (self._topics, self._types)
                            for x in dct.values()), []))
        return self._make_name("entity", name, existing)


    def _make_name(self, category, name, existing=()):
        """
        Returns a valid unique name for table/view/column.

        Replaces invalid characters and constrains length.
        If name already exists, appends counter like " (2)".
        """
        MAXLEN_ARG   = "maxlen_column" if "column" == category else "maxlen_entity"
        MAXLEN       = self._get_dialect_option(MAXLEN_ARG)
        INVALID_RGX  = self._get_dialect_option("invalid_char_regex")
        INVALID_REPL = self._get_dialect_option("invalid_char_repl")
        if not MAXLEN and not INVALID_RGX: return name

        name1 = re.sub(INVALID_RGX, INVALID_REPL, name) if INVALID_RGX else name
        name2 = ellipsize(name1, MAXLEN)
        counter = 2
        while name2 in existing:
            suffix = " (%s)" % counter
            name2 = ellipsize(name1, MAXLEN - len(suffix)) + suffix
            counter += 1
        return name2

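    # Illustrative note (sketch, exact output depends on common.ellipsize()): with the
    # "postgres" dialect (maxlen_entity 63), an over-long table/view name is shortened
    # to 63 characters, and a name that is already taken is re-shortened to leave room
    # for a counter suffix such as " (2)".
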
    def _make_column_names(self, col_names):
        """Returns valid unique names for table columns."""
        result = []
        for name in col_names:
            result.append(self._make_name("column", name, result))
        return list(result)


    def _make_column_value(self, value, typename=None):
        """Returns column value suitable for inserting to database."""
        if not typename: return value

        v = value
        if isinstance(v, (list, tuple)):
            scalartype = api.scalar(typename)
            if scalartype in api.ROS_TIME_TYPES:
                v = [self._convert_time(x) for x in v]
            elif scalartype not in api.ROS_BUILTIN_TYPES:
                if self._nesting: v = []
                else: v = [api.message_to_dict(x) for x in v]
            else:
                v = self._convert_column_value(v, typename)
        elif api.is_ros_time(v):
            v = self._convert_time_value(v, typename)
        elif typename not in api.ROS_BUILTIN_TYPES:
            v = json.dumps(api.message_to_dict(v))
        else:
            v = self._convert_column_value(v, typename)
        return v

    def _make_column_type(self, typename, fallback=None):
        """
        Returns column type for SQL.

        @param   fallback  fallback typename to use for lookup if no mapping for typename
        """
        TYPES         = self._get_dialect_option("types")
        ARRAYTEMPLATE = self._get_dialect_option("arraytype_template")
        DEFAULTTYPE   = self._get_dialect_option("defaulttype")

        scalartype = api.scalar(typename)
        timetype   = api.get_ros_time_category(scalartype)
        coltype    = TYPES.get(typename) or TYPES.get(api.canonical(typename, unbounded=True))

        if not coltype and scalartype in TYPES:
            coltype = ARRAYTEMPLATE.format(type=TYPES[scalartype])
        if not coltype and timetype in TYPES:
            if typename != scalartype:
                coltype = ARRAYTEMPLATE.format(type=TYPES[timetype])
            else:
                coltype = TYPES[timetype]
        if not coltype and fallback:
            coltype = self._make_column_type(fallback)
        if not coltype:
            coltype = DEFAULTTYPE or quote(typename)
        return coltype

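    # Illustrative examples (assumed), using the "postgres" dialect options in DIALECTS below:
    #   _make_column_type("float64")             -> "DOUBLE PRECISION"
    #   _make_column_type("uint8[]")             -> "BYTEA"       (direct mapping)
    #   _make_column_type("int32[]")             -> "INTEGER[]"   (via arraytype_template)
    #   _make_column_type("geometry_msgs/Point") -> "JSONB"       (defaulttype fallback)
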
    def _convert_column_value(self, value, typename):
        """Returns ROS value converted to dialect value."""
        ADAPTERS = self._get_dialect_option("adapters")
        if not ADAPTERS: return value

        adapter, iterate = ADAPTERS.get(typename), False
        if not adapter and isinstance(value, (list, tuple)):
            adapter, iterate = ADAPTERS.get(api.scalar(typename)), True
        if adapter:
            value = [adapter(x) for x in value] if iterate else adapter(value)
        return value


    def _convert_time_value(self, value, typename):
        """Returns ROS time/duration value converted to dialect value."""
        adapter = self._get_dialect_option("adapters").get(typename)
        if adapter:
            try: is_int = issubclass(adapter, int)
            except Exception: is_int = False
            v = api.to_sec(value) if is_int else "%d.%09d" % api.to_sec_nsec(value)
            result = adapter(v)
        else:
            result = api.to_decimal(value)
        return result


    def _get_dialect_option(self, option):
        """Returns option for current SQL dialect, falling back to default dialect."""
        return self.DIALECTS[self._dialect].get(option, self.DIALECTS[None].get(option))


    ## Supported SQL dialects and options
    DIALECTS = {

        None: {
            # CREATE TABLE template, args: table, cols, type, hash, package, class
            "table_template": "CREATE TABLE IF NOT EXISTS {table} ({cols});",
            # CREATE VIEW template, args: view, cols, table, topic, type, hash, package, class
            "view_template": """
DROP VIEW IF EXISTS {view};

CREATE VIEW {view} AS
SELECT {cols}
FROM {table}
WHERE _topic = {topic};""",
            "table_name_template": "{type}",    # args: type, hash, package, class
            "view_name_template":  "{topic}",   # args: topic, type, hash, package, class
            "types": {},                        # Mapping between ROS and SQL common types
            "adapters": {},                     # Mapping between ROS types and callable converters
            "defaulttype": None,                # Fallback SQL type if no mapping for ROS type
            "arraytype_template": "{type}[]",   # Array type template, args: type
            "maxlen_entity": 0,                 # Maximum table/view name length, 0 disables
            "maxlen_column": 0,                 # Maximum column name length, 0 disables
            "invalid_char_regex": None,         # Regex for matching invalid characters in name
            "invalid_char_repl": "__",          # Replacement for invalid characters in name

            "insert_topic": """
INSERT INTO topics (name, type, md5, table_name, view_name)
VALUES (%s, %s, %s, %s, %s);""",
            "insert_type": """
INSERT INTO types (type, definition, md5, table_name)
VALUES (%s, %s, %s, %s);""",
            "posarg": "%s",
        },

        "sqlite": {
            "posarg": "?",
            "base_schema": """
CREATE TABLE IF NOT EXISTS messages (
  id INTEGER PRIMARY KEY,
  topic_id INTEGER NOT NULL,
  timestamp INTEGER NOT NULL,
  data BLOB NOT NULL,

  topic TEXT NOT NULL,
  type TEXT NOT NULL,
  dt TIMESTAMP NOT NULL,
  yaml TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS topics (
  id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
  name TEXT NOT NULL,
  type TEXT NOT NULL,
  serialization_format TEXT DEFAULT "cdr",
  offered_qos_profiles TEXT DEFAULT "",

  md5 TEXT NOT NULL,
  table_name TEXT NOT NULL,
  view_name TEXT
);

CREATE TABLE IF NOT EXISTS types (
  id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
  type TEXT NOT NULL,
  definition TEXT NOT NULL,
  md5 TEXT NOT NULL,
  table_name TEXT NOT NULL,
  nested_tables JSON
);

CREATE INDEX IF NOT EXISTS timestamp_idx ON messages (timestamp ASC);

PRAGMA journal_mode = WAL;
""",
            "insert_message": """
INSERT INTO messages (topic_id, timestamp, data, topic, type, dt, yaml)
VALUES (:topic_id, :timestamp, :data, :topic, :type, :dt, :yaml)
""",
        },

        "postgres": {
            "types": {
                "int8": "SMALLINT", "int16": "SMALLINT", "int32": "INTEGER",
                "uint8": "SMALLINT", "uint16": "INTEGER", "uint32": "BIGINT",
                "int64": "BIGINT", "uint64": "BIGINT", "bool": "BOOLEAN",
                "string": "TEXT", "wstring": "TEXT", "uint8[]": "BYTEA",
                "float32": "REAL", "float64": "DOUBLE PRECISION",
            },
            "defaulttype": "JSONB",
            "maxlen_entity": 63,
            "maxlen_column": 63,

            "insert_topic": """
INSERT INTO topics (name, type, md5, table_name, view_name)
VALUES (%s, %s, %s, %s, %s)
RETURNING id;""",
            "insert_type": """
INSERT INTO types (type, definition, md5, table_name)
VALUES (%s, %s, %s, %s)
RETURNING id;""",
            "base_schema": """
CREATE TABLE IF NOT EXISTS topics (
  id BIGSERIAL PRIMARY KEY,
  name TEXT NOT NULL,
  type TEXT NOT NULL,
  md5 TEXT NOT NULL,
  table_name TEXT NOT NULL,
  view_name TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS types (
  id BIGSERIAL PRIMARY KEY,
  type TEXT NOT NULL,
  definition TEXT NOT NULL,
  md5 TEXT NOT NULL,
  table_name TEXT NOT NULL,
  nested_tables JSON
);""",
        },

        "clickhouse": {
            "table_template": "CREATE TABLE IF NOT EXISTS {table} ({cols}) ENGINE = ENGINE;",
            "types": {
                "int8": "Int8", "int16": "Int16", "int32": "Int32",
                "uint8": "UInt8", "uint16": "UInt16", "uint32": "UInt32",
                "int64": "Int64", "uint64": "UInt64", "bool": "UInt8",
                "float32": "Float32", "float64": "Float64", "uint8[]": "String",
                "string": "String", "wstring": "String",
            },
            "defaulttype": "String",
            "arraytype_template": "Array({type})",
        },
    }


    ## Words that need quoting when used as table, view or column names in SQL
    KEYWORDS = [
        "A", "ABORT", "ABS", "ABSOLUTE", "ACCESS", "ACTION", "ADA", "ADD", "ADMIN", "AFTER",
        "AGGREGATE", "ALIAS", "ALL", "ALLOCATE", "ALSO", "ALTER", "ALWAYS", "ANALYSE", "ANALYZE",
        "AND", "ANY", "ARE", "ARRAY", "AS", "ASC", "ASENSITIVE", "ASSERTION", "ASSIGNMENT",
        "ASYMMETRIC", "AT", "ATOMIC", "ATTACH", "ATTRIBUTE", "ATTRIBUTES", "AUDIT",
        "AUTHORIZATION", "AUTOINCREMENT", "AUTO_INCREMENT", "AVG", "AVG_ROW_LENGTH", "BACKUP",
        "BACKWARD", "BEFORE", "BEGIN", "BERNOULLI", "BETWEEN", "BIGINT", "BINARY", "BIT", "BITVAR",
        "BIT_LENGTH", "BLOB", "BOOL", "BOOLEAN", "BOTH", "BREADTH", "BREAK", "BROWSE", "BULK",
        "BY", "C", "CACHE", "CALL", "CALLED", "CARDINALITY", "CASCADE", "CASCADED", "CASE", "CAST",
        "CATALOG", "CATALOG_NAME", "CEIL", "CEILING", "CHAIN", "CHANGE", "CHAR", "CHARACTER",
        "CHARACTERISTICS", "CHARACTERS", "CHARACTER_LENGTH", "CHARACTER_SET_CATALOG",
        "CHARACTER_SET_NAME", "CHARACTER_SET_SCHEMA", "CHAR_LENGTH", "CHECK", "CHECKED",
        "CHECKPOINT", "CHECKSUM", "CLASS", "CLASS_ORIGIN", "CLOB", "CLOSE", "CLUSTER", "CLUSTERED",
        "COALESCE", "COBOL", "COLLATE", "COLLATION", "COLLATION_CATALOG", "COLLATION_NAME",
        "COLLATION_SCHEMA", "COLLECT", "COLUMN", "COLUMNS", "COLUMN_NAME", "COMMAND_FUNCTION",
        "COMMAND_FUNCTION_CODE", "COMMENT", "COMMIT", "COMMITTED", "COMPLETION", "COMPRESS",
        "COMPUTE", "CONDITION", "CONDITION_NUMBER", "CONNECT", "CONNECTION", "CONNECTION_NAME",
        "CONSTRAINT", "CONSTRAINTS", "CONSTRAINT_CATALOG", "CONSTRAINT_NAME", "CONSTRAINT_SCHEMA",
        "CONSTRUCTOR", "CONTAINS", "CONTAINSTABLE", "CONTINUE", "CONVERSION", "CONVERT", "COPY",
        "CORR", "CORRESPONDING", "COUNT", "COVAR_POP", "COVAR_SAMP", "CREATE", "CREATEDB",
        "CREATEROLE", "CREATEUSER", "CROSS", "CSV", "CUBE", "CUME_DIST", "CURRENT", "CURRENT_DATE",
        "CURRENT_DEFAULT_TRANSFORM_GROUP", "CURRENT_PATH", "CURRENT_ROLE", "CURRENT_TIME",
        "CURRENT_TIMESTAMP", "CURRENT_TRANSFORM_GROUP_FOR_TYPE", "CURRENT_USER", "CURSOR",
        "CURSOR_NAME", "CYCLE", "DATA", "DATABASE", "DATABASES", "DATE", "DATETIME",
        "DATETIME_INTERVAL_CODE", "DATETIME_INTERVAL_PRECISION", "DAY", "DAYOFMONTH", "DAYOFWEEK",
        "DAYOFYEAR", "DAY_HOUR", "DAY_MICROSECOND", "DAY_MINUTE", "DAY_SECOND", "DBCC",
        "DEALLOCATE", "DEC", "DECIMAL", "DECLARE", "DEFAULT", "DEFAULTS", "DEFERRABLE", "DEFERRED",
        "DEFINED", "DEFINER", "DEGREE", "DELAYED", "DELAY_KEY_WRITE", "DELETE", "DELIMITER",
        "DELIMITERS", "DENSE_RANK", "DENY", "DEPTH", "DEREF", "DERIVED", "DESC", "DESCRIBE",
        "DESCRIPTOR", "DESTROY", "DESTRUCTOR", "DETACH", "DETERMINISTIC", "DIAGNOSTICS",
        "DICTIONARY", "DISABLE", "DISCONNECT", "DISK", "DISPATCH", "DISTINCT", "DISTINCTROW",
        "DISTRIBUTED", "DIV", "DO", "DOMAIN", "DOUBLE", "DROP", "DUAL", "DUMMY", "DUMP", "DYNAMIC",
        "DYNAMIC_FUNCTION", "DYNAMIC_FUNCTION_CODE", "EACH", "ELEMENT", "ELSE", "ELSEIF", "ENABLE",
        "ENCLOSED", "ENCODING", "ENCRYPTED", "END", "END-EXEC", "ENUM", "EQUALS", "ERRLVL",
        "ESCAPE", "ESCAPED", "EVERY", "EXCEPT", "EXCEPTION", "EXCLUDE", "EXCLUDING", "EXCLUSIVE",
        "EXEC", "EXECUTE", "EXISTING", "EXISTS", "EXIT", "EXP", "EXPLAIN", "EXTERNAL", "EXTRACT",
        "FALSE", "FETCH", "FIELDS", "FILE", "FILLFACTOR", "FILTER", "FINAL", "FIRST", "FLOAT",
        "FLOAT4", "FLOAT8", "FLOOR", "FLUSH", "FOLLOWING", "FOR", "FORCE", "FOREIGN", "FORTRAN",
        "FORWARD", "FOUND", "FREE", "FREETEXT", "FREETEXTTABLE", "FREEZE", "FROM", "FULL",
        "FULLTEXT", "FUNCTION", "FUSION", "G", "GENERAL", "GENERATED", "GET", "GLOBAL", "GO",
        "GOTO", "GRANT", "GRANTED", "GRANTS", "GREATEST", "GROUP", "GROUPING", "HANDLER", "HAVING",
        "HEADER", "HEAP", "HIERARCHY", "HIGH_PRIORITY", "HOLD", "HOLDLOCK", "HOST", "HOSTS",
        "HOUR", "HOUR_MICROSECOND", "HOUR_MINUTE", "HOUR_SECOND", "IDENTIFIED", "IDENTITY",
        "IDENTITYCOL", "IDENTITY_INSERT", "IF", "IGNORE", "ILIKE", "IMMEDIATE", "IMMUTABLE",
        "IMPLEMENTATION", "IMPLICIT", "IN", "INCLUDE", "INCLUDING", "INCREMENT", "INDEX",
        "INDICATOR", "INFILE", "INFIX", "INHERIT", "INHERITS", "INITIAL", "INITIALIZE",
        "INITIALLY", "INNER", "INOUT", "INPUT", "INSENSITIVE", "INSERT", "INSERT_ID", "INSTANCE",
        "INSTANTIABLE", "INSTEAD", "INT", "INT1", "INT2", "INT3", "INT4", "INT8", "INTEGER",
        "INTERSECT", "INTERSECTION", "INTERVAL", "INTO", "INVOKER", "IS", "ISAM", "ISNULL",
        "ISOLATION", "ITERATE", "JOIN", "K", "KEY", "KEYS", "KEY_MEMBER", "KEY_TYPE", "KILL",
        "LANCOMPILER", "LANGUAGE", "LARGE", "LAST", "LAST_INSERT_ID", "LATERAL", "LEAD", "LEADING",
        "LEAST", "LEAVE", "LEFT", "LENGTH", "LESS", "LEVEL", "LIKE", "LIMIT", "LINENO", "LINES",
        "LISTEN", "LN", "LOAD", "LOCAL", "LOCALTIME", "LOCALTIMESTAMP", "LOCATION", "LOCATOR",
        "LOCK", "LOGIN", "LOGS", "LONG", "LONGBLOB", "LONGTEXT", "LOOP", "LOWER", "LOW_PRIORITY",
        "M", "MAP", "MATCH", "MATCHED", "MAX", "MAXEXTENTS", "MAXVALUE", "MAX_ROWS", "MEDIUMBLOB",
        "MEDIUMINT", "MEDIUMTEXT", "MEMBER", "MERGE", "MESSAGE_LENGTH", "MESSAGE_OCTET_LENGTH",
        "MESSAGE_TEXT", "METHOD", "MIDDLEINT", "MIN", "MINUS", "MINUTE", "MINUTE_MICROSECOND",
        "MINUTE_SECOND", "MINVALUE", "MIN_ROWS", "MLSLABEL", "MOD", "MODE", "MODIFIES", "MODIFY",
        "MODULE", "MONTH", "MONTHNAME", "MORE", "MOVE", "MULTISET", "MUMPS", "MYISAM", "NAME",
        "NAMES", "NATIONAL", "NATURAL", "NCHAR", "NCLOB", "NESTING", "NEW", "NEXT", "NO",
        "NOAUDIT", "NOCHECK", "NOCOMPRESS", "NOCREATEDB", "NOCREATEROLE", "NOCREATEUSER",
        "NOINHERIT", "NOLOGIN", "NONCLUSTERED", "NONE", "NORMALIZE", "NORMALIZED", "NOSUPERUSER",
        "NOT", "NOTHING", "NOTIFY", "NOTNULL", "NOWAIT", "NO_WRITE_TO_BINLOG", "NULL", "NULLABLE",
        "NULLIF", "NULLS", "NUMBER", "NUMERIC", "OBJECT", "OCTETS", "OCTET_LENGTH", "OF", "OFF",
        "OFFLINE", "OFFSET", "OFFSETS", "OIDS", "OLD", "ON", "ONLINE", "ONLY", "OPEN",
        "OPENDATASOURCE", "OPENQUERY", "OPENROWSET", "OPENXML", "OPERATION", "OPERATOR",
        "OPTIMIZE", "OPTION", "OPTIONALLY", "OPTIONS", "OR", "ORDER", "ORDERING", "ORDINALITY",
        "OTHERS", "OUT", "OUTER", "OUTFILE", "OUTPUT", "OVER", "OVERLAPS", "OVERLAY", "OVERRIDING",
        "OWNER", "PACK_KEYS", "PAD", "PARAMETER", "PARAMETERS", "PARAMETER_MODE", "PARAMETER_NAME",
        "PARAMETER_ORDINAL_POSITION", "PARAMETER_SPECIFIC_CATALOG", "PARAMETER_SPECIFIC_NAME",
        "PARAMETER_SPECIFIC_SCHEMA", "PARTIAL", "PARTITION", "PASCAL", "PASSWORD", "PATH",
        "PCTFREE", "PERCENT", "PERCENTILE_CONT", "PERCENTILE_DISC", "PERCENT_RANK", "PLACING",
        "PLAN", "PLI", "POSITION", "POSTFIX", "POWER", "PRAGMA", "PRECEDING", "PRECISION",
        "PREFIX", "PREORDER", "PREPARE", "PREPARED", "PRESERVE", "PRIMARY", "PRINT", "PRIOR",
        "PRIVILEGES", "PROC", "PROCEDURAL", "PROCEDURE", "PROCESS", "PROCESSLIST", "PUBLIC",
        "PURGE", "QUOTE", "RAID0", "RAISE", "RAISERROR", "RANGE", "RANK", "RAW", "READ", "READS",
        "READTEXT", "REAL", "RECHECK", "RECONFIGURE", "RECURSIVE", "REF", "REFERENCES",
        "REFERENCING", "REGEXP", "REGR_AVGX", "REGR_AVGY", "REGR_COUNT", "REGR_INTERCEPT",
        "REGR_R2", "REGR_SLOPE", "REGR_SXX", "REGR_SXY", "REGR_SYY", "REINDEX", "RELATIVE",
        "RELEASE", "RELOAD", "RENAME", "REPEAT", "REPEATABLE", "REPLACE", "REPLICATION", "REQUIRE",
        "RESET", "RESIGNAL", "RESOURCE", "RESTART", "RESTORE", "RESTRICT", "RESULT", "RETURN",
        "RETURNED_CARDINALITY", "RETURNED_LENGTH", "RETURNED_OCTET_LENGTH", "RETURNED_SQLSTATE",
        "RETURNS", "REVOKE", "RIGHT", "RLIKE", "ROLE", "ROLLBACK", "ROLLUP", "ROUTINE",
        "ROUTINE_CATALOG", "ROUTINE_NAME", "ROUTINE_SCHEMA", "ROW", "ROWCOUNT", "ROWGUIDCOL",
        "ROWID", "ROWNUM", "ROWS", "ROW_COUNT", "ROW_NUMBER", "RULE", "SAVE", "SAVEPOINT", "SCALE",
        "SCHEMA", "SCHEMAS", "SCHEMA_NAME", "SCOPE", "SCOPE_CATALOG", "SCOPE_NAME", "SCOPE_SCHEMA",
        "SCROLL", "SEARCH", "SECOND", "SECOND_MICROSECOND", "SECTION", "SECURITY", "SELECT",
        "SELF", "SENSITIVE", "SEPARATOR", "SEQUENCE", "SERIALIZABLE", "SERVER_NAME", "SESSION",
        "SESSION_USER", "SET", "SETOF", "SETS", "SETUSER", "SHARE", "SHOW", "SHUTDOWN", "SIGNAL",
        "SIMILAR", "SIMPLE", "SIZE", "SMALLINT", "SOME", "SONAME", "SOURCE", "SPACE", "SPATIAL",
        "SPECIFIC", "SPECIFICTYPE", "SPECIFIC_NAME", "SQL", "SQLCA", "SQLCODE", "SQLERROR",
        "SQLEXCEPTION", "SQLSTATE", "SQLWARNING", "SQL_BIG_RESULT", "SQL_BIG_SELECTS",
        "SQL_BIG_TABLES", "SQL_CALC_FOUND_ROWS", "SQL_LOG_OFF", "SQL_LOG_UPDATE",
        "SQL_LOW_PRIORITY_UPDATES", "SQL_SELECT_LIMIT", "SQL_SMALL_RESULT", "SQL_WARNINGS", "SQRT",
        "SSL", "STABLE", "START", "STARTING", "STATE", "STATEMENT", "STATIC", "STATISTICS",
        "STATUS", "STDDEV_POP", "STDDEV_SAMP", "STDIN", "STDOUT", "STORAGE", "STRAIGHT_JOIN",
        "STRICT", "STRING", "STRUCTURE", "STYLE", "SUBCLASS_ORIGIN", "SUBLIST", "SUBMULTISET",
        "SUBSTRING", "SUCCESSFUL", "SUM", "SUPERUSER", "SYMMETRIC", "SYNONYM", "SYSDATE", "SYSID",
        "SYSTEM", "SYSTEM_USER", "TABLE", "TABLES", "TABLESAMPLE", "TABLESPACE", "TABLE_NAME",
        "TEMP", "TEMPLATE", "TEMPORARY", "TERMINATE", "TERMINATED", "TEXT", "TEXTSIZE", "THAN",
        "THEN", "TIES", "TIME", "TIMESTAMP", "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TINYBLOB",
        "TINYINT", "TINYTEXT", "TO", "TOAST", "TOP", "TOP_LEVEL_COUNT", "TRAILING", "TRAN",
        "TRANSACTION", "TRANSACTIONS_COMMITTED", "TRANSACTIONS_ROLLED_BACK", "TRANSACTION_ACTIVE",
        "TRANSFORM", "TRANSFORMS", "TRANSLATE", "TRANSLATION", "TREAT", "TRIGGER",
        "TRIGGER_CATALOG", "TRIGGER_NAME", "TRIGGER_SCHEMA", "TRIM", "TRUE", "TRUNCATE", "TRUSTED",
        "TSEQUAL", "TYPE", "UESCAPE", "UID", "UNBOUNDED", "UNCOMMITTED", "UNDER", "UNDO",
        "UNENCRYPTED", "UNION", "UNIQUE", "UNKNOWN", "UNLISTEN", "UNLOCK", "UNNAMED", "UNNEST",
        "UNSIGNED", "UNTIL", "UPDATE", "UPDATETEXT", "UPPER", "USAGE", "USE", "USER",
        "USER_DEFINED_TYPE_CATALOG", "USER_DEFINED_TYPE_CODE", "USER_DEFINED_TYPE_NAME",
        "USER_DEFINED_TYPE_SCHEMA", "USING", "UTC_DATE", "UTC_TIME", "UTC_TIMESTAMP", "VACUUM",
        "VALID", "VALIDATE", "VALIDATOR", "VALUE", "VALUES", "VARBINARY", "VARCHAR", "VARCHAR2",
        "VARCHARACTER", "VARIABLE", "VARIABLES", "VARYING", "VAR_POP", "VAR_SAMP", "VERBOSE",
        "VIEW", "VOLATILE", "WAITFOR", "WHEN", "WHENEVER", "WHERE", "WHILE", "WIDTH_BUCKET",
        "WINDOW", "WITH", "WITHIN", "WITHOUT", "WORK", "WRITE", "WRITETEXT", "X509", "XOR", "YEAR",
        "YEAR_MONTH", "ZEROFILL", "ZONE",
    ]


def quote(name, force=False):
    """
    Returns name in quotes and proper-escaped for SQL queries.

    @param   force  quote even if name does not need quoting (starts with a letter,
                    contains only alphanumerics, and is not a reserved keyword)
    """
    result = name
    if force or result.upper() in SqlMixin.KEYWORDS \
    or re.search(r"(^[\W\d])|(?=\W)", result, re.U):
        result = '"%s"' % result.replace('"', '""')
    return result

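# Illustrative examples of quote() behavior (assumed, not from the original docs):
#   quote("xyz")             ->  'xyz'              (no quoting needed)
#   quote("data")            ->  '"data"'           (reserved keyword)
#   quote("std_msgs/Bool")   ->  '"std_msgs/Bool"'  (contains non-word characters)
#   quote("2d_map")          ->  '"2d_map"'         (starts with a digit)
#   quote('a"b')             ->  '"a""b"'           (embedded quotes are doubled)
#   quote("xyz", force=True) ->  '"xyz"'
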
__all__ = ["SqlMixin", "quote"]