Package Gnumed :: Package pycommon :: Module gmPG2
[frames] | [no frames]

Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Generator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  # $Source: /cvsroot/gnumed/gnumed/gnumed/client/pycommon/gmPG2.py,v $ 
  15  __version__ = "$Revision: 1.127 $" 
  16  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  17  __license__ = 'GPL (details at http://www.gnu.org)' 
  18   
  19  ### imports ### 
  20  # stdlib 
  21  import time, locale, sys, re as regex, os, codecs, types, datetime as pydt, logging, locale 
  22   
  23  # GNUmed 
  24  if __name__ == '__main__': 
  25          sys.path.insert(0, '../../') 
  26  from Gnumed.pycommon import gmLoginInfo, gmExceptions, gmDateTime, gmBorg, gmI18N, gmLog2 
  27   
  28  _log = logging.getLogger('gm.db') 
  29  _log.info(__version__) 
  30   
  31  # 3rd party 
  32  try: 
  33          import psycopg2 as dbapi 
  34  except ImportError: 
  35          _log.exception("Python database adapter psycopg2 not found.") 
  36          print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server." 
  37          raise 
  38  ### imports ### 
  39   
  40   
  41  _log.info('psycopg2 version: %s' % dbapi.__version__) 
  42  _log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle)) 
  43  if not (float(dbapi.apilevel) >= 2.0): 
  44          raise ImportError('gmPG2: supported DB-API level too low') 
  45  if not (dbapi.threadsafety > 0): 
  46          raise ImportError('gmPG2: lacking minimum thread safety in psycopg2') 
  47  if not (dbapi.paramstyle == 'pyformat'): 
  48          raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2') 
  49  try: 
  50          dbapi.__version__.index('dt') 
  51  except ValueError: 
  52          raise ImportError('gmPG2: lacking datetime support in psycopg2') 
  53  try: 
  54          dbapi.__version__.index('ext') 
  55  except ValueError: 
  56          raise ImportError('gmPG2: lacking extensions support in psycopg2') 
  57  try: 
  58          dbapi.__version__.index('pq3') 
  59  except ValueError: 
  60          raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2') 
  61   
  62  import psycopg2.extras 
  63  import psycopg2.extensions 
  64  import psycopg2.pool 
  65  import psycopg2.errorcodes as sql_error_codes 
  66   
  67  # ======================================================================= 
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# things timezone
_default_client_timezone = None                 # default time zone for connections
_sql_set_timezone = None                        # SQL template used to activate the time zone on a connection
_timestamp_template = "cast('%s' as timestamp with time zone)"          # MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

_default_dsn = None                             # default DSN string for new connections
_default_login = None                           # default login parameters (set via set_default_login())

postgresql_version_string = None
postgresql_version = None                       # accuracy: major.minor

__ro_conn_pool = None                           # pool of shared read-only connections
  85  # ======================================================================= 
  86  # global data 
  87  # ======================================================================= 
  88   
# MD5 hashes of the concatenated table structure per released
# database schema version (see database_schema_compatible())
known_schema_hashes = {
	'devel': 'not released, testing only',
	'v2': 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	'v3': 'e73718eaf230d8f1d2d01afa8462e176',
	'v4': '4428ccf2e54c289136819e701bb095ea',
	'v5': '7e7b093af57aea48c288e76632a382e5',	# ... old (v1) style hashes
	'v6': '90e2026ac2efd236da9c8608b8685b2d',	# new (v2) style hashes ...
	'v7': '6c9f6d3981483f8e9433df99d1947b27',
	'v8': '89b13a7af83337c3aad153b717e52360',
	'v9': '641a9b2be3c378ffc2bb2f0b1c9f051d',
	'v10': '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	'v11': '03042ae24f3f92877d986fb0a6184d76',
	'v12': '06183a6616db62257e22814007a8ed07'
}

# reverse mapping: schema hash -> schema version
# (the unreleased 'devel' hash is deliberately absent)
map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 'v2',
	'e73718eaf230d8f1d2d01afa8462e176': 'v3',
	'4428ccf2e54c289136819e701bb095ea': 'v4',
	'7e7b093af57aea48c288e76632a382e5': 'v5',
	'90e2026ac2efd236da9c8608b8685b2d': 'v6',
	'6c9f6d3981483f8e9433df99d1947b27': 'v7',
	'89b13a7af83337c3aad153b717e52360': 'v8',
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 'v9',
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 'v10',
	'03042ae24f3f92877d986fb0a6184d76': 'v11',
	'06183a6616db62257e22814007a8ed07': 'v12'
}

# which database schema version a given client release branch requires
map_client_branch2required_db_version = {
	u'CVS HEAD': u'devel',
	u'0.3': u'v9',
	u'0.4': u'v10',
	u'0.5': u'v11',
	u'0.6': u'v12'
}
 125   
# get columns and data types for a given table
# parameters: (schema, table), positional
query_table_col_defs = u"""select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# get just the column names for a given table
# parameters: (schema, table), positional
query_table_attributes = u"""select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""
 149   
 150  # ======================================================================= 
 151  # module globals API 
 152  # ======================================================================= 
def set_default_client_encoding(encoding = None):
	"""Remember <encoding> as the default client (wire) encoding.

	Verifies that both psycopg2 and the Python <codecs> module
	know how to handle the encoding before accepting it.

	Raises ValueError or LookupError on unusable encodings.
	"""
	# psycopg2 must know how to map this wire encoding
	if encoding not in psycopg2.extensions.encodings:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# Python must be able to handle the mapped encoding, too
	python_encoding = psycopg2.extensions.encodings[encoding]
	try:
		codecs.lookup(python_encoding)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, python_encoding))
		raise
	# FIXME: check encoding against the database
	# FIXME: - but we may not yet have access
	# FIXME: - psycopg2 will pull its encodings from the database eventually
	# it seems safe to set it
	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
	_default_client_encoding = encoding
	return True
172 #---------------------------------------------------
def set_default_client_timezone(timezone = None):
	"""Remember <timezone> as the default time zone for new connections."""
	# FIXME: use __validate
	global _default_client_timezone
	global _sql_set_timezone
	_log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
	_default_client_timezone = timezone
	_sql_set_timezone = u'set timezone to %s'
	return True
184 #---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):
	"""Check whether <timezone> is usable on connection <conn>.

	A zone must be both settable ("set timezone to ...") and
	actually usable for parsing a timestamp literal.

	Returns True/False. Leaves the connection rolled back,
	so the zone change does not persist on <conn>.
	"""
	_log.debug(u'validating time zone [%s]', timezone)

	cmd = u'set timezone to %(tz)s'
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()
	is_valid = False
	try:
		curs.execute(cmd, args)
		_log.info(u'time zone [%s] is settable', timezone)
		# can we actually use it, though ?
		cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
		try:
			curs.execute(cmd)
			curs.fetchone()
			_log.info(u'time zone [%s] is usable', timezone)
			is_valid = True
		except:
			# deliberately broad: any failure means "not usable"
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		_log.warning(u'time zone [%s] is not settable', timezone)
	except:
		# deliberately broad: any other failure also means "not settable"
		_log.error(u'failed to set time zone to [%s]', timezone)
		_log.exception(u'')

	curs.close()
	conn.rollback()

	return is_valid
217 #---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
	"""some timezone defs are abbreviations so try to expand
	them because "set time zone" doesn't take abbreviations

	Looks up a full "Region/City" style name for the abbreviation
	in pg_timezone_names (excluding the artificial Etc/ zones).

	Returns the expanded name, or the input unchanged when no
	expansion is found or the lookup fails. Leaves <conn> rolled back.
	"""
	cmd = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
	args = {u'tz': timezone}

	conn.commit()
	curs = conn.cursor()

	result = timezone
	try:
		curs.execute(cmd, args)
		rows = curs.fetchall()
		if len(rows) > 0:
			result = rows[0][0]
			_log.debug(u'[%s] maps to [%s]', timezone, result)
	except:
		# best-effort: fall back to the unexpanded input
		_log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return result
249 #---------------------------------------------------
def __detect_client_timezone(conn=None):
	"""This is run on the very first connection.

	Detects the client system time zone, validates the candidates
	against the database and stores the winner in the module
	globals _default_client_timezone / _sql_set_timezone.
	"""
	# FIXME: check whether server.timezone is the same
	# FIXME: value as what we eventually detect

	# we need gmDateTime to be initialized
	if gmDateTime.current_local_iso_numeric_timezone_string is None:
		gmDateTime.init()

	_log.debug('trying to detect timezone from system')

	tz_candidates = []
	# the TZ environment variable, if set, is the user's explicit choice
	try:
		tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
		tz_candidates.append(tz)
		expanded = __expand_timezone(conn = conn, timezone = tz)
		if expanded != tz:
			tz_candidates.append(expanded)
	except KeyError:
		pass

	tz_candidates.append(gmDateTime.current_local_timezone_name)
	expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
	if expanded != gmDateTime.current_local_timezone_name:
		tz_candidates.append(expanded)

	_log.debug('candidates: %s', str(tz_candidates))

	# find best among candidates
	global _default_client_timezone
	global _sql_set_timezone
	found = False
	for tz in tz_candidates:
		if __validate_timezone(conn = conn, timezone = tz):
			_default_client_timezone = tz
			_sql_set_timezone = u'set timezone to %s'
			found = True
			break

	if not found:
		# fall back to a numeric UTC offset which is always settable
		_default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
		_sql_set_timezone = u"set time zone interval %s hour to minute"

	_log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
295 # ======================================================================= 296 # login API 297 # =======================================================================
def __prompted_input(prompt, default=None):
	"""Read console input, substituting <default> for an empty reply."""
	reply = raw_input(prompt)
	if reply != '':
		return reply
	return default
303 #---------------------------------------------------
def __request_login_params_tui():
	"""Text mode request of database login parameters

	Returns a gmLoginInfo.LoginInfo object.
	Raises gmExceptions.ConnectionError when the user cancels (Ctrl-C).
	"""
	# getpass is only needed here, for hidden password entry
	import getpass
	login = gmLoginInfo.LoginInfo()

	print "\nPlease enter the required login parameters:"
	try:
		login.host = __prompted_input("host ['' = non-TCP/IP]: ", '')
		login.database = __prompted_input("database [gnumed_v13]: ", 'gnumed_v13')
		login.user = __prompted_input("user name: ", '')
		tmp = 'password for "%s" (not shown): ' % login.user
		login.password = getpass.getpass(tmp)
		login.port = __prompted_input("port [5432]: ", 5432)
	except KeyboardInterrupt:
		_log.warning("user cancelled text mode login dialog")
		print "user cancelled text mode login dialog"
		raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

	return login
323 #---------------------------------------------------
def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object

	Raises gmExceptions.NoGuiError when the wx App is not up,
	gmExceptions.ConnectionError when the user cancels the dialog.
	"""
	import wx
	# OK, wxPython was already loaded. But has the main Application instance
	# been initialized yet ? if not, the exception will kick us out
	if wx.GetApp() is None:
		raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

	# Let's launch the login dialog
	# if wx was not initialized /no main App loop, an exception should be raised anyway
	import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	#if user cancelled or something else went wrong, raise an exception
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	return login
348 #---------------------------------------------------
def request_login_params():
	"""Request login parameters for database connection.

	Tries the wxPython GUI dialog first - but only when DISPLAY
	is set, because outside X wxGTK will crash hard at C-level
	with "can't open Display" - and falls back to text mode.

	Returns a gmLoginInfo.LoginInfo object.
	"""
	# are we inside X ?
	# ("has_key()" is deprecated / gone in Python 3, use "in")
	if 'DISPLAY' in os.environ:
		# try GUI, fall through to text mode on any failure,
		# but at least log why the GUI path did not work
		try:
			return __request_login_params_gui_wx()
		except:
			_log.exception('cannot request login parameters via wx GUI')
	# well, either we are on the console or
	# wxPython does not work, use text mode
	return __request_login_params_tui()
364 365 # ======================================================================= 366 # DSN API 367 # -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	"""Assemble a psycopg2/libpq DSN string from the given parts.

	Parts which are None or blank (after stripping whitespace)
	are left out. "sslmode=prefer" is always appended.
	"""
	parts = []

	if database is not None and database.strip() != '':
		parts.append('dbname=%s' % database)

	if host is not None and host.strip() != '':
		parts.append('host=%s' % host)

	# port may be an integer, hence the str()
	if port is not None and str(port).strip() != '':
		parts.append('port=%s' % port)

	if user is not None and user.strip() != '':
		parts.append('user=%s' % user)

	if password is not None and password.strip() != '':
		parts.append('password=%s' % password)

	parts.append('sslmode=prefer')

	return ' '.join(parts)
389 # ------------------------------------------------------
def get_default_login():
	"""Return the default login parameters, requesting them if necessary."""
	# get_default_dsn() makes sure _default_login gets set up
	get_default_dsn()
	return _default_login
394 # ------------------------------------------------------
def get_default_dsn():
	"""Return the default DSN, asking the user for login parameters if none is set yet."""
	global _default_dsn
	if _default_dsn is None:
		# no DSN yet: request login parameters and derive the DSN from them
		set_default_login(login = request_login_params())
	return _default_dsn
404 # ------------------------------------------------------
def set_default_login(login=None):
	"""Set the module-wide default login and derive the default DSN from it.

	login = None: no-op, returns False.
	An empty host is normalized to None (= non-TCP/IP connection).
	"""
	if login is None:
		return False

	# normalize: empty host means local (non-TCP/IP) connection
	if login.host is not None:
		if login.host.strip() == u'':
			login.host = None

	global _default_login
	# log BEFORE assigning, else old and new value show up identical in the log
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	# same here: log before overwriting the old value
	_log.info('setting default DSN from [%s] to [%s]' % (_default_dsn, dsn))
	_default_dsn = dsn

	return True
424 # ======================================================================= 425 # metadata API 426 # =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
	"""Check whether the connected database schema matches <version>.

	Compares the MD5 hash of the database schema structure against
	the known hash for <version>. On mismatch the details (and,
	if <verbose>, a schema dump) are logged and False is returned.
	"""
	expected_hash = known_schema_hashes[version]
	# the unreleased 'devel' schema is hashed under pseudo version 9999
	if version == 'devel':
		args = {'ver': '9999'}
	else:
		args = {'ver': version.strip('v')}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	if rows[0]['md5'] != expected_hash:
		_log.error('database schema version mismatch')
		_log.error('expected: %s (%s)' % (version, expected_hash))
		_log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
		if verbose:
			_log.debug('schema dump follows:')
			# NOTE(review): .split() splits on ANY whitespace, so this logs
			# the dump word-by-word - possibly .split('\n') was intended; confirm
			for line in get_schema_structure(link_obj=link_obj).split():
				_log.debug(line)
			_log.debug('schema revision history dump follows:')
			for line in get_schema_revision_history(link_obj=link_obj):
				_log.debug(u' - '.join(line))
		return False
	_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
	return True
454 #------------------------------------------------------------------------
def get_schema_version(link_obj=None):
	"""Return the schema version (eg. u'v12') of the connected database.

	Returns a descriptive string when the schema hash is unknown.
	"""
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	md5_sum = rows[0]['md5']
	try:
		return map_schema_hash2version[md5_sum]
	except KeyError:
		return u'unknown database schema version, MD5 hash is [%s]' % md5_sum
461 #------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
	"""Return the concatenated schema structure dump of the connected database."""
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
	structure = rows[0][0]
	return structure
465 #------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):
	"""Return the schema revision history, oldest first.

	Each row carries (imported timestamp as text, version, filename).
	"""
	cmd = u"""
select
	imported::text,
	version,
	filename
from gm.schema_revision
order by imported
"""
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': cmd}])
	return rows
477 #------------------------------------------------------------------------
def get_current_user():
	"""Return the name of the current database user."""
	rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
	current_user = rows[0][0]
	return current_user
481 #------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.

	Queries pg_constraint for FK constraints (contype = 'f') whose
	referenced relation is <schema>.<table> and whose referenced
	column list includes <column>.
	"""
	cmd = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""
	args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}

	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [
			{'cmd': cmd, 'args': args}
		]
	)

	return rows
533 #------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table>.

	Uses pg_inherits to find tables inheriting from
	<schema>.<table>; each row carries (namespace, table).
	"""
	cmd = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
	return rows
555 #------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
	"""Returns false, true.

	Checks information_schema.tables for a BASE TABLE named
	<schema>.<table> (views and foreign tables do not count).
	"""
	cmd = u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
	return rows[0][0]
568 #------------------------------------------------------------------------
def get_col_indices(cursor = None):
	"""Return a dict mapping column name -> column index for <cursor>.

	Requires a result description on the cursor, that is, the
	last query must have selected rows - returns None otherwise.

	A query like "select 1,2;" returns two columns of the same
	name; duplicates are disambiguated by appending the column
	index. Note, however, that dict-style access won't work on
	results of such queries anyway.
	"""
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	# enumerate() replaces the manual index counter,
	# "in" replaces dict.has_key() (deprecated, gone in Python 3)
	for col_index, col_desc in enumerate(cursor.description):
		col_name = col_desc[0]
		if col_name in col_indices:
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index

	return col_indices
586 #------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
	"""Return [column name list, dict column name -> type] for <schema>.<table>.

	Array types (udt names starting with "_") are mapped to "<type>[]".
	"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	names = []
	types = {}
	for row in rows:
		names.append(row[0])
		# a leading "_" in udt_name marks an array type
		if row[1].startswith('_'):
			types[row[0]] = row[1][1:] + '[]'
		else:
			types[row[0]] = row[1]
	return [names, types]
602 #------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
	"""Return column attributes of table"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
	return [ row[0] for row in rows ]
610 #------------------------------------------------------------------------
def get_translation_languages():
	"""Return the list of languages for which translations exist."""
	rows, idx = run_ro_queries(queries = [{'cmd': u'select distinct lang from i18n.translations'}])
	languages = []
	for row in rows:
		languages.append(row[0])
	return languages
616 #------------------------------------------------------------------------
def get_current_user_language():
	"""Return the i18n language configured for the current database user."""
	rows, idx = run_ro_queries(queries = [{'cmd': u'select i18n.get_curr_lang()'}])
	return rows[0][0]
621 #------------------------------------------------------------------------
def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		if user is None:
			queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		# append a query so rows[0][0] below is True on the unset path, too
		# (unset_curr_lang() presumably does not return a boolean - confirm)
		queries.append({'cmd': u'select True'})
	else:
		if user is None:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
		else:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]
653 #------------------------------------------------------------------------
def force_user_language(language=None):
	"""Set the user language in the database.

	- regardless of whether there is any translation available.
	- only for the current user
	"""
	_log.info('forcing database language for current db user to [%s]', language)
	args = {'lang': language}
	run_rw_queries(queries = [{
		'cmd': u'select i18n.force_curr_lang(%(lang)s)',
		'args': args
	}])
666 #------------------------------------------------------------------------ 667 #------------------------------------------------------------------------ 668 text_expansion_keywords = None 669
def get_text_expansion_keywords():
	"""Return all text expansion keywords, cached at module level."""
	global text_expansion_keywords
	# serve from the module level cache when possible
	if text_expansion_keywords is None:
		cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions"""
		rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
		text_expansion_keywords = rows
		_log.info('retrieved %s text expansion keywords', len(text_expansion_keywords))
	return text_expansion_keywords
682 #------------------------------------------------------------------------
def expand_keyword(keyword = None):
	"""Return the expansion for <keyword>, or None if there is none."""

	# Easter Egg ;-)
	if keyword == u'$$steffi':
		return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'

	cmd = u"""select expansion from clin.v_your_keyword_expansions where keyword = %(kwd)s"""
	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])

	if rows:
		return rows[0]['expansion']
	return None
696 #------------------------------------------------------------------------
def get_keyword_expansion_candidates(keyword = None):
	"""Return all known expansion keywords starting with <keyword>."""
	if keyword is None:
		return []

	# make sure the module level cache is filled
	get_text_expansion_keywords()

	return [
		entry['keyword'] for entry in text_expansion_keywords
		if entry['keyword'].startswith(keyword)
	]
710 #------------------------------------------------------------------------
def add_text_expansion(keyword=None, expansion=None, public=None):
	"""Add a text expansion for <keyword>.

	public = True: a public expansion (no staff member attached),
	otherwise a private expansion for the current staff member.

	Returns False when the keyword already exists, True otherwise.
	Invalidates the module level keyword cache.
	"""
	# refuse duplicates within the relevant (public/private) scope
	if public:
		cmd = u"select 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
	else:
		cmd = u"select 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
	if len(rows) != 0:
		return False

	# public expansions carry fk_staff = null,
	# private ones link to the current staff member
	if public:
		cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, null)"""
	else:
		cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))"""

	rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword, 'exp': expansion}}])

	# invalidate the cache so the next lookup re-reads from the database
	global text_expansion_keywords
	text_expansion_keywords = None

	return True
737 #------------------------------------------------------------------------
def delete_text_expansion(keyword):
	"""Delete the expansion for <keyword> owned by the current user.

	Covers both private expansions (fk_staff = current staff member)
	and public ones created by the current user (fk_staff is null).
	Invalidates the module level keyword cache.
	"""
	cmd = u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and (
		(fk_staff = (select pk from dem.staff where db_user = current_user))
			or
		(fk_staff is null and owner = current_user)
	)"""
	rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])

	# invalidate the cache so the next lookup re-reads from the database
	global text_expansion_keywords
	text_expansion_keywords = None
750 #------------------------------------------------------------------------
def edit_text_expansion(keyword, expansion):
	"""Replace the current user's private expansion for <keyword>.

	Implemented as delete-then-insert in one transaction.
	Invalidates the module level keyword cache.
	"""
	cmd1 = u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and
	fk_staff = (select pk from dem.staff where db_user = current_user)"""

	cmd2 = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))"""

	rows, idx = run_rw_queries(queries = [
		{'cmd': cmd1, 'args': {'kwd': keyword}},
		{'cmd': cmd2, 'args': {'kwd': keyword, 'exp': expansion}},
	])

	# invalidate the cache so the next lookup re-reads from the database
	global text_expansion_keywords
	text_expansion_keywords = None
769 # ======================================================================= 770 # query runners and helpers 771 # =======================================================================
def send_maintenance_notification():
	"""Broadcast a database maintenance warning to all listening clients."""
	run_rw_queries (
		queries = [{'cmd': u'notify "db_maintenance_warning:"'}],
		return_data = False
	)
775 #------------------------------------------------------------------------
def send_maintenance_shutdown():
	"""Ask all listening clients to disconnect for database maintenance."""
	run_rw_queries (
		queries = [{'cmd': u'notify "db_maintenance_disconnect:"'}],
		return_data = False
	)
779 #------------------------------------------------------------------------
def is_pg_interval(candidate=None):
	"""Check whether <candidate> can be cast to a PostgreSQL interval."""
	cmd = u'select %(candidate)s::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except:
		pass

	# the direct cast failed, retry casting via text
	cmd = u'select %(candidate)s::text::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except:
		return False
792 #------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into the file <filename>.

	See bytea2file_object() for the meaning of the arguments.
	Returns the result of bytea2file_object().
	"""
	# open() rather than the Python-2-only file(); try/finally makes
	# sure the handle gets closed even when the download raises
	outfile = open(filename, 'wb')
	try:
		result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
	finally:
		outfile.close()
	return result
798 #------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict
	- must return one row with one field of type bytea
	<file>
	- must be a file like Python object
	<data_size>
	- integer of the total size of the expected data or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- cmd must be unicode
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None
	"""
	if data_size == 0:
		return True

	# If the client sets an encoding other than the default we
	# will receive encoding-parsed data which isn't the binary
	# content we want. Hence we need to get our own connection.
	# It must be a read-write one so that we don't affect the
	# encoding for other users of the shared read-only
	# connections.
	# Actually, encodings shouldn't be applied to binary data
	# (eg. bytea types) in the first place but that is only
	# reported to be fixed > v7.4.
	# further tests reveal that at least on PG 8.0 this bug still
	# manifests itself
	conn = get_raw_connection(readonly=True)

	# determine the data size from the database when not supplied
	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
	if data_size in [None, 0]:
		conn.rollback()
		return True

	_log.debug('expecting bytea data of size: [%s] bytes' % data_size)
	_log.debug('using chunk size of: [%s] bytes' % chunk_size)

	# chunk size of 0 means "retrieve whole field at once"
	if chunk_size == 0:
		chunk_size = data_size
		_log.debug('chunk size [0] bytes: retrieving all data at once')

	# Windoze sucks: it can't transfer objects of arbitrary size,
	# anyways, we need to split the transfer,
	# however, only possible if postgres >= 7.2
	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('chunks to retrieve: [%s]' % needed_chunks)
	_log.debug('remainder to retrieve: [%s] bytes' % remainder)

	# retrieve chunks, skipped if data size < chunk size,
	# does this not carry the danger of cutting up multi-byte escape sequences ?
	# no, since bytea is binary,
	# yes, since in bytea there are *some* escaped values, still
	# no, since those are only escaped during *transfer*, not on-disk, hence
	# only complete escape sequences are put on the wire
	for chunk_id in range(needed_chunks):
		# substring() is 1-based, hence the +1
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# retrieve remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		# it would be a fatal error to see more than one result as ids are supposed to be unique
		file_obj.write(str(rows[0][0]))

	# read-only operation: roll back rather than commit
	conn.rollback()
	return True
890 #------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
    """Store data from a file into a bytea field.

    The query must:
    - be in unicode
    - contain a format spec identifying the row (eg a primary key)
      matching <args> if it is an UPDATE
    - contain a format spec %(data)s::bytea

    <conn>
    - if None a private read-write connection is created and
      closed again, otherwise the caller's connection is used
      and left open
    """
    # read data from file, releasing the handle even if read() fails
    # (was: file left open on exceptions)
    infile = open(filename, "rb")
    try:
        data_as_byte_string = infile.read()
    finally:
        infile.close()

    if args is None:
        args = {}
    # wrap in buffer() so psycopg2 transfers the data as binary
    args['data'] = buffer(data_as_byte_string)
    del(data_as_byte_string)

    # insert the data
    if conn is None:
        conn = get_raw_connection(readonly=False)
        close_conn = True
    else:
        close_conn = False

    try:
        run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)
    finally:
        # close privately created connections even on failure
        # (was: connection leaked when the query raised)
        if close_conn:
            conn.close()

    return
922 #------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
    """Escape input for use in a PostgreSQL regular expression.

    If a fragment comes from user input and is to be used
    as a regular expression we need to make sure it doesn't
    contain invalid regex patterns such as unbalanced ('s.

    <escape_all>
        True: additionally escape the remaining metacharacters
              (was documented but previously not implemented)
        False: only escape those which render the regex invalid
    """
    # these render the regex invalid when unbalanced,
    # hence they are always escaped
    sanitized = expression.replace (
        '(', '\\('
    ).replace (
        ')', '\\)'
    ).replace (
        '[', '\\['
    ).replace (
        '+', '\\+'
    ).replace (
        '.', '\\.'
    ).replace (
        '*', '\\*'
    )
    if escape_all:
        # NOTE: "\" itself is deliberately left alone so the
        # escape sequences produced above are not corrupted
        for metachar in [']', '{', '}', '^', '$', '?', '|']:
            sanitized = sanitized.replace(metachar, '\\' + metachar)
    return sanitized
#']', '\]',			# not needed
#------------------------------------------------------------------------
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
    """Run read-only queries.

    <link_obj>
        - a cursor: used as-is, neither closed nor rolled back here
        - a connection: a cursor is created/closed on it and the
          transaction is rolled back at the end
        - None: a pooled read-only connection is used

    <queries> must be a list of dicts:
        [
            {'cmd': <string>, 'args': <dict> or <tuple>},
            {...},
            ...
        ]

    <return_data>
        - if True, return the rows of the last query
    <get_col_idx>
        - if True, also return a {field name: column position} map

    Returns a tuple (data, col_idx).
    """
    # map <link_obj> type to a cursor plus the appropriate
    # close/rollback callables (__noop where not ours to do)
    if isinstance(link_obj, dbapi._psycopg.cursor):
        curs = link_obj
        curs_close = __noop
        tx_rollback = __noop
    elif isinstance(link_obj, dbapi._psycopg.connection):
        curs = link_obj.cursor()
        curs_close = curs.close
        tx_rollback = link_obj.rollback
    elif link_obj is None:
        conn = get_connection(readonly=True, verbose=verbose)
        curs = conn.cursor()
        curs_close = curs.close
        tx_rollback = conn.rollback
    else:
        raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

    if verbose:
        _log.debug('cursor: %s', curs)

    for query in queries:
        # queries are expected in unicode, warn loudly otherwise
        if type(query['cmd']) is not types.UnicodeType:
            print "run_ro_queries(): non-unicode query"
            print query['cmd']
        try:
            args = query['args']
        except KeyError:
            args = None
        try:
            curs.execute(query['cmd'], args)
            if verbose:
                _log.debug('ran query: [%s]', curs.query)
                _log.debug('PG status message: %s', curs.statusmessage)
                _log.debug('cursor description: %s', str(curs.description))
        except:
            # FIXME: use .pgcode
            try:
                curs_close()
            except dbapi.InterfaceError:
                _log.exception('cannot close cursor')
            tx_rollback()        # need to rollback so ABORT state isn't preserved in pooled conns
            _log.error('query failed: [%s]', curs.query)
            _log.error('PG status message: %s', curs.statusmessage)
            raise

    data = None
    col_idx = None
    if return_data:
        data = curs.fetchall()
        if verbose:
            _log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
            _log.debug('cursor description: %s', str(curs.description))
        if get_col_idx:
            col_idx = get_col_indices(curs)

    curs_close()
    tx_rollback()        # rollback just so that we don't stay IDLE IN TRANSACTION forever
    return (data, col_idx)
1016 #------------------------------------------------------------------------
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
    """Convenience function for running a transaction
    that is supposed to get committed.

    <link_obj>
        can be either:
        - a cursor
        - a connection

    <queries>
        is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
        to be executed as a single transaction, the last
        query may usefully return rows (such as a
        "select currval('some_sequence')" statement)

    <end_tx>
        - controls whether the transaction is finalized (eg.
          committed/rolled back) or not, this allows the
          call to run_rw_queries() to be part of a framing
          transaction
        - if link_obj is a connection then <end_tx> will
          default to False unless it is explicitly set to
          True which is taken to mean "yes, you do have full
          control over the transaction" in which case the
          transaction is properly finalized
        - if link_obj is a cursor we CANNOT finalize the
          transaction because we would need the connection for that
        - if link_obj is None <end_tx> will, of course, always be True

    <return_data>
        - if true, the returned data will include the rows
          the last query selected
        - if false, it returns None instead

    <get_col_idx>
        - if true, the returned data will include a dictionary
          mapping field names to column positions
        - if false, the returned data returns None instead

    method result:
        - returns a tuple (data, idx)
        - <data>:
            * (None, None) if last query did not return rows
            * ("fetchall() result", <index>) if last query returned any rows
            * for <index> see <get_col_idx>
    """
    # map <link_obj> type to concrete commit/rollback/close
    # callables; __noop wherever finalization is not ours to do
    if isinstance(link_obj, dbapi._psycopg.cursor):
        conn_close = __noop
        conn_commit = __noop
        conn_rollback = __noop
        curs = link_obj
        curs_close = __noop
    elif isinstance(link_obj, dbapi._psycopg.connection):
        conn_close = __noop
        if end_tx:
            conn_commit = link_obj.commit
            conn_rollback = link_obj.rollback
        else:
            conn_commit = __noop
            conn_rollback = __noop
        curs = link_obj.cursor()
        curs_close = curs.close
    elif link_obj is None:
        conn = get_connection(readonly=False)
        conn_close = conn.close
        conn_commit = conn.commit
        conn_rollback = conn.rollback
        curs = conn.cursor()
        curs_close = curs.close
    else:
        raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

    for query in queries:
        # queries are expected in unicode, warn loudly otherwise
        if type(query['cmd']) is not types.UnicodeType:
            print "run_rw_queries(): non-unicode query"
            print query['cmd']
        try:
            args = query['args']
        except KeyError:
            args = None
        try:
            curs.execute(query['cmd'], args)
        except:
            _log.exception('error running RW query')
            gmLog2.log_stack_trace()
            # best-effort cleanup, then re-raise the original error
            try:
                curs_close()
                conn_rollback()
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
                raise
            raise

    data = None
    col_idx = None
    if return_data:
        try:
            data = curs.fetchall()
        except:
            _log.exception('error fetching data from RW query')
            gmLog2.log_stack_trace()
            # best-effort cleanup, then re-raise the original error
            try:
                curs_close()
                conn_rollback()
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
                raise
            raise
        if get_col_idx:
            col_idx = get_col_indices(curs)

    curs_close()
    conn_commit()
    conn_close()

    return (data, col_idx)
1135 # ======================================================================= 1136 # connection handling API 1137 # -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
    """GNUmed database connection pool.

    A PersistentConnectionPool whose _connect() hands out
    read-only connections which are guarded against being
    closed by accident. Supports one connection per thread
    - which also ties it to one particular DSN.
    """
    #--------------------------------------------------
    def _connect(self, key=None):
        """Create one pooled, read-only connection."""
        conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

        # keep the real close() around under another name,
        # then make close() itself raise so pooled read-only
        # connections cannot be closed by accident
        conn.original_close = conn.close
        conn.close = _raise_exception_on_ro_conn_close

        # bookkeeping as expected by the psycopg2 pool base class
        if key is None:
            self._pool.append(conn)
        else:
            self._used[key] = conn
            self._rused[id(conn)] = key

        return conn
    #--------------------------------------------------
    def shutdown(self):
        """Really close all handed-out pooled connections."""
        for pool_key, pooled_conn in self._used.items():
            _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', pool_key, pooled_conn.get_backend_pid())
            pooled_conn.original_close()
1166 # -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
    """Get a raw, unadorned connection.

    - this will not set any parameters such as encoding, timezone, datestyle
    - the only requirement is a valid DSN
    - hence it can be used for "service" connections
      for verifying encodings etc
    """
    # FIXME: support verbose
    if dsn is None:
        dsn = get_default_dsn()

    try:
        conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
    except dbapi.OperationalError, e:

        t, v, tb = sys.exc_info()
        try:
            msg = e.args[0]
        except (AttributeError, IndexError, TypeError):
            raise

        msg = unicode(msg, gmI18N.get_encoding(), 'replace')

        # map well-known connection failure messages to a typed
        # authentication error, preserving the original traceback
        if msg.find('fe_sendauth') != -1:
            raise cAuthenticationError, (dsn, msg), tb

        if regex.search('user ".*" does not exist', msg) is not None:
            raise cAuthenticationError, (dsn, msg), tb

        if msg.find('uthenti') != -1:
            raise cAuthenticationError, (dsn, msg), tb

        raise

    _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

    # do first-time stuff: detect and cache the server version
    # (only once per process)
    global postgresql_version
    if postgresql_version is None:
        curs = conn.cursor()
        curs.execute ("""
            select
                (split_part(setting, '.', 1) || '.' || split_part(setting, '.', 2))::numeric as version
            from pg_settings
            where name='server_version'"""
        )
        postgresql_version = curs.fetchone()['version']
        _log.info('PostgreSQL version (numeric): %s' % postgresql_version)
        try:
            curs.execute("select pg_size_pretty(pg_database_size(current_database()))")
            _log.info('database size: %s', curs.fetchone()[0])
        except:
            # size logging is informational only, never fatal
            pass
        if verbose:
            __log_PG_settings(curs=curs)
        curs.close()
        conn.commit()

    if _default_client_timezone is None:
        __detect_client_timezone(conn = conn)

    curs = conn.cursor()

    # set access mode for the whole session
    if readonly:
        _log.debug('access mode [READ ONLY]')
        cmd = 'set session characteristics as transaction READ ONLY'
        curs.execute(cmd)
        cmd = 'set default_transaction_read_only to on'
        curs.execute(cmd)
    else:
        _log.debug('access mode [READ WRITE]')
        cmd = 'set session characteristics as transaction READ WRITE'
        curs.execute(cmd)
        cmd = 'set default_transaction_read_only to off'
        curs.execute(cmd)

    curs.close()
    conn.commit()

    # not yet decorated with encoding/timezone/datestyle,
    # get_connection() checks this flag
    conn.is_decorated = False

    return conn
1251 # =======================================================================
1252 -def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
1253 """Get a new connection. 1254 1255 This assumes the locale system has been initialzied 1256 unless an encoding is specified. 1257 """ 1258 # FIXME: support pooled on RW, too 1259 # FIXME: for now, support the default DSN only 1260 if pooled and readonly and (dsn is None): 1261 global __ro_conn_pool 1262 if __ro_conn_pool is None: 1263 __ro_conn_pool = cConnectionPool ( 1264 minconn = 1, 1265 maxconn = 2, 1266 dsn = dsn, 1267 verbose = verbose 1268 ) 1269 conn = __ro_conn_pool.getconn() 1270 else: 1271 conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False) 1272 1273 if conn.is_decorated: 1274 return conn 1275 1276 if encoding is None: 1277 encoding = _default_client_encoding 1278 if encoding is None: 1279 encoding = gmI18N.get_encoding() 1280 _log.warning('client encoding not specified') 1281 _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding) 1282 _log.warning('for this to work properly the application MUST have called locale.setlocale() before') 1283 1284 # set connection properties 1285 # 1) client encoding 1286 _log.debug('client string encoding [%s]' % encoding) 1287 try: 1288 conn.set_client_encoding(encoding) 1289 except dbapi.OperationalError: 1290 t, v, tb = sys.exc_info() 1291 if str(v).find("can't set encoding to") != -1: 1292 raise cEncodingError, (encoding, v), tb 1293 raise 1294 1295 # 2) transaction isolation level 1296 if readonly: 1297 conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED) 1298 _log.debug('isolation level [read committed]') 1299 else: 1300 conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) 1301 _log.debug('isolation level [serializable]') 1302 1303 curs = conn.cursor() 1304 1305 # client time zone 1306 _log.debug('time zone [%s]' % _default_client_timezone) 1307 curs.execute(_sql_set_timezone, [_default_client_timezone]) 1308 1309 # datestyle 1310 # regarding DMY/YMD handling: since we force *input* to 1311 # ISO, too, the 
DMY/YMD setting is not needed 1312 _log.debug('datestyle [ISO]') 1313 cmd = "set datestyle to 'ISO'" 1314 curs.execute(cmd) 1315 1316 # SQL inheritance mode 1317 _log.debug('sql_inheritance [on]') 1318 cmd = 'set sql_inheritance to on' 1319 curs.execute(cmd) 1320 1321 # version string 1322 global postgresql_version_string 1323 if postgresql_version_string is None: 1324 curs.execute('select version()') 1325 postgresql_version_string = curs.fetchone()['version'] 1326 _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string) 1327 1328 curs.close() 1329 conn.commit() 1330 1331 conn.is_decorated = True 1332 1333 return conn
1334 #-----------------------------------------------------------------------
def shutdown():
    """Close all pooled read-only connections, if a pool exists."""
    if __ro_conn_pool is not None:
        __ro_conn_pool.shutdown()
1339 # ====================================================================== 1340 # internal helpers 1341 #-----------------------------------------------------------------------
def __noop():
    """Placeholder callable used where no close/commit/rollback is needed."""
    return None
1344 #-----------------------------------------------------------------------
def _raise_exception_on_ro_conn_close():
    # monkey-patched in as .close() on pooled read-only connections
    # (see cConnectionPool._connect) so they cannot be closed by accident
    raise TypeError(u'close() called on read-only connection')
1347 #-----------------------------------------------------------------------
def sanity_check_time_skew(tolerance=60):
    """Verify that client and server clocks roughly agree.

    Compares the server's idea of "now" (in UTC) against the
    local clock.

    tolerance: maximum acceptable skew, in seconds

    Returns True when both the wire roundtrip and the skew are
    within tolerance, False otherwise.
    """
    _log.debug('maximum skew tolerance (seconds): %s', tolerance)

    cmd = u"select now() at time zone 'UTC'"
    conn = get_raw_connection(readonly=True)
    curs = conn.cursor()

    query_start = time.time()
    rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
    query_end = time.time()
    client_now_as_utc = pydt.datetime.utcnow()

    curs.close()
    conn.commit()

    server_now_as_utc = rows[0][0]
    query_duration = query_end - query_start
    _log.info('server "now" (UTC): %s', server_now_as_utc)
    _log.info('client "now" (UTC): %s', client_now_as_utc)
    _log.debug('wire roundtrip (seconds): %s', query_duration)

    # a slow wire makes the comparison meaningless
    if query_duration > tolerance:
        _log.error('useless to check client/server time skew, wire roundtrip > tolerance')
        return False

    # only the magnitude of the skew matters, not its direction
    real_skew = abs(server_now_as_utc - client_now_as_utc)

    _log.debug('client/server time skew: %s', real_skew)

    if real_skew > pydt.timedelta(seconds = tolerance):
        _log.error('client/server time skew > tolerance')
        return False

    return True
1390 #-----------------------------------------------------------------------
def sanity_check_database_settings():
    """Checks database settings.

    returns (status, message)
    status:
        0: no problem
        1: non-fatal problem
        2: fatal problem
    """
    _log.debug('checking database settings')
    # setting name -> [expected value, risk when wrong, fatal? (None = log only)]
    settings = {
        # setting: [expected value, risk, fatal?]
        u'allow_system_table_mods': [u'off', u'system breakage', False],
        u'check_function_bodies': [u'on', u'suboptimal error detection', False],
        u'default_transaction_read_only': [u'on', u'accidental database writes', False],
        u'fsync': [u'on', u'data loss/corruption', True],
        u'full_page_writes': [u'on', u'data loss/corruption', False],
        u'lc_messages': [u'C', u'suboptimal error detection', False],
        u'password_encryption': [u'on', u'breach of confidentiality', False],
        u'regex_flavor': [u'advanced', u'query breakage', False],
        u'synchronous_commit': [u'on', u'data loss/corruption', False],
        u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
    }

    # connection/disconnection logging is only *fatal* when
    # HIPAA compliance was requested via the config system
    from Gnumed.pycommon import gmCfg2
    _cfg = gmCfg2.gmCfgData()
    if _cfg.get(option = u'hipaa'):
        settings[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
        settings[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
    else:
        settings[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
        settings[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

    cmd = u"select name, setting from pg_settings where name in %(settings)s"
    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'settings': tuple(settings.keys())}}])

    found_error = False
    found_problem = False
    msg = []
    for row in rows:
        # row[0] = setting name, row[1] = current value
        if row[1] != settings[row[0]][0]:
            if settings[row[0]][2] is True:
                found_error = True
            elif settings[row[0]][2] is False:
                found_problem = True
            elif settings[row[0]][2] is None:
                # informational only: logged below, never affects status
                pass
            else:
                _log.error(settings[row[0]])
                raise ValueError(u'invalid database configuration sanity check')
            msg.append(_(' option [%s]: %s') % (row[0], row[1]))
            msg.append(_(' risk: %s') % settings[row[0]][1])
            _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (row[0], row[1], settings[row[0]][0], settings[row[0]][1]))

    if found_error:
        return 2, u'\n'.join(msg)

    if found_problem:
        return 1, u'\n'.join(msg)

    return 0, u''
1452 #------------------------------------------------------------------------
def __log_PG_settings(curs=None):
    """Dump all PostgreSQL runtime settings to the log.

    Deliberately avoids the run_*() helpers: a failure in
    those could recurse back into here.
    """
    # FIXME: use pg_settings
    try:
        curs.execute(u'show all')
    except:
        _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
        return False

    settings = curs.fetchall()
    if settings is None:
        _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
        return False

    for row in settings:
        name, value = row[0], row[1]
        _log.debug(u'PG option [%s]: %s', name, value)

    return True
1469 # =======================================================================
def extract_msg_from_pg_exception(exc=None):
    """Return the message of a psycopg2 exception as unicode.

    Falls back to a placeholder string when the exception
    carries no usable message.
    """
    try:
        msg = exc.args[0]
    except (AttributeError, IndexError, TypeError):
        return u'cannot extract message from exception'

    # decode using the locale encoding, replacing undecodable bytes
    return unicode(msg, gmI18N.get_encoding(), 'replace')
1478 # =======================================================================
class cAuthenticationError(dbapi.OperationalError):
    """Raised when connecting fails for what looks like authentication reasons.

    See get_raw_connection() for the messages that are mapped here.
    """

    def __init__(self, dsn=None, prev_val=None):
        # dsn: the DSN the failed connection attempt used
        # prev_val: the message of the original OperationalError
        self.dsn = dsn
        self.prev_val = prev_val

    def __str__(self):
        _log.warning('%s.__str__() called', self.__class__.__name__)
        tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
        _log.error(tmp)
        # encode for byte-string contexts, replacing unencodable chars
        return tmp.encode(gmI18N.get_encoding(), 'replace')

    def __unicode__(self):
        return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1493 1494 # ======================================================================= 1495 # custom psycopg2 extensions 1496 # =======================================================================
class cEncodingError(dbapi.OperationalError):
    """Raised when the client encoding cannot be set on a connection.

    See get_connection() for where this is mapped from the
    backend's "can't set encoding to" error.
    """

    def __init__(self, encoding=None, prev_val=None):
        # encoding: the encoding that was rejected by the backend
        # prev_val: the original error value
        self.encoding = encoding
        self.prev_val = prev_val

    def __str__(self):
        _log.warning('%s.__str__() called', self.__class__.__name__)
        return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

    def __unicode__(self):
        return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1509 1510 # ----------------------------------------------------------------------- 1511 # Python -> PostgreSQL 1512 # -----------------------------------------------------------------------
class cAdapterPyDateTime(object):
    """Adapt datetime.datetime instances for psycopg2.

    Naive datetimes are rejected outright: a timestamp
    without a time zone is ambiguous and must not be sent
    to the backend.
    """

    def __init__(self, dt):
        # refuse to adapt naive datetimes
        if dt.tzinfo is None:
            raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
        self.__timestamp = dt

    def getquoted(self):
        # render as ISO 8601 wrapped in the SQL timestamp template
        return _timestamp_template % self.__timestamp.isoformat()
1522 1523 # ----------------------------------------------------------------------
class cAdapterMxDateTime(object):
    """Adapt mx.DateTime instances for psycopg2."""

    def __init__(self, dt):
        # '???' is mx.DateTime's marker for "time zone unknown";
        # such values are passed on and end up in the local zone
        if dt.tz == '???':
            _log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
        self.__dt = dt

    def getquoted(self):
        # under some locale settings the mx.DateTime ISO formatter
        # will insert "," into the ISO string,
        # while this is allowed per the ISO8601 spec PostgreSQL
        # cannot currently handle that,
        # so map those "," to "." to make things work:
        return mxDT.ISO.str(self.__dt).replace(',', '.')
1538 1539 # ---------------------------------------------------------------------- 1540 # PostgreSQL -> Python 1541 # ---------------------------------------------------------------------- 1542 1543 # Delete this later: 1544 1545 # We need this because some places once used time "zones" 1546 # with true local time, IOW having seconds in the UTC offset. 1547 # The Python datetime zone code cannot handle that, however, 1548 # which makes psycopg2 fail when loading timestamps with such 1549 # time zones from the backend ... 1550 # So we (almost silently) drop the seconds and try again.
def convert_ts_with_odd_tz(string_value, cursor):
    """Parse a <timestamp with time zone> string from PostgreSQL.

    Some legacy data used true-local-time zones which carry
    *seconds* in the UTC offset (eg "+01:02:03"). Python's
    datetime zone handling cannot represent those, making the
    stock psycopg2 parser fail, so the offset seconds are
    dropped and parsing is retried - the value will then be
    off by those seconds.
    """
    #_log.debug('parsing [%s]' % string_value)
    try:
        return dbapi.DATETIME(string_value, cursor)
    except (dbapi.DataError,), exc:
        _log.error('unable to parse [%s]' % string_value)

    # note: <exc> intentionally leaks out of the except block
    # (Python 2 semantics) and is inspected below

    if string_value is None:
        raise

    if exc.message != "unable to parse time":
        raise

    _log.debug('unable to parse as <timestamp with time zone>')

    # only retry when the tail really looks like an
    # offset-with-seconds time zone ("+HH:MM:SS" / "-HH:MM:SS")
    if regex.match('(\+|-)\d\d:\d\d:\d\d', string_value[-9:]) is None:
        raise

    if regex.match('-\d\d:\d\d:\d\d', string_value[-9:]) is not None:
        if string_value[-5:-3] != '00':
            _log.debug('psycopg2 versions < 2.0.8 may misinterpret this time zone: [%s]', string_value[-9:])

    # parsing doesn't succeed even if seconds
    # are ":00" so truncate in any case
    _log.debug('time zone with seconds detected (true local time ?): %s', string_value[-9:])
    truncated_string_value = string_value[:-3]
    _log.warning('truncating to [%s] and trying again', truncated_string_value)
    _log.warning('value will be off by %s seconds', string_value[-2:])
    return dbapi.DATETIME(truncated_string_value, cursor)
# --- compatibility shim for odd time zones (slated for deletion) ---
TIMESTAMPTZ_OID = 1184        # taken from PostgreSQL headers

# fail fast if the psycopg2 in use does not consider this OID a DATETIME
if TIMESTAMPTZ_OID not in dbapi.DATETIME.values:
    raise ImportError('TIMESTAMPTZ_OID <%s> not in psycopg2.DATETIME.values [%s]' % (TIMESTAMPTZ_OID, dbapi.DATETIME.values))

# lenient parser for TIMESTAMP WITH TIME ZONE columns (see convert_ts_with_odd_tz)
DT_W_ODD_TZ = psycopg2.extensions.new_type((TIMESTAMPTZ_OID,), 'DT_W_ODD_TZ', convert_ts_with_odd_tz)
#psycopg2.extensions.register_type(DT_W_ODD_TZ)        # now done by psycopg2 during new_type()

# delete until here

#=======================================================================
# main
#-----------------------------------------------------------------------
# properly adapt *tuples* into (a, b, c, ...) for
# "where ... IN (...)" queries
# but only needed/possible in psycopg2 < 2.0.6
#try:
#    psycopg2.extensions.register_adapter(tuple, psycopg2.extras.SQL_IN)
#except AttributeError:
#    print "SQL_IN not needed"
#    pass

# make sure psycopg2 knows how to handle unicode ...
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)
try:
    import mx.DateTime as mxDT
    psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
except ImportError:
    # mx.DateTime is optional, adaptation only needed when it is used
    _log.warning('cannot import mx.DateTime')

#try:
#    psycopg2.extras.register_tstz_w_secs()
#except AttributeError:
#    _log.error('cannot activate parsing time stamps with seconds in the time zone')

# do NOT adapt *lists* to "... IN (*) ..." syntax because we want
# them adapted to "... ARRAY()..." so we can support PG arrays
#psycopg2.extensions.register_adapter(list, psycopg2.extras.SQL_IN)

#=======================================================================
if __name__ == "__main__":

    logging.basicConfig(level=logging.DEBUG)
    #--------------------------------------------------------------------
    def test_file2bytea():
        # round-trip check: create a scratch table, upload the file
        # named on the command line into a bytea column, drop the table
        run_rw_queries(queries = [
            {'cmd': u'create table test_bytea (data bytea)'}
        ])

        cmd = u'insert into test_bytea values (%(data)s::bytea)'
        try:
            file2bytea(query = cmd, filename = sys.argv[2])
        except:
            _log.exception('error')

        run_rw_queries(queries = [
            {'cmd': u'drop table test_bytea'}
        ])
1643 #--------------------------------------------------------------------
    def test_get_connection():
        # exercise get_connection() across bad DSNs, bad credentials,
        # RO/RW modes and a bogus encoding - failures are expected
        print "testing get_connection()"

        # invalid DSN syntax
        dsn = 'foo'
        try:
            conn = get_connection(dsn=dsn)
        except dbapi.OperationalError, e:
            print "SUCCESS: get_connection(%s) failed as expected" % dsn
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        # no user at all
        dsn = 'dbname=gnumed_v9'
        try:
            conn = get_connection(dsn=dsn)
        except cAuthenticationError:
            print "SUCCESS: get_connection(%s) failed as expected" % dsn
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        # non-existant user
        dsn = 'dbname=gnumed_v9 user=abc'
        try:
            conn = get_connection(dsn=dsn)
        except cAuthenticationError:
            print "SUCCESS: get_connection(%s) failed as expected" % dsn
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        # known user, no password
        dsn = 'dbname=gnumed_v9 user=any-doc'
        try:
            conn = get_connection(dsn=dsn)
        except cAuthenticationError:
            print "SUCCESS: get_connection(%s) failed as expected" % dsn
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        # known user, wrong password
        dsn = 'dbname=gnumed_v9 user=any-doc password=abc'
        try:
            conn = get_connection(dsn=dsn)
        except cAuthenticationError:
            print "SUCCESS: get_connection(%s) failed as expected" % dsn
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        # valid credentials, read-only and read-write
        dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
        conn = get_connection(dsn=dsn, readonly=True)

        dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
        conn = get_connection(dsn=dsn, readonly=False)

        # valid credentials, invalid encoding
        dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
        encoding = 'foo'
        try:
            conn = get_connection(dsn=dsn, encoding=encoding)
        except cEncodingError:
            print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding)
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v
1707 #--------------------------------------------------------------------
1708 - def test_exceptions():
1709 print "testing exceptions" 1710 1711 try: 1712 raise cAuthenticationError('no dsn', 'no previous exception') 1713 except cAuthenticationError: 1714 t, v, tb = sys.exc_info() 1715 print t 1716 print v 1717 print tb 1718 1719 try: 1720 raise cEncodingError('no dsn', 'no previous exception') 1721 except cEncodingError: 1722 t, v, tb = sys.exc_info() 1723 print t 1724 print v 1725 print tb
1726 #--------------------------------------------------------------------
    def test_ro_queries():
        # exercise run_ro_queries() with a connection, a cursor,
        # and a deliberately broken query
        print "testing run_ro_queries()"

        dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
        conn = get_connection(dsn, readonly=True)

        data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True)
        print data
        print idx
        data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True)
        print data
        print idx

        curs = conn.cursor()

        data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True)
        print data
        print idx

        data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True, verbose=True)
        print data
        print idx

        # invalid SQL must surface as ProgrammingError
        try:
            data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True)
            print data
            print idx
        except psycopg2.ProgrammingError:
            print 'SUCCESS: run_ro_queries("selec 1") failed as expected'
            t, v = sys.exc_info()[:2]
            print ' ', t
            print ' ', v

        curs.close()
1761 #--------------------------------------------------------------------
    def test_request_dsn():
        # connect using the default DSN mechanism
        conn = get_connection()
        print conn
        conn.close()
1766 #--------------------------------------------------------------------
1767 - def test_set_encoding():
1768 print "testing set_default_client_encoding()" 1769 1770 enc = 'foo' 1771 try: 1772 set_default_client_encoding(enc) 1773 print "SUCCESS: encoding [%s] worked" % enc 1774 except ValueError: 1775 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1776 t, v = sys.exc_info()[:2] 1777 print ' ', t 1778 print ' ', v 1779 1780 enc = '' 1781 try: 1782 set_default_client_encoding(enc) 1783 print "SUCCESS: encoding [%s] worked" % enc 1784 except ValueError: 1785 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1786 t, v = sys.exc_info()[:2] 1787 print ' ', t 1788 print ' ', v 1789 1790 enc = 'latin1' 1791 try: 1792 set_default_client_encoding(enc) 1793 print "SUCCESS: encoding [%s] worked" % enc 1794 except ValueError: 1795 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1796 t, v = sys.exc_info()[:2] 1797 print ' ', t 1798 print ' ', v 1799 1800 enc = 'utf8' 1801 try: 1802 set_default_client_encoding(enc) 1803 print "SUCCESS: encoding [%s] worked" % enc 1804 except ValueError: 1805 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1806 t, v = sys.exc_info()[:2] 1807 print ' ', t 1808 print ' ', v 1809 1810 enc = 'unicode' 1811 try: 1812 set_default_client_encoding(enc) 1813 print "SUCCESS: encoding [%s] worked" % enc 1814 except ValueError: 1815 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1816 t, v = sys.exc_info()[:2] 1817 print ' ', t 1818 print ' ', v 1819 1820 enc = 'UNICODE' 1821 try: 1822 set_default_client_encoding(enc) 1823 print "SUCCESS: encoding [%s] worked" % enc 1824 except ValueError: 1825 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1826 t, v = sys.exc_info()[:2] 1827 print ' ', t 1828 print ' ', v
1829 #--------------------------------------------------------------------
1830 - def test_connection_pool():
1831 dsn = get_default_dsn() 1832 pool = cConnectionPool(minconn=1, maxconn=2, dsn=None, verbose=False) 1833 print pool 1834 print pool.getconn() 1835 print pool.getconn() 1836 print pool.getconn() 1837 print type(pool.getconn())
1838 #--------------------------------------------------------------------
def test_list_args():
	"""Verify that a list works as the argument container for cursor.execute()."""
	default_dsn = get_default_dsn()
	link = get_connection(default_dsn, readonly=True)
	cursor = link.cursor()
	cursor.execute('select * from clin.clin_narrative where narrative = %s', ['a'])
1844 #--------------------------------------------------------------------
1845 - def test_sanitize_pg_regex():
1846 tests = [ 1847 ['(', '\\('] 1848 , ['[', '\\['] 1849 , [')', '\\)'] 1850 ] 1851 for test in tests: 1852 result = sanitize_pg_regex(test[0]) 1853 if result != test[1]: 1854 print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
1855 #--------------------------------------------------------------------
1856 - def test_is_pg_interval():
1857 status = True 1858 tests = [ 1859 [None, True], # None == NULL == succeeds ! 1860 [1, True], 1861 ['1', True], 1862 ['abc', False] 1863 ] 1864 1865 if not is_pg_interval(): 1866 print 'ERROR: is_pg_interval() returned "False", expected "True"' 1867 status = False 1868 1869 for test in tests: 1870 result = is_pg_interval(test[0]) 1871 if result != test[1]: 1872 print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 1873 status = False 1874 1875 return status
1876 #--------------------------------------------------------------------
def test_sanity_check_time_skew():
	"""Delegate to sanity_check_time_skew() to exercise it interactively."""
	sanity_check_time_skew()
1879 #--------------------------------------------------------------------
1880 - def test_keyword_expansion():
1881 print "keywords, from database:" 1882 print get_text_expansion_keywords() 1883 print "keywords, cached:" 1884 print get_text_expansion_keywords() 1885 print "'$keyword' expands to:" 1886 print expand_keyword(keyword = u'$dvt')
1887 #--------------------------------------------------------------------
1888 - def test_get_foreign_key_details():
1889 for row in get_foreign_keys2column ( 1890 schema = u'dem', 1891 table = u'identity', 1892 column = u'pk' 1893 ): 1894 print '%s.%s references %s.%s.%s' % ( 1895 row['referencing_table'], 1896 row['referencing_column'], 1897 row['referenced_schema'], 1898 row['referenced_table'], 1899 row['referenced_column'] 1900 )
1901 #--------------------------------------------------------------------
1902 - def test_set_user_language():
1903 # (user, language, result, exception type) 1904 tests = [ 1905 # current user 1906 [None, 'de_DE', True], 1907 [None, 'lang_w/o_tx', False], 1908 [None, None, True], 1909 # valid user 1910 ['any-doc', 'de_DE', True], 1911 ['any-doc', 'lang_w/o_tx', False], 1912 ['any-doc', None, True], 1913 # invalid user 1914 ['invalid user', 'de_DE', None], 1915 ['invalid user', 'lang_w/o_tx', False], # lang checking happens before user checking 1916 ['invalid user', None, True] 1917 ] 1918 for test in tests: 1919 try: 1920 result = set_user_language(user = test[0], language = test[1]) 1921 if result != test[2]: 1922 print "test:", test 1923 print "result:", result, "expected:", test[2] 1924 except psycopg2.IntegrityError, e: 1925 if test[2] is None: 1926 continue 1927 print "test:", test 1928 print "expected exception" 1929 print "result:", e
1930 #--------------------------------------------------------------------
1931 - def test_get_schema_revision_history():
1932 for line in get_schema_revision_history(): 1933 print u' - '.join(line)
1934 #-------------------------------------------------------------------- 1935 if len(sys.argv) > 1 and sys.argv[1] == 'test': 1936 # run tests 1937 #test_file2bytea() 1938 #test_get_connection() 1939 #test_exceptions() 1940 #test_ro_queries() 1941 #test_request_dsn() 1942 #test_set_encoding() 1943 #test_connection_pool() 1944 #test_list_args() 1945 #test_sanitize_pg_regex() 1946 #test_is_pg_interval() 1947 #test_sanity_check_time_skew() 1948 #test_keyword_expansion() 1949 #test_get_foreign_key_details() 1950 #test_set_user_language() 1951 test_get_schema_revision_history() 1952 1953 # ======================================================================= 1954 # $Log: gmPG2.py,v $ 1955 # Revision 1.127 2010/02/02 13:53:16 ncq 1956 # - bump default database 1957 # 1958 # Revision 1.126 2010/01/31 16:39:17 ncq 1959 # - we do still need our own ts with tz and seconds handler as the one in psycopg2 is buggy 1960 # 1961 # Revision 1.125 2010/01/21 08:41:37 ncq 1962 # - in file -> bytea only close conn if we opened it ourselves 1963 # 1964 # Revision 1.124 2010/01/11 22:02:49 ncq 1965 # - properly log stack trace 1966 # 1967 # Revision 1.123 2010/01/06 14:38:17 ncq 1968 # - log database size 1969 # 1970 # Revision 1.122 2009/12/21 15:02:18 ncq 1971 # - fix typo 1972 # 1973 # Revision 1.121 2009/12/03 17:46:37 ncq 1974 # - somewhat better logging in run_rw_queries 1975 # 1976 # Revision 1.120 2009/12/01 22:06:22 ncq 1977 # - adjust v12 hash 1978 # 1979 # Revision 1.119 2009/11/19 15:06:50 ncq 1980 # - add 0.6/v12 client/server mapping and database hash 1981 # 1982 # Revision 1.118 2009/11/06 15:08:13 ncq 1983 # - expect check-function-bodies to be on 1984 # 1985 # Revision 1.117 2009/09/01 22:24:35 ncq 1986 # - better comment 1987 # 1988 # Revision 1.116 2009/08/24 20:11:27 ncq 1989 # - bump db version 1990 # - fix tag creation 1991 # - provider inbox: 1992 # enable filter-to-active-patient, 1993 # listen to new signal, 1994 # use cInboxMessage class 1995 # - properly 
constrain LOINC phrasewheel SQL 1996 # - include v12 scripts in release 1997 # - install arriba jar to /usr/local/bin/ 1998 # - check for table existence in audit schema generator 1999 # - include dem.message inbox with additional generic signals 2000 # 2001 # Revision 1.115 2009/07/30 12:02:30 ncq 2002 # - better error handling 2003 # 2004 # Revision 1.114 2009/07/23 16:32:01 ncq 2005 # - get_current_user_language 2006 # 2007 # Revision 1.113 2009/07/02 20:48:24 ncq 2008 # - log creation/closure of connections with PID 2009 # 2010 # Revision 1.112 2009/06/29 15:01:33 ncq 2011 # - better wording re time zones 2012 # 2013 # Revision 1.111 2009/06/11 13:03:52 ncq 2014 # - add proper hash for v11 2015 # 2016 # Revision 1.110 2009/06/04 16:26:22 ncq 2017 # - normalize login.host 2018 # 2019 # Revision 1.109 2009/05/24 16:28:06 ncq 2020 # - better output 2021 # 2022 # Revision 1.108 2009/05/22 11:00:47 ncq 2023 # - gm_schema_revision -> gm.schema_revision 2024 # 2025 # Revision 1.107 2009/04/03 09:34:26 ncq 2026 # - bump DB version 2027 # 2028 # Revision 1.106 2009/03/18 14:28:49 ncq 2029 # - add 0.5 -> v11 2030 # - properly handle unfound timezone 2031 # 2032 # Revision 1.105 2009/03/10 14:19:29 ncq 2033 # - improve comment 2034 # 2035 # Revision 1.104 2009/02/24 10:19:21 ncq 2036 # - improved TZ caster 2037 # 2038 # Revision 1.103 2009/02/20 15:42:51 ncq 2039 # - warn on negative non-whole-number timezones as those are 2040 # currently wrongly calculated by psycopg2 2041 # 2042 # Revision 1.102 2009/02/18 13:45:04 ncq 2043 # - narrow down exception handler for odd time zones 2044 # 2045 # Revision 1.101 2009/02/17 17:46:42 ncq 2046 # - work around Python datetime not being able 2047 # to use time zones with seconds 2048 # 2049 # Revision 1.100 2009/02/17 08:00:46 ncq 2050 # - get_keyword_expansion_candidates 2051 # 2052 # Revision 1.99 2009/02/10 18:39:11 ncq 2053 # - test time zone for usability, not just for settability ... 
2054 # - get_schema_revision_history and use it 2055 # 2056 # Revision 1.98 2009/02/05 13:00:56 ncq 2057 # - add v10 hashes 2058 # 2059 # Revision 1.97 2008/12/25 17:43:08 ncq 2060 # - add exception msg extraction function 2061 # 2062 # Revision 1.96 2008/12/25 16:54:01 ncq 2063 # - support around user db language handling 2064 # 2065 # Revision 1.95 2008/12/17 21:55:38 ncq 2066 # - get_foreign_keys2column 2067 # - only check HIPAA compliance when --hipaa was given 2068 # 2069 # Revision 1.94 2008/12/12 16:35:06 ncq 2070 # - add HIPAA compliance to db settings checks, needs configurability 2071 # 2072 # Revision 1.93 2008/12/01 12:13:24 ncq 2073 # - log exeption on __str__ in auth error so we have *something* 2074 # on encoding errors 2075 # 2076 # Revision 1.92 2008/11/20 18:45:10 ncq 2077 # - modernize read/write conn mode setting 2078 # 2079 # Revision 1.91 2008/11/17 23:12:29 ncq 2080 # - need to unicodify "$TZ" 2081 # 2082 # Revision 1.90 2008/10/22 12:08:17 ncq 2083 # - improved query logging 2084 # 2085 # Revision 1.89 2008/10/12 15:40:46 ncq 2086 # - cleanup 2087 # - add mapping for client to database version 2088 # 2089 # Revision 1.88 2008/09/02 20:19:37 ncq 2090 # - send_maintenance_* 2091 # 2092 # Revision 1.87 2008/08/21 10:21:40 ncq 2093 # - update v9 hash 2094 # 2095 # Revision 1.86 2008/07/30 12:51:14 ncq 2096 # - set_default_client_timezone needs to set _sql_set_timezone, 2097 # too, as noticed by Gour 2098 # 2099 # Revision 1.85 2008/07/24 13:58:08 ncq 2100 # - import SQL error codes 2101 # 2102 # Revision 1.84 2008/07/17 21:31:00 ncq 2103 # - missing arg for log.exception 2104 # 2105 # Revision 1.83 2008/07/13 17:15:30 ncq 2106 # - update v9 database hash 2107 # 2108 # Revision 1.82 2008/07/13 16:04:54 ncq 2109 # - use views when handling keyword expansions 2110 # - add/delete/edit_text_expansion, 2111 # 2112 # Revision 1.81 2008/07/10 19:52:50 ncq 2113 # - add expansion keyword functions with tests 2114 # 2115 # Revision 1.80 2008/06/24 16:54:20 
ncq 2116 # - make v9 database hash known 2117 # 2118 # Revision 1.79 2008/06/15 20:32:46 ncq 2119 # - improve sanitize_pg_regex 2120 # 2121 # Revision 1.78 2008/06/13 10:32:55 ncq 2122 # - better time zone detection logging 2123 # 2124 # Revision 1.77 2008/05/31 17:45:03 ncq 2125 # - log other sorts of time zone errors, too 2126 # 2127 # Revision 1.76 2008/05/19 15:55:01 ncq 2128 # - some cleanup 2129 # - redo timezone detection since numeric timezones will do the right 2130 # thing *now* but will not allow for DST boundary crossing detection 2131 # and correction, so try to find a TZ name first, but fallback to 2132 # numeric offset if no name is found and verifiable against PostgreSQL 2133 # - don't close() RO conns and raise an error if we do (unless we 2134 # *know* what we are doing) 2135 # 2136 # Revision 1.75 2008/04/11 12:21:59 ncq 2137 # - support link_obj in get_child_tables() 2138 # 2139 # Revision 1.74 2008/03/20 15:29:13 ncq 2140 # - sanity_check_time_skew() and test 2141 # 2142 # Revision 1.73 2008/03/11 16:59:54 ncq 2143 # - push readonly setting down into get_raw_connection() so callers 2144 # can now decide what to request since default transactions are 2145 # readonly now 2146 # - add file2bytea() test 2147 # 2148 # Revision 1.72 2008/03/06 21:24:02 ncq 2149 # - add shutdown() code 2150 # 2151 # Revision 1.71 2008/03/02 11:26:25 ncq 2152 # - cleanup 2153 # 2154 # Revision 1.70 2008/02/25 17:32:50 ncq 2155 # - improve database settings sanity checks 2156 # 2157 # Revision 1.69 2008/01/14 20:29:16 ncq 2158 # - improve exception type detection in get_raw_connection() 2159 # 2160 # Revision 1.68 2008/01/13 01:15:58 ncq 2161 # - remove faulty flush() 2162 # 2163 # Revision 1.67 2008/01/07 19:51:04 ncq 2164 # - better comments 2165 # - some cleanup 2166 # - bump db version 2167 # - add __unicode__ to exceptions 2168 # - improve test suite 2169 # 2170 # Revision 1.66 2007/12/26 18:34:53 ncq 2171 # - check for lc_messages being C 2172 # 2173 # Revision 
1.65 2007/12/12 16:17:15 ncq 2174 # - better logger names 2175 # 2176 # Revision 1.64 2007/12/11 15:38:11 ncq 2177 # - use std logging 2178 # 2179 # Revision 1.63 2007/12/06 13:07:19 ncq 2180 # - add v8 schema hash 2181 # 2182 # Revision 1.62 2007/12/04 16:14:24 ncq 2183 # - use gmAuthWidgets 2184 # 2185 # Revision 1.61 2007/12/04 15:11:20 ncq 2186 # - sanity_check_database_settings() 2187 # - force sql_inheritance to on after connect 2188 # 2189 # Revision 1.60 2007/11/09 14:39:10 ncq 2190 # - log schema dump if verbose on failed version detection 2191 # 2192 # Revision 1.59 2007/10/25 16:41:30 ncq 2193 # - is_pg_interval() + test 2194 # 2195 # Revision 1.58 2007/10/22 12:37:59 ncq 2196 # - default db change 2197 # 2198 # Revision 1.57 2007/09/24 18:29:42 ncq 2199 # - select 1,2; will return two columns with the same name ! 2200 # hence, mapping names to column indices in a dict will not work :-( 2201 # fix breakage but don't really support it, either 2202 # 2203 # Revision 1.56 2007/09/18 22:53:26 ncq 2204 # - enhance file2bytea to accept conn argument 2205 # 2206 # Revision 1.55 2007/09/17 21:46:28 ncq 2207 # - make hash for v7 known 2208 # 2209 # Revision 1.54 2007/08/31 14:28:29 ncq 2210 # - improved docs 2211 # 2212 # Revision 1.53 2007/08/08 21:25:39 ncq 2213 # - improve bytea2file() 2214 # 2215 # Revision 1.52 2007/07/22 09:03:33 ncq 2216 # - bytea2file(_object)() 2217 # - file2bytea() 2218 # 2219 # Revision 1.51 2007/07/03 15:53:50 ncq 2220 # - import re as regex 2221 # - sanitize_pg_regex() and test 2222 # 2223 # Revision 1.50 2007/06/28 12:35:38 ncq 2224 # - optionalize SQL IN tuple adaptation as it's now builtin to 0.2.6 psycopg2 2225 # 2226 # Revision 1.49 2007/06/15 10:24:24 ncq 2227 # - add a test to the test suite 2228 # 2229 # Revision 1.48 2007/06/12 16:02:12 ncq 2230 # - fix case when there are no args for execute() 2231 # 2232 # Revision 1.47 2007/06/11 20:24:18 ncq 2233 # - bump database version 2234 # 2235 # Revision 1.46 2007/05/07 16:45:12 
ncq 2236 # - add v6 schema hash 2237 # 2238 # Revision 1.45 2007/05/07 16:28:34 ncq 2239 # - use database maintenance functions in schema "gm" 2240 # 2241 # Revision 1.44 2007/04/27 13:19:58 ncq 2242 # - get_schema_structure() 2243 # 2244 # Revision 1.43 2007/04/02 18:36:17 ncq 2245 # - fix comment 2246 # 2247 # Revision 1.42 2007/04/02 14:31:17 ncq 2248 # - v5 -> v6 2249 # 2250 # Revision 1.41 2007/04/01 15:27:09 ncq 2251 # - safely get_encoding() 2252 # 2253 # Revision 1.40 2007/03/26 16:08:06 ncq 2254 # - added v5 hash 2255 # 2256 # Revision 1.39 2007/03/08 11:37:24 ncq 2257 # - simplified gmLogin 2258 # - log PG settings on first connection if verbose 2259 # 2260 # Revision 1.38 2007/03/01 14:05:53 ncq 2261 # - rollback in run_ro_queries() even if no error occurred such that 2262 # we don't stay IDLE IN TRANSACTION 2263 # 2264 # Revision 1.37 2007/03/01 14:03:53 ncq 2265 # - in run_ro_queries() we now need to rollback failed transactions due to 2266 # the connections being pooled - or else abort state could carry over into 2267 # the next use of that connection - since transactions aren't really 2268 # in need of ending 2269 # 2270 # Revision 1.36 2007/02/19 15:00:53 ncq 2271 # - restrict pooling to the default DSN, too 2272 # 2273 # Revision 1.35 2007/02/18 16:56:21 ncq 2274 # - add connection pool for read-only connections ... 2275 # 2276 # Revision 1.34 2007/02/06 12:11:25 ncq 2277 # - gnumed_v5 2278 # 2279 # Revision 1.33 2007/01/24 11:03:55 ncq 2280 # - add sslmode=prefer to DSN 2281 # 2282 # Revision 1.32 2007/01/23 14:03:14 ncq 2283 # - add known v4 schema hash - backport from 0.2.4 2284 # 2285 # Revision 1.31 2007/01/17 13:26:02 ncq 2286 # - note on MDY/DMY handling 2287 # - slightly easier python datetime adaptation 2288 # 2289 # Revision 1.30 2007/01/16 12:45:21 ncq 2290 # - properly import/adapt mx.DateTime 2291 # 2292 # Revision 1.29 2007/01/16 10:28:49 ncq 2293 # - do not FAIL on mxDT timezone string being ??? 
as 2294 # it should then be assumed to be local time 2295 # - use mx.DateTime.ISO.str() to include timestamp in output 2296 # 2297 # Revision 1.28 2007/01/04 22:51:10 ncq 2298 # - change hash for unreleased v4 2299 # 2300 # Revision 1.27 2007/01/03 11:54:16 ncq 2301 # - log successful schema hash, too 2302 # 2303 # Revision 1.26 2007/01/02 19:47:29 ncq 2304 # - support (and use) <link_obj> in get_schema_version() 2305 # 2306 # Revision 1.25 2007/01/02 16:17:13 ncq 2307 # - slightly improved logging 2308 # - fix fatal typo in set_default_login() 2309 # - add <link_obj> support to database_schema_compatible() 2310 # - really apply end_tx to run_rw_queries ! 2311 # 2312 # Revision 1.24 2006/12/29 16:25:35 ncq 2313 # - add PostgreSQL version handling 2314 # 2315 # Revision 1.23 2006/12/27 16:41:15 ncq 2316 # - make sure python datetime adapter does not put ',' into string 2317 # 2318 # Revision 1.22 2006/12/22 16:54:44 ncq 2319 # - init gmDateTime if necessary 2320 # 2321 # Revision 1.21 2006/12/21 17:44:54 ncq 2322 # - use gmDateTime.current_iso_timezone_*string* as that is ISO conformant 2323 # 2324 # Revision 1.20 2006/12/21 10:52:52 ncq 2325 # - fix test suite 2326 # - set default client encoding to "UTF8" which is more precise than "UNICODE" 2327 # - use gmDateTime for timezone handling thereby fixing the time.daylight error 2328 # 2329 # Revision 1.19 2006/12/18 17:39:55 ncq 2330 # - make v3 database have known hash 2331 # 2332 # Revision 1.18 2006/12/18 14:55:40 ncq 2333 # - u''ify a query 2334 # 2335 # Revision 1.17 2006/12/15 15:23:50 ncq 2336 # - improve database_schema_compatible() 2337 # 2338 # Revision 1.16 2006/12/12 13:14:32 ncq 2339 # - u''ify queries 2340 # 2341 # Revision 1.15 2006/12/06 20:32:09 ncq 2342 # - careful about port.strip() 2343 # 2344 # Revision 1.14 2006/12/06 16:06:30 ncq 2345 # - cleanup 2346 # - handle empty port def in make_psycopg2_dsn() 2347 # - get_col_defs() 2348 # - get_col_indices() 2349 # - get_col_names() 2350 # - 
table_exists() 2351 # 2352 # Revision 1.13 2006/12/05 13:58:45 ncq 2353 # - add get_schema_version() 2354 # - improve handling of known schema hashes 2355 # - register UNICODEARRAY psycopg2 extension 2356 # 2357 # Revision 1.12 2006/11/24 09:51:16 ncq 2358 # - whitespace fix 2359 # 2360 # Revision 1.11 2006/11/14 16:56:23 ncq 2361 # - improved (and documented) rationale for registering SQL_IN adapter on tuples only 2362 # 2363 # Revision 1.10 2006/11/07 23:52:48 ncq 2364 # - register our own adapters for mx.DateTime and datetime.datetime so 2365 # we can solve the "ss,ms" issue in locale-aware str(timestamp) 2366 # 2367 # Revision 1.9 2006/11/07 00:30:36 ncq 2368 # - activate SQL_IN for lists only 2369 # 2370 # Revision 1.8 2006/11/05 17:03:26 ncq 2371 # - register SQL_INI adapter for tuples and lists 2372 # 2373 # Revision 1.7 2006/10/24 13:20:07 ncq 2374 # - fix get_current_user() 2375 # - add default login handling 2376 # - remove set_default_dsn() - now use set_default_login() which will create the DSN, too 2377 # - slighly less verbose logging for log size sanity 2378 # 2379 # Revision 1.6 2006/10/23 13:22:38 ncq 2380 # - add get_child_tables() 2381 # 2382 # Revision 1.5 2006/10/10 07:38:22 ncq 2383 # - tighten checks on psycopg2 capabilities 2384 # 2385 # Revision 1.4 2006/10/08 09:23:40 ncq 2386 # - default encoding UNICODE, not utf8 2387 # - add database_schema_compatible() 2388 # - smartify set_default_client_encoding() 2389 # - support <verbose> in run_ro_queries() 2390 # - non-fatally warn on non-unicode queries 2391 # - register unicode type so psycopg2 knows how to deal with u'' 2392 # - improve test suite 2393 # 2394 # Revision 1.3 2006/09/30 11:57:48 ncq 2395 # - document get_raw_connection() 2396 # 2397 # Revision 1.2 2006/09/30 11:52:40 ncq 2398 # - factor out get_raw_connection() 2399 # - reorder conecction customization in get_connection() 2400 # 2401 # Revision 1.1 2006/09/21 19:18:35 ncq 2402 # - first psycopg2 version 2403 # 2404 # 2405