Package Gnumed :: Package pycommon :: Module gmPG2
[frames] | no frames]

Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  __version__ = "$Revision: 1.127 $" 
  15  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  16  __license__ = 'GPL v2 or later (details at http://www.gnu.org)' 
  17   
  18  ### imports ### 
  19  # stdlib 
  20  import time, locale, sys, re as regex, os, codecs, types, datetime as pydt, logging 
  21   
  22   
  23  # GNUmed 
  24  if __name__ == '__main__': 
  25          sys.path.insert(0, '../../') 
  26  from Gnumed.pycommon import gmLoginInfo, gmExceptions, gmDateTime, gmBorg, gmI18N, gmLog2 
  27  from Gnumed.pycommon.gmTools import prompted_input 
  28   
  29  _log = logging.getLogger('gm.db') 
  30  _log.info(__version__) 
  31   
  32   
  33  # 3rd party 
# 3rd party
try:
	import psycopg2 as dbapi
except ImportError:
	_log.exception("Python database adapter psycopg2 not found.")
	print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server."
	raise
### imports ###


_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
# refuse to run against a psycopg2 build lacking the features this module relies on
if not (float(dbapi.apilevel) >= 2.0):
	raise ImportError('gmPG2: supported DB-API level too low')
if not (dbapi.threadsafety > 0):
	raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
if not (dbapi.paramstyle == 'pyformat'):
	raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
# psycopg2 encodes feature flags into its version string, e.g. "2.0.6 (dt dec pq3 ext)",
# so probe the string for the flags we need; .index() raises ValueError if absent
try:
	dbapi.__version__.index('dt')
except ValueError:
	raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
	dbapi.__version__.index('ext')
except ValueError:
	raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
	dbapi.__version__.index('pq3')
except ValueError:
	raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')

import psycopg2.extras
import psycopg2.extensions
import psycopg2.pool
import psycopg2.errorcodes as sql_error_codes
  68   
  69  # ======================================================================= 
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# things timezone
_default_client_timezone = None			# default time zone for connections
_sql_set_timezone = None				# SQL used to apply the default time zone, set alongside it
_timestamp_template = "cast('%s' as timestamp with time zone)"		# MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone		# re-exported for the convenience of importers

# default connection parameters, filled in lazily on first use
_default_dsn = None
_default_login = None

# detected server version, filled in elsewhere
postgresql_version_string = None
postgresql_version = None			# accuracy: major.minor

# pool of read-only connections
__ro_conn_pool = None

# may login parameters be requested interactively when missing ?
auto_request_login_params = True
  88  # ======================================================================= 
  89  # global data 
  90  # ======================================================================= 
  91   
# schema version -> expected MD5 hash of gm.concat_table_structure()
known_schema_hashes = {
	0: 'not released, testing only',
	2: 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	3: 'e73718eaf230d8f1d2d01afa8462e176',
	4: '4428ccf2e54c289136819e701bb095ea',
	5: '7e7b093af57aea48c288e76632a382e5',	# ... old (v1) style hashes
	6: '90e2026ac2efd236da9c8608b8685b2d',	# new (v2) style hashes ...
	7: '6c9f6d3981483f8e9433df99d1947b27',
	8: '89b13a7af83337c3aad153b717e52360',
	9: '641a9b2be3c378ffc2bb2f0b1c9f051d',
	10: '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	11: '03042ae24f3f92877d986fb0a6184d76',
	12: '06183a6616db62257e22814007a8ed07',
	13: 'fab7c1ae408a6530c47f9b5111a0841e',
	14: 'e170d543f067d1ea60bfe9076b1560cf',
	15: '70012ff960b77ecdff4981c94b5b55b6'
}

# reverse map of the above (minus version 0): MD5 hash -> schema version
map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 2,
	'e73718eaf230d8f1d2d01afa8462e176': 3,
	'4428ccf2e54c289136819e701bb095ea': 4,
	'7e7b093af57aea48c288e76632a382e5': 5,
	'90e2026ac2efd236da9c8608b8685b2d': 6,
	'6c9f6d3981483f8e9433df99d1947b27': 7,
	'89b13a7af83337c3aad153b717e52360': 8,
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 9,
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 10,
	'03042ae24f3f92877d986fb0a6184d76': 11,
	'06183a6616db62257e22814007a8ed07': 12,
	'fab7c1ae408a6530c47f9b5111a0841e': 13,
	'e170d543f067d1ea60bfe9076b1560cf': 14,
	'70012ff960b77ecdff4981c94b5b55b6': 15
}

# client release branch -> minimum database schema version it requires
# (note: 1.1 maps to 16 which has no hash above yet - presumably unreleased at this point)
map_client_branch2required_db_version = {
	u'GIT tree': 0,
	u'0.3': 9,
	u'0.4': 10,
	u'0.5': 11,
	u'0.6': 12,
	u'0.7': 13,
	u'0.8': 14,
	u'0.9': 15,
	u'1.1': 16
}
 138   
# get columns and data types for a given table
# (parameters, in order: table schema, table name)
query_table_col_defs = u"""select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# get column names only for a given table
# (same parameters as above; intentionally a subset of query_table_col_defs)
query_table_attributes = u"""select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""
 162   
 163  # ======================================================================= 
 164  # module globals API 
 165  # ======================================================================= 
def set_default_client_encoding(encoding = None):
    """Remember <encoding> as the default client (wire) encoding.

    Verifies that both psycopg2 and the Python codec machinery
    can deal with <encoding> before accepting it.

    Raises ValueError or LookupError on unusable encodings.
    """
    # psycopg2 must know the wire encoding ...
    try:
        py_enc = psycopg2.extensions.encodings[encoding]
    except KeyError:
        raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
    # ... and Python must know the codec psycopg2 maps it to
    try:
        codecs.lookup(py_enc)
    except LookupError:
        _log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
        raise
    # FIXME: check encoding against the database
    # FIXME: - but we may not yet have access
    # FIXME: - psycopg2 will pull its encodings from the database eventually
    # it seems safe to set it
    global _default_client_encoding
    _log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
    _default_client_encoding = encoding
    return True
185 #---------------------------------------------------
def set_default_client_timezone(timezone = None):
    """Remember <timezone> as the default time zone for new connections.

    FIXME: use __validate
    """
    global _default_client_timezone
    global _sql_set_timezone
    # log before overwriting so old and new value both show up
    _log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
    _default_client_timezone = timezone
    _sql_set_timezone = u'set timezone to %s'
    return True
197 #---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):
    """Check whether <timezone> can be set AND used on <conn>.

    Returns True only if "set timezone" succeeds and a timestamptz
    literal can subsequently be selected under that zone.
    <conn> is committed first and left rolled back in any case.
    """
    _log.debug(u'validating time zone [%s]', timezone)

    cmd = u'set timezone to %(tz)s'
    args = {u'tz': timezone}

    conn.commit()
    curs = conn.cursor()
    is_valid = False
    try:
        curs.execute(cmd, args)
        _log.info(u'time zone [%s] is settable', timezone)
        # can we actually use it, though ?
        cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
        try:
            curs.execute(cmd)
            curs.fetchone()
            _log.info(u'time zone [%s] is usable', timezone)
            is_valid = True
        except Exception:
            # was a bare "except:" which would also have swallowed
            # SystemExit/KeyboardInterrupt
            _log.error('error using time zone [%s]', timezone)
    except dbapi.DataError:
        _log.warning(u'time zone [%s] is not settable', timezone)
    except Exception:
        # was a bare "except:", narrowed to Exception
        _log.error(u'failed to set time zone to [%s]', timezone)
        _log.exception(u'')

    curs.close()
    conn.rollback()

    return is_valid
230 #---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
    """Expand a timezone abbreviation into a full timezone name.

    Some timezone defs are abbreviations which "set time zone"
    does not accept, so try to map them to a full Area/Location
    style name via pg_timezone_names.

    Returns the expansion, or <timezone> unchanged if none found.
    <conn> is committed first and left rolled back in any case.
    """
    cmd = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
    args = {u'tz': timezone}

    conn.commit()
    curs = conn.cursor()

    result = timezone
    try:
        curs.execute(cmd, args)
        rows = curs.fetchall()
        if len(rows) > 0:
            result = rows[0][0]
            _log.debug(u'[%s] maps to [%s]', timezone, result)
    except Exception:
        # was a bare "except:" which would also have swallowed
        # SystemExit/KeyboardInterrupt
        _log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

    curs.close()
    conn.rollback()

    return result
262 #---------------------------------------------------
def __detect_client_timezone(conn=None):
    """This is run on the very first connection.

    Detects the client system's time zone, validates it against the
    backend and stores the winner in the module globals
    _default_client_timezone / _sql_set_timezone.
    """
    # FIXME: check whether server.timezone is the same
    # FIXME: value as what we eventually detect

    # we need gmDateTime to be initialized
    if gmDateTime.current_local_iso_numeric_timezone_string is None:
        gmDateTime.init()

    _log.debug('trying to detect timezone from system')

    # candidate order matters: $TZ (most specific, user-set) first,
    # then the locally detected zone name, each followed by its
    # expanded form if the expansion differs
    tz_candidates = []
    try:
        tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
        tz_candidates.append(tz)
        expanded = __expand_timezone(conn = conn, timezone = tz)
        if expanded != tz:
            tz_candidates.append(expanded)
    except KeyError:
        # $TZ not set - not an error
        pass

    tz_candidates.append(gmDateTime.current_local_timezone_name)
    expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
    if expanded != gmDateTime.current_local_timezone_name:
        tz_candidates.append(expanded)

    _log.debug('candidates: %s', str(tz_candidates))

    # find best among candidates - first one the backend validates wins
    global _default_client_timezone
    global _sql_set_timezone
    found = False
    for tz in tz_candidates:
        if __validate_timezone(conn = conn, timezone = tz):
            _default_client_timezone = tz
            _sql_set_timezone = u'set timezone to %s'
            found = True
            break

    if not found:
        # fall back to a numeric UTC offset which needs a different
        # SQL template ("set time zone interval ...")
        _default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
        _sql_set_timezone = u"set time zone interval %s hour to minute"

    _log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
308 # ======================================================================= 309 # login API 310 # =======================================================================
311 -def __request_login_params_tui():
312 """Text mode request of database login parameters""" 313 import getpass 314 login = gmLoginInfo.LoginInfo() 315 316 print "\nPlease enter the required login parameters:" 317 try: 318 login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '') 319 login.database = prompted_input(prompt = "database", default = 'gnumed_v16') 320 login.user = prompted_input(prompt = "user name", default = '') 321 tmp = 'password for "%s" (not shown): ' % login.user 322 login.password = getpass.getpass(tmp) 323 login.port = prompted_input(prompt = "port", default = 5432) 324 except KeyboardInterrupt: 325 _log.warning("user cancelled text mode login dialog") 326 print "user cancelled text mode login dialog" 327 raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!")) 328 329 return login
330 #---------------------------------------------------
def __request_login_params_gui_wx():
    """GUI (wx) input request for database login parameters.

    Returns gmLoginInfo.LoginInfo object
    """
    import wx
    # OK, wxPython was already loaded. But has the main Application instance
    # been initialized yet ? if not, the exception will kick us out
    if wx.GetApp() is None:
        raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

    # launch the login dialog; if wx was not initialized / there is no
    # main App loop an exception should be raised anyway
    import gmAuthWidgets
    dialog = gmAuthWidgets.cLoginDialog(None, -1)
    dialog.ShowModal()
    login = dialog.panel.GetLoginInfo()
    dialog.Destroy()

    # user cancelled or something else went wrong
    if login is None:
        raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

    return login
355 #---------------------------------------------------
def request_login_params():
    """Request login parameters for database connection.

    Tries the wx GUI dialog when running under X, falls back
    to the text mode dialog otherwise.
    """
    # do we auto-request parameters at all ?
    if not auto_request_login_params:
        raise Exception('Cannot request login parameters.')

    # are we inside X ?
    # (if we aren't wxGTK will crash hard at
    #  C-level with "can't open Display")
    # note: has_key() is deprecated, use "in"
    if 'DISPLAY' in os.environ:
        # try wxPython GUI
        try:
            return __request_login_params_gui_wx()
        except Exception:
            # was a bare "except: pass" - at least log WHY the GUI path failed
            _log.exception('cannot request login parameters via wx GUI')

    # well, either we are on the console or
    # wxPython does not work, use text mode
    return __request_login_params_tui()
373 374 # ======================================================================= 375 # DSN API 376 # -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
    """Assemble a psycopg2/libpq DSN string from the given parts.

    Parts which are None or whitespace-only are left out;
    "sslmode=prefer" is always appended.
    """
    candidates = [
        ('dbname', database),
        ('host', host),
        ('port', None if port is None else str(port)),
        ('user', user),
        ('password', password)
    ]
    dsn_parts = [
        '%s=%s' % (key, val)
        for key, val in candidates
        if (val is not None) and (val.strip() != '')
    ]
    dsn_parts.append('sslmode=prefer')
    return ' '.join(dsn_parts)
398 # ------------------------------------------------------
def get_default_login():
    """Return the login behind the default DSN.

    May trigger interactive requesting of login parameters.
    """
    # this sets _default_login as a side effect if needed
    get_default_dsn()
    return _default_login
403 # ------------------------------------------------------
def get_default_dsn():
    """Return the default DSN, requesting login parameters if not set yet."""
    global _default_dsn
    if _default_dsn is None:
        # side effect of set_default_login(): _default_dsn gets set
        set_default_login(login = request_login_params())
    return _default_dsn
413 # ------------------------------------------------------
def set_default_login(login=None):
    """Make <login> the default login and derive the default DSN from it.

    An empty host is normalized to None (= local socket connection).
    Returns False if <login> is None, True otherwise.
    """
    if login is None:
        return False

    # empty host means "connect via UNIX domain socket"
    if login.host is not None:
        if login.host.strip() == u'':
            login.host = None

    global _default_login
    # BUGFIX: log BEFORE overwriting the global, otherwise the
    # "from" value in the log line is already the new value
    _log.info('setting default login from [%s] to [%s]' % (_default_login, login))
    _default_login = login

    dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

    global _default_dsn
    # BUGFIX: same here - log old DSN before replacing it
    _log.info('setting default DSN from [%s] to [%s]' % (_default_dsn, dsn))
    _default_dsn = dsn

    return True
433 # ======================================================================= 434 # netadata API 435 # =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
    """Check whether the connected database schema matches <version>.

    Compares the MD5 hash of the concatenated table structure against
    the hash recorded for <version> in known_schema_hashes.
    version == 0 denotes the development schema (hashed as 9999).

    Returns True on match, False (with extensive logging) on mismatch.
    """
    expected_hash = known_schema_hashes[version]
    if version == 0:
        # devel tree schema is hashed under the pseudo version 9999
        args = {'ver': 9999}
    else:
        args = {'ver': version}
    rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{
            'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
            'args': args
        }]
    )
    if rows[0]['md5'] != expected_hash:
        _log.error('database schema version mismatch')
        _log.error('expected: %s (%s)' % (version, expected_hash))
        _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
        if verbose:
            _log.debug('schema dump follows:')
            # NOTE(review): .split() splits on ANY whitespace so this logs
            # one token per line - .split('\n') may have been intended; confirm
            for line in get_schema_structure(link_obj=link_obj).split():
                _log.debug(line)
            _log.debug('schema revision history dump follows:')
            for line in get_schema_revision_history(link_obj=link_obj):
                _log.debug(u' - '.join(line))
        return False
    _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
    return True
463 #------------------------------------------------------------------------
def get_schema_version(link_obj=None):
    """Map the current database schema hash to its version number.

    Returns a descriptive unicode string for unknown hashes.
    """
    rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
    md5_sum = rows[0]['md5']
    if md5_sum in map_schema_hash2version:
        return map_schema_hash2version[md5_sum]
    return u'unknown database schema version, MD5 hash is [%s]' % md5_sum
470 #------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
    """Return the concatenated database schema structure as one string."""
    rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{'cmd': u'select gm.concat_table_structure()'}]
    )
    return rows[0][0]
474 #------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):
    """Return (imported, version, filename) rows ordered by import time."""
    cmd = u"""
select
	imported::text,
	version,
	filename
from gm.schema_revision
order by imported
"""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
    return rows
486 #------------------------------------------------------------------------
def get_current_user():
    """Return the database account the current connection runs as."""
    rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
    return rows[0][0]
490 #------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
    """Get the foreign keys pointing to schema.table.column.

    Does not properly work with multi-column FKs.
    GNUmed doesn't use any, however.
    """
    cmd = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""
    rows, idx = run_ro_queries (
        link_obj = link_obj,
        queries = [{
            'cmd': cmd,
            'args': {
                'schema': schema,
                'tbl': table,
                'col': column
            }
        }]
    )
    return rows
542 #------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
    """Return child tables of <table>."""
    cmd = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
    args = {'schema': schema, 'table': table}
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
    return rows
564 #------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
    """Check whether <schema>.<table> exists as a base table.

    Returns True or False.
    """
    cmd = u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
    return rows[0][0]
577 #------------------------------------------------------------------------
def get_col_indices(cursor = None):
    """Map the column names of the last query on <cursor> to their indices.

    Returns None if the cursor carries no result description
    (unused cursor or the last query did not select rows).
    """
    if cursor.description is None:
        _log.error('no result description available: unused cursor or last query did not select rows')
        return None
    col_indices = {}
    # enumerate() instead of a hand-rolled counter
    for col_index, col_desc in enumerate(cursor.description):
        col_name = col_desc[0]
        # a query like "select 1,2;" will return two columns of the same name !
        # hence adjust to that, note, however, that dict-style access won't work
        # on results of such queries ...
        # (has_key() is deprecated and gone in Python 3 - use "in")
        if col_name in col_indices:
            col_name = '%s_%s' % (col_name, col_index)
        col_indices[col_name] = col_index
    return col_indices
595 #------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
    """Return [list of column names, {column name: data type}] for <schema>.<table>."""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
    col_names = []
    col_type = {}
    for row in rows:
        name = row[0]
        udt = row[1]
        col_names.append(name)
        # array types come back as "_<base type>" - map to "<base type>[]"
        if udt.startswith('_'):
            col_type[name] = udt[1:] + '[]'
        else:
            col_type[name] = udt
    return [col_names, col_type]
611 #------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
    """Return column attributes of table"""
    rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
    return [ row[0] for row in rows ]
619 620 #------------------------------------------------------------------------ 621 # i18n functions 622 #------------------------------------------------------------------------
def export_translations_from_database(filename=None):
    """Dump all database string translations into a replayable SQL script.

    The script contains one i18n.upd_tx() call per translation and is
    meant to be emailed to the developers / replayed via psql.
    Returns True.
    """
    tx_file = codecs.open(filename, 'wb', 'utf8')
    tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
    tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
    tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
    tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
    tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
    # BUGFIX: the statement was missing its terminating semicolon so psql
    # would have merged it with the following statement when replaying
    tx_file.write(u'set default_transaction_read_only to off;\n\n')
    tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

    cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
    rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
    for row in rows:
        line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
            row['lang'].replace("'", "\\'"),
            row['orig'].replace("'", "\\'"),
            row['trans'].replace("'", "\\'")
        )
        tx_file.write(line)
    tx_file.write(u'\n')

    # (escaped backslash for source consistency - emits the same "\set" bytes)
    tx_file.write(u'\\set ON_ERROR_STOP 1\n')
    tx_file.close()

    return True
648 #------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
    """Remove the translation of <original> into <language>. Returns True."""
    run_rw_queries (
        link_obj = link_obj,
        queries = [{
            'cmd': u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s',
            'args': {'lang': language, 'orig': original}
        }],
        return_data = False,
        end_tx = True
    )
    return True
654 655 #------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
    """Store/overwrite the translation of <original> into <language>.

    Returns the argument dict that was used.
    """
    args = {'lang': language, 'orig': original, 'trans': translation}
    run_rw_queries (
        queries = [{
            'cmd': u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)',
            'args': args
        }],
        return_data = False
    )
    return args
661 662 #------------------------------------------------------------------------
def get_translation_languages():
    """List the languages for which the database holds any translation."""
    cmd = u'select distinct lang from i18n.translations'
    rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
    return [ row[0] for row in rows ]
668 669 #------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):
    """Retrieve translatable strings, optionally restricted to <language>.

    Returns rows of (lang, orig, trans). <order_by> is a raw column
    list for the ORDER BY clause (defaults to "lang, orig").
    NOTE: <order_by> is interpolated into the SQL - internal use only,
    never pass untrusted input.
    """
    args = {'lang': language}
    _log.debug('language [%s]', language)

    # BUGFIX: this conditional was inverted - passing order_by = None
    # used to produce "ORDER BY None" while any explicitly requested
    # ordering was silently replaced by the default
    if order_by is None:
        order_by = u'ORDER BY lang, orig'
    else:
        order_by = u'ORDER BY %s' % order_by

    if language is None:
        cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		NULL as lang,
		ik.orig,
		NULL AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		it.lang,
		it.orig,
		it.trans
	FROM
		i18n.translations it

)) as translatable_strings
%s""" % order_by
    else:
        cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		%%(lang)s as lang,
		ik.orig,
		i18n._(ik.orig, %%(lang)s) AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		%%(lang)s as lang,
		it.orig,
		i18n._(it.orig, %%(lang)s) AS trans
	FROM
		i18n.translations it

)) AS translatable_strings
%s""" % order_by

    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

    if rows is None:
        _log.error('no translatable strings found')
    else:
        _log.debug('%s translatable strings found', len(rows))

    return rows
741 742 #------------------------------------------------------------------------
def get_current_user_language():
    """Return the i18n language currently set for the database user."""
    rows, idx = run_ro_queries(queries = [{'cmd': u'select i18n.get_curr_lang()'}])
    return rows[0][0]
747 748 #------------------------------------------------------------------------
def set_user_language(user=None, language=None):
    """Set the user language in the database.

    user = None: current db user
    language = None: unset

    Returns the truthiness of the last query's first result field,
    logging an error on failure.
    """
    _log.info('setting database language for user [%s] to [%s]', user, language)

    args = {
        'usr': user,
        'lang': language
    }

    if language is None:
        if user is None:
            queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
        else:
            queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
        # appended so the rows[0][0] check below has a True to succeed on
        # (presumably unset_curr_lang() does not return a usable boolean
        # itself - TODO confirm against the i18n schema)
        queries.append({'cmd': u'select True'})
    else:
        if user is None:
            queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
        else:
            queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

    rows, idx = run_rw_queries(queries = queries, return_data = True)

    # note: only the result of the LAST query is inspected here
    if not rows[0][0]:
        _log.error('cannot set database language to [%s] for user [%s]', language, user)

    return rows[0][0]
780 #------------------------------------------------------------------------
def force_user_language(language=None):
    """Set the user language in the database.

    - regardless of whether there is any translation available
    - only for the current user
    """
    _log.info('forcing database language for current db user to [%s]', language)
    cmd = u'select i18n.force_curr_lang(%(lang)s)'
    run_rw_queries(queries = [{'cmd': cmd, 'args': {'lang': language}}])
793 #------------------------------------------------------------------------ 794 #------------------------------------------------------------------------ 795 text_expansion_keywords = None 796
def get_text_expansion_keywords():
    """Return all text expansion keyword rows, loading and caching lazily."""
    global text_expansion_keywords
    if text_expansion_keywords is None:
        cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions"""
        rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
        text_expansion_keywords = rows
        _log.info('retrieved %s text expansion keywords', len(text_expansion_keywords))
    return text_expansion_keywords
809 #------------------------------------------------------------------------
def expand_keyword(keyword = None):
    """Return the current user's expansion for <keyword>, or None."""
    # Easter Egg ;-)
    if keyword == u'$$steffi':
        return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'

    cmd = u"""select expansion from clin.v_your_keyword_expansions where keyword = %(kwd)s"""
    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])

    if rows:
        return rows[0]['expansion']
    return None
823 #------------------------------------------------------------------------
def get_keyword_expansion_candidates(keyword = None):
    """Return known expansion keywords starting with <keyword>."""
    if keyword is None:
        return []
    # get_text_expansion_keywords() loads/returns the module level cache
    return [
        kwd['keyword']
        for kwd in get_text_expansion_keywords()
        if kwd['keyword'].startswith(keyword)
    ]
837 #------------------------------------------------------------------------
def add_text_expansion(keyword=None, expansion=None, public=None):
    """Create a keyword expansion unless one already exists.

    public truthy: create a public expansion (no owning staff member),
    otherwise a private one owned by the current staff member.
    Returns False if an expansion for <keyword> already exists.
    """
    # does a suitable expansion already exist ?
    if public:
        cmd = u"select 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
    else:
        cmd = u"select 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"
    rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
    if len(rows) != 0:
        return False

    # no - create it
    if public:
        cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, null)"""
    else:
        cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))"""
    rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword, 'exp': expansion}}])

    # invalidate the module level cache
    global text_expansion_keywords
    text_expansion_keywords = None

    return True
864 #------------------------------------------------------------------------
def delete_text_expansion(keyword):
    """Delete the expansion for <keyword> belonging to the current user."""
    cmd = u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and (
		(fk_staff = (select pk from dem.staff where db_user = current_user))
			or
		(fk_staff is null and owner = current_user)
	)"""
    rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])

    # invalidate the module level cache
    global text_expansion_keywords
    text_expansion_keywords = None
877 #------------------------------------------------------------------------
878 -def edit_text_expansion(keyword, expansion):
879 880 cmd1 = u""" 881 delete from clin.keyword_expansion where 882 keyword = %(kwd)s and 883 fk_staff = (select pk from dem.staff where db_user = current_user)""" 884 885 cmd2 = u""" 886 insert into clin.keyword_expansion (keyword, expansion, fk_staff) 887 values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))""" 888 889 rows, idx = run_rw_queries(queries = [ 890 {'cmd': cmd1, 'args': {'kwd': keyword}}, 891 {'cmd': cmd2, 'args': {'kwd': keyword, 'exp': expansion}}, 892 ]) 893 894 global text_expansion_keywords 895 text_expansion_keywords = None
896 # ======================================================================= 897 # query runners and helpers 898 # =======================================================================
899 -def send_maintenance_notification():
900 cmd = u'notify "db_maintenance_warning:"' 901 run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
902 #------------------------------------------------------------------------
903 -def send_maintenance_shutdown():
904 cmd = u'notify "db_maintenance_disconnect:"' 905 run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
906 #------------------------------------------------------------------------
907 -def is_pg_interval(candidate=None):
908 cmd = u'select %(candidate)s::interval' 909 try: 910 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}]) 911 return True 912 except: 913 cmd = u'select %(candidate)s::text::interval' 914 try: 915 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}]) 916 return True 917 except: 918 return False
919 #------------------------------------------------------------------------
920 -def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
921 outfile = file(filename, 'wb') 922 result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query) 923 outfile.close() 924 return result
925 #------------------------------------------------------------------------
926 -def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
927 """Store data from a bytea field into a file. 928 929 <data_query> 930 - dict {'cmd': ..., 'args': ...} 931 - 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..." 932 - 'args' must be a dict 933 - must return one row with one field of type bytea 934 <file> 935 - must be a file like Python object 936 <data_size> 937 - integer of the total size of the expected data or None 938 <data_size_query> 939 - dict {'cmd': ..., 'args': ...} 940 - cmd must be unicode 941 - must return one row with one field with the octet_length() of the data field 942 - used only when <data_size> is None 943 """ 944 if data_size == 0: 945 return True 946 947 # If the client sets an encoding other than the default we 948 # will receive encoding-parsed data which isn't the binary 949 # content we want. Hence we need to get our own connection. 950 # It must be a read-write one so that we don't affect the 951 # encoding for other users of the shared read-only 952 # connections. 953 # Actually, encodings shouldn't be applied to binary data 954 # (eg. bytea types) in the first place but that is only 955 # reported to be fixed > v7.4. 
956 # further tests reveal that at least on PG 8.0 this bug still 957 # manifests itself 958 conn = get_raw_connection(readonly=True) 959 960 if data_size is None: 961 rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query]) 962 data_size = rows[0][0] 963 if data_size in [None, 0]: 964 conn.rollback() 965 return True 966 967 _log.debug('expecting bytea data of size: [%s] bytes' % data_size) 968 _log.debug('using chunk size of: [%s] bytes' % chunk_size) 969 970 # chunk size of 0 means "retrieve whole field at once" 971 if chunk_size == 0: 972 chunk_size = data_size 973 _log.debug('chunk size [0] bytes: retrieving all data at once') 974 975 # Windoze sucks: it can't transfer objects of arbitrary size, 976 # anyways, we need to split the transfer, 977 # however, only possible if postgres >= 7.2 978 needed_chunks, remainder = divmod(data_size, chunk_size) 979 _log.debug('chunks to retrieve: [%s]' % needed_chunks) 980 _log.debug('remainder to retrieve: [%s] bytes' % remainder) 981 982 # try setting "bytea_output" 983 # - fails if not necessary 984 # - succeeds if necessary 985 try: 986 run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}]) 987 except dbapi.ProgrammingError: 988 _log.debug('failed to set bytea_output to "escape", not necessary') 989 990 # retrieve chunks, skipped if data size < chunk size, 991 # does this not carry the danger of cutting up multi-byte escape sequences ? 
992 # no, since bytea is binary, 993 # yes, since in bytea there are *some* escaped values, still 994 # no, since those are only escaped during *transfer*, not on-disk, hence 995 # only complete escape sequences are put on the wire 996 for chunk_id in range(needed_chunks): 997 chunk_start = (chunk_id * chunk_size) + 1 998 data_query['args']['start'] = chunk_start 999 data_query['args']['size'] = chunk_size 1000 try: 1001 rows, idx = run_ro_queries(link_obj=conn, queries=[data_query]) 1002 except: 1003 _log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size)) 1004 conn.rollback() 1005 raise 1006 # it would be a fatal error to see more than one result as ids are supposed to be unique 1007 file_obj.write(str(rows[0][0])) 1008 1009 # retrieve remainder 1010 if remainder > 0: 1011 chunk_start = (needed_chunks * chunk_size) + 1 1012 data_query['args']['start'] = chunk_start 1013 data_query['args']['size'] = remainder 1014 try: 1015 rows, idx = run_ro_queries(link_obj=conn, queries=[data_query]) 1016 except: 1017 _log.error('cannot retrieve remaining [%s] bytes' % remainder) 1018 conn.rollback() 1019 raise 1020 # it would be a fatal error to see more than one result as ids are supposed to be unique 1021 file_obj.write(str(rows[0][0])) 1022 1023 conn.rollback() 1024 return True
1025 #------------------------------------------------------------------------
1026 -def file2bytea(query=None, filename=None, args=None, conn=None):
1027 """Store data from a file into a bytea field. 1028 1029 The query must: 1030 - be in unicode 1031 - contain a format spec identifying the row (eg a primary key) 1032 matching <args> if it is an UPDATE 1033 - contain a format spec %(data)s::bytea 1034 """ 1035 # read data from file 1036 infile = file(filename, "rb") 1037 data_as_byte_string = infile.read() 1038 infile.close() 1039 if args is None: 1040 args = {} 1041 args['data'] = buffer(data_as_byte_string) 1042 del(data_as_byte_string) 1043 1044 # insert the data 1045 if conn is None: 1046 conn = get_raw_connection(readonly=False) 1047 close_conn = True 1048 else: 1049 close_conn = False 1050 1051 run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True) 1052 1053 if close_conn: 1054 conn.close() 1055 1056 return
1057 #------------------------------------------------------------------------
1058 -def sanitize_pg_regex(expression=None, escape_all=False):
1059 """Escape input for use in a PostgreSQL regular expression. 1060 1061 If a fragment comes from user input and is to be used 1062 as a regular expression we need to make sure it doesn't 1063 contain invalid regex patterns such as unbalanced ('s. 1064 1065 <escape_all> 1066 True: try to escape *all* metacharacters 1067 False: only escape those which render the regex invalid 1068 """ 1069 return expression.replace ( 1070 '(', '\(' 1071 ).replace ( 1072 ')', '\)' 1073 ).replace ( 1074 '[', '\[' 1075 ).replace ( 1076 '+', '\+' 1077 ).replace ( 1078 '.', '\.' 1079 ).replace ( 1080 '*', '\*' 1081 )
1082 #']', '\]', # not needed 1083 #------------------------------------------------------------------------
1084 -def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
1085 """Run read-only queries. 1086 1087 <queries> must be a list of dicts: 1088 [ 1089 {'cmd': <string>, 'args': <dict> or <tuple>}, 1090 {...}, 1091 ... 1092 ] 1093 """ 1094 if isinstance(link_obj, dbapi._psycopg.cursor): 1095 curs = link_obj 1096 curs_close = __noop 1097 tx_rollback = __noop 1098 elif isinstance(link_obj, dbapi._psycopg.connection): 1099 curs = link_obj.cursor() 1100 curs_close = curs.close 1101 tx_rollback = link_obj.rollback 1102 elif link_obj is None: 1103 conn = get_connection(readonly=True, verbose=verbose) 1104 curs = conn.cursor() 1105 curs_close = curs.close 1106 tx_rollback = conn.rollback 1107 else: 1108 raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj) 1109 1110 if verbose: 1111 _log.debug('cursor: %s', curs) 1112 1113 for query in queries: 1114 if type(query['cmd']) is not types.UnicodeType: 1115 print "run_ro_queries(): non-unicode query" 1116 print query['cmd'] 1117 try: 1118 args = query['args'] 1119 except KeyError: 1120 args = None 1121 try: 1122 curs.execute(query['cmd'], args) 1123 if verbose: 1124 _log.debug('ran query: [%s]', curs.query) 1125 _log.debug('PG status message: %s', curs.statusmessage) 1126 _log.debug('cursor description: %s', str(curs.description)) 1127 except: 1128 # FIXME: use .pgcode 1129 try: 1130 curs_close() 1131 except dbapi.InterfaceError: 1132 _log.exception('cannot close cursor') 1133 tx_rollback() # need to rollback so ABORT state isn't preserved in pooled conns 1134 _log.error('query failed: [%s]', curs.query) 1135 _log.error('PG status message: %s', curs.statusmessage) 1136 raise 1137 1138 data = None 1139 col_idx = None 1140 if return_data: 1141 data = curs.fetchall() 1142 if verbose: 1143 _log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data)) 1144 _log.debug('cursor description: %s', str(curs.description)) 1145 if get_col_idx: 1146 col_idx = get_col_indices(curs) 1147 1148 curs_close() 1149 tx_rollback() # rollback just so that we don't 
stay IDLE IN TRANSACTION forever 1150 return (data, col_idx)
1151 #------------------------------------------------------------------------
1152 -def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
1153 """Convenience function for running a transaction 1154 that is supposed to get committed. 1155 1156 <link_obj> 1157 can be either: 1158 - a cursor 1159 - a connection 1160 1161 <queries> 1162 is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>) 1163 to be executed as a single transaction, the last 1164 query may usefully return rows (such as a 1165 "select currval('some_sequence')" statement) 1166 1167 <end_tx> 1168 - controls whether the transaction is finalized (eg. 1169 committed/rolled back) or not, this allows the 1170 call to run_rw_queries() to be part of a framing 1171 transaction 1172 - if link_obj is a connection then <end_tx> will 1173 default to False unless it is explicitly set to 1174 True which is taken to mean "yes, you do have full 1175 control over the transaction" in which case the 1176 transaction is properly finalized 1177 - if link_obj is a cursor we CANNOT finalize the 1178 transaction because we would need the connection for that 1179 - if link_obj is None <end_tx> will, of course, always be True 1180 1181 <return_data> 1182 - if true, the returned data will include the rows 1183 the last query selected 1184 - if false, it returns None instead 1185 1186 <get_col_idx> 1187 - if true, the returned data will include a dictionary 1188 mapping field names to column positions 1189 - if false, the returned data returns None instead 1190 1191 method result: 1192 - returns a tuple (data, idx) 1193 - <data>: 1194 * (None, None) if last query did not return rows 1195 * ("fetchall() result", <index>) if last query returned any rows 1196 * for <index> see <get_col_idx> 1197 """ 1198 if isinstance(link_obj, dbapi._psycopg.cursor): 1199 conn_close = __noop 1200 conn_commit = __noop 1201 conn_rollback = __noop 1202 curs = link_obj 1203 curs_close = __noop 1204 elif isinstance(link_obj, dbapi._psycopg.connection): 1205 conn_close = __noop 1206 if end_tx: 1207 conn_commit = link_obj.commit 1208 conn_rollback = link_obj.rollback 1209 else: 
1210 conn_commit = __noop 1211 conn_rollback = __noop 1212 curs = link_obj.cursor() 1213 curs_close = curs.close 1214 elif link_obj is None: 1215 conn = get_connection(readonly=False) 1216 conn_close = conn.close 1217 conn_commit = conn.commit 1218 conn_rollback = conn.rollback 1219 curs = conn.cursor() 1220 curs_close = curs.close 1221 else: 1222 raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj) 1223 1224 for query in queries: 1225 if type(query['cmd']) is not types.UnicodeType: 1226 print "run_rw_queries(): non-unicode query" 1227 print query['cmd'] 1228 try: 1229 args = query['args'] 1230 except KeyError: 1231 args = None 1232 try: 1233 curs.execute(query['cmd'], args) 1234 except: 1235 _log.exception('error running RW query') 1236 gmLog2.log_stack_trace() 1237 try: 1238 curs_close() 1239 conn_rollback() 1240 conn_close() 1241 except dbapi.InterfaceError: 1242 _log.exception('cannot cleanup') 1243 raise 1244 raise 1245 1246 data = None 1247 col_idx = None 1248 if return_data: 1249 try: 1250 data = curs.fetchall() 1251 except: 1252 _log.exception('error fetching data from RW query') 1253 gmLog2.log_stack_trace() 1254 try: 1255 curs_close() 1256 conn_rollback() 1257 conn_close() 1258 except dbapi.InterfaceError: 1259 _log.exception('cannot cleanup') 1260 raise 1261 raise 1262 if get_col_idx: 1263 col_idx = get_col_indices(curs) 1264 1265 curs_close() 1266 conn_commit() 1267 conn_close() 1268 1269 return (data, col_idx)
1270 #------------------------------------------------------------------------
1271 -def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
1272 """Generates SQL for an INSERT query. 1273 1274 values: dict of values keyed by field to insert them into 1275 """ 1276 if schema is None: 1277 schema = u'public' 1278 1279 fields = values.keys() # that way val_snippets and fields really should end up in the same order 1280 val_snippets = [] 1281 for field in fields: 1282 val_snippets.append(u'%%(%s)s' % field) 1283 1284 if returning is None: 1285 returning = u'' 1286 return_data = False 1287 else: 1288 returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning) 1289 return_data = True 1290 1291 cmd = u"""\nINSERT INTO %s.%s ( 1292 %s 1293 ) VALUES ( 1294 %s 1295 )%s""" % ( 1296 schema, 1297 table, 1298 u',\n\t\t'.join(fields), 1299 u',\n\t\t'.join(val_snippets), 1300 returning 1301 ) 1302 1303 _log.debug(u'running SQL: >>>%s<<<', cmd) 1304 1305 return run_rw_queries ( 1306 link_obj = link_obj, 1307 queries = [{'cmd': cmd, 'args': values}], 1308 end_tx = end_tx, 1309 return_data = return_data, 1310 get_col_idx = get_col_idx, 1311 verbose = verbose 1312 )
1313 # ======================================================================= 1314 # connection handling API 1315 # -----------------------------------------------------------------------
1316 -class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
1317 """ 1318 GNUmed database connection pool. 1319 1320 Extends psycopg2's PersistentConnectionPool with 1321 a custom _connect() function. Supports one connection 1322 per thread - which also ties it to one particular DSN. 1323 """ 1324 #--------------------------------------------------
1325 - def _connect(self, key=None):
1326 1327 conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True) 1328 1329 conn.original_close = conn.close 1330 conn.close = _raise_exception_on_ro_conn_close 1331 1332 if key is not None: 1333 self._used[key] = conn 1334 self._rused[id(conn)] = key 1335 else: 1336 self._pool.append(conn) 1337 1338 return conn
1339 #--------------------------------------------------
1340 - def shutdown(self):
1341 for conn_key in self._used.keys(): 1342 _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid()) 1343 self._used[conn_key].original_close()
1344 # -----------------------------------------------------------------------
1345 -def get_raw_connection(dsn=None, verbose=False, readonly=True):
1346 """Get a raw, unadorned connection. 1347 1348 - this will not set any parameters such as encoding, timezone, datestyle 1349 - the only requirement is a valid DSN 1350 - hence it can be used for "service" connections 1351 for verifying encodings etc 1352 """ 1353 # FIXME: support verbose 1354 if dsn is None: 1355 dsn = get_default_dsn() 1356 1357 if u'host=salaam.homeunix' in dsn: 1358 raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.') 1359 1360 try: 1361 conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection) 1362 except dbapi.OperationalError, e: 1363 1364 t, v, tb = sys.exc_info() 1365 try: 1366 msg = e.args[0] 1367 except (AttributeError, IndexError, TypeError): 1368 raise 1369 1370 msg = unicode(msg, gmI18N.get_encoding(), 'replace') 1371 1372 if msg.find('fe_sendauth') != -1: 1373 raise cAuthenticationError, (dsn, msg), tb 1374 1375 if regex.search('user ".*" does not exist', msg) is not None: 1376 raise cAuthenticationError, (dsn, msg), tb 1377 1378 if msg.find('uthenti') != -1: 1379 raise cAuthenticationError, (dsn, msg), tb 1380 1381 raise 1382 1383 _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly) 1384 1385 # do first-time stuff 1386 global postgresql_version 1387 if postgresql_version is None: 1388 curs = conn.cursor() 1389 curs.execute (""" 1390 select 1391 (split_part(setting, '.', 1) || '.' 
|| split_part(setting, '.', 2))::numeric as version 1392 from pg_settings 1393 where name='server_version'""" 1394 ) 1395 postgresql_version = curs.fetchone()['version'] 1396 _log.info('PostgreSQL version (numeric): %s' % postgresql_version) 1397 try: 1398 curs.execute("select pg_size_pretty(pg_database_size(current_database()))") 1399 _log.info('database size: %s', curs.fetchone()[0]) 1400 except: 1401 pass 1402 if verbose: 1403 __log_PG_settings(curs=curs) 1404 curs.close() 1405 conn.commit() 1406 1407 if _default_client_timezone is None: 1408 __detect_client_timezone(conn = conn) 1409 1410 curs = conn.cursor() 1411 1412 # set access mode 1413 if readonly: 1414 _log.debug('access mode [READ ONLY]') 1415 cmd = 'set session characteristics as transaction READ ONLY' 1416 curs.execute(cmd) 1417 cmd = 'set default_transaction_read_only to on' 1418 curs.execute(cmd) 1419 else: 1420 _log.debug('access mode [READ WRITE]') 1421 cmd = 'set session characteristics as transaction READ WRITE' 1422 curs.execute(cmd) 1423 cmd = 'set default_transaction_read_only to off' 1424 curs.execute(cmd) 1425 1426 curs.close() 1427 conn.commit() 1428 1429 conn.is_decorated = False 1430 1431 return conn
1432 # =======================================================================
1433 -def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
1434 """Get a new connection. 1435 1436 This assumes the locale system has been initialized 1437 unless an encoding is specified. 1438 """ 1439 # FIXME: support pooled on RW, too 1440 # FIXME: for now, support the default DSN only 1441 if pooled and readonly and (dsn is None): 1442 global __ro_conn_pool 1443 if __ro_conn_pool is None: 1444 __ro_conn_pool = cConnectionPool ( 1445 minconn = 1, 1446 maxconn = 2, 1447 dsn = dsn, 1448 verbose = verbose 1449 ) 1450 conn = __ro_conn_pool.getconn() 1451 else: 1452 conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False) 1453 1454 if conn.is_decorated: 1455 return conn 1456 1457 if encoding is None: 1458 encoding = _default_client_encoding 1459 if encoding is None: 1460 encoding = gmI18N.get_encoding() 1461 _log.warning('client encoding not specified') 1462 _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding) 1463 _log.warning('for this to work properly the application MUST have called locale.setlocale() before') 1464 1465 # set connection properties 1466 # - client encoding 1467 try: 1468 conn.set_client_encoding(encoding) 1469 except dbapi.OperationalError: 1470 t, v, tb = sys.exc_info() 1471 if str(v).find("can't set encoding to") != -1: 1472 raise cEncodingError, (encoding, v), tb 1473 raise 1474 1475 # - transaction isolation level 1476 if readonly: 1477 iso_level = u'read committed' 1478 else: 1479 conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) 1480 iso_level = u'serializable' 1481 1482 _log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone) 1483 1484 curs = conn.cursor() 1485 1486 # - client time zone 1487 curs.execute(_sql_set_timezone, [_default_client_timezone]) 1488 1489 conn.commit() 1490 1491 # FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is 1492 # FIXME: PG 8.4, however!) 
or else when psycopg2 supports a workaround 1493 # 1494 # - bytea data format 1495 # PG 9.0 switched to - by default - using "hex" rather than "escape", 1496 # however, psycopg2's linked with a pre-9.0 libpq do assume "escape" 1497 # as the transmission mode for bytea output, 1498 # so try to set this setting back to "escape", 1499 # if that's not possible the reason will be that PG < 9.0 does not support 1500 # that setting - which also means we don't need it and can ignore the 1501 # failure 1502 cmd = "set bytea_output to 'escape'" 1503 try: 1504 curs.execute(cmd) 1505 except dbapi.ProgrammingError: 1506 _log.error('cannot set bytea_output format') 1507 1508 curs.close() 1509 conn.commit() 1510 1511 conn.is_decorated = True 1512 1513 return conn
1514 #-----------------------------------------------------------------------
1515 -def shutdown():
1516 if __ro_conn_pool is None: 1517 return 1518 __ro_conn_pool.shutdown()
1519 # ====================================================================== 1520 # internal helpers 1521 #-----------------------------------------------------------------------
1522 -def __noop():
1523 pass
1524 #-----------------------------------------------------------------------
1525 -def _raise_exception_on_ro_conn_close():
1526 raise TypeError(u'close() called on read-only connection')
1527 #-----------------------------------------------------------------------
1528 -def log_database_access(action=None):
1529 run_insert ( 1530 schema = u'gm', 1531 table = u'access_log', 1532 values = {u'user_action': action}, 1533 end_tx = True 1534 )
1535 #-----------------------------------------------------------------------
1536 -def sanity_check_time_skew(tolerance=60):
1537 """Check server time and local time to be within 1538 the given tolerance of each other. 1539 1540 tolerance: seconds 1541 """ 1542 _log.debug('maximum skew tolerance (seconds): %s', tolerance) 1543 1544 cmd = u"select now() at time zone 'UTC'" 1545 conn = get_raw_connection(readonly=True) 1546 curs = conn.cursor() 1547 1548 start = time.time() 1549 rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}]) 1550 end = time.time() 1551 client_now_as_utc = pydt.datetime.utcnow() 1552 1553 curs.close() 1554 conn.commit() 1555 1556 server_now_as_utc = rows[0][0] 1557 query_duration = end - start 1558 _log.info('server "now" (UTC): %s', server_now_as_utc) 1559 _log.info('client "now" (UTC): %s', client_now_as_utc) 1560 _log.debug('wire roundtrip (seconds): %s', query_duration) 1561 1562 if query_duration > tolerance: 1563 _log.error('useless to check client/server time skew, wire roundtrip > tolerance') 1564 return False 1565 1566 if server_now_as_utc > client_now_as_utc: 1567 real_skew = server_now_as_utc - client_now_as_utc 1568 else: 1569 real_skew = client_now_as_utc - server_now_as_utc 1570 1571 _log.debug('client/server time skew: %s', real_skew) 1572 1573 if real_skew > pydt.timedelta(seconds = tolerance): 1574 _log.error('client/server time skew > tolerance') 1575 return False 1576 1577 return True
1578 #-----------------------------------------------------------------------
1579 -def sanity_check_database_settings():
1580 """Checks database settings. 1581 1582 returns (status, message) 1583 status: 1584 0: no problem 1585 1: non-fatal problem 1586 2: fatal problem 1587 """ 1588 _log.debug('checking database settings') 1589 1590 conn = get_connection() 1591 1592 # - version string 1593 global postgresql_version_string 1594 if postgresql_version_string is None: 1595 curs = conn.cursor() 1596 curs.execute('select version()') 1597 postgresql_version_string = curs.fetchone()['version'] 1598 curs.close() 1599 _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string) 1600 1601 options2check = { 1602 # setting: [expected value, risk, fatal?] 1603 u'allow_system_table_mods': [u'off', u'system breakage', False], 1604 u'check_function_bodies': [u'on', u'suboptimal error detection', False], 1605 u'datestyle': [u'ISO', u'faulty timestamp parsing', True], 1606 u'default_transaction_isolation': [u'read committed', u'faulty database reads', True], 1607 u'default_transaction_read_only': [u'on', u'accidental database writes', False], 1608 u'fsync': [u'on', u'data loss/corruption', True], 1609 u'full_page_writes': [u'on', u'data loss/corruption', False], 1610 u'lc_messages': [u'C', u'suboptimal error detection', False], 1611 u'password_encryption': [u'on', u'breach of confidentiality', False], 1612 u'regex_flavor': [u'advanced', u'query breakage', False], # 9.0 doesn't support this anymore, default now advanced anyway 1613 u'synchronous_commit': [u'on', u'data loss/corruption', False], 1614 u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True] 1615 } 1616 1617 from Gnumed.pycommon import gmCfg2 1618 _cfg = gmCfg2.gmCfgData() 1619 if _cfg.get(option = u'hipaa'): 1620 options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True] 1621 options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True] 1622 else: 1623 options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None] 1624 
options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None] 1625 1626 cmd = u"select name, setting from pg_settings where name in %(settings)s" 1627 rows, idx = run_ro_queries ( 1628 link_obj = conn, 1629 queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}], 1630 get_col_idx = False 1631 ) 1632 1633 found_error = False 1634 found_problem = False 1635 msg = [] 1636 for row in rows: 1637 option = row['name'] 1638 value_found = row['setting'] 1639 value_expected = options2check[option][0] 1640 risk = options2check[option][1] 1641 fatal_setting = options2check[option][2] 1642 if value_found != value_expected: 1643 if fatal_setting is True: 1644 found_error = True 1645 elif fatal_setting is False: 1646 found_problem = True 1647 elif fatal_setting is None: 1648 pass 1649 else: 1650 _log.error(options2check[option]) 1651 raise ValueError(u'invalid database configuration sanity check') 1652 msg.append(_(' option [%s]: %s') % (option, value_found)) 1653 msg.append(_(' risk: %s') % risk) 1654 _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk)) 1655 1656 if found_error: 1657 return 2, u'\n'.join(msg) 1658 1659 if found_problem: 1660 return 1, u'\n'.join(msg) 1661 1662 return 0, u''
1663 #------------------------------------------------------------------------
1664 -def __log_PG_settings(curs=None):
1665 # don't use any of the run_*()s since that might 1666 # create a loop if we fail here 1667 # FIXME: use pg_settings 1668 try: 1669 curs.execute(u'show all') 1670 except: 1671 _log.exception(u'cannot log PG settings (>>>show all<<< failed)') 1672 return False 1673 settings = curs.fetchall() 1674 if settings is None: 1675 _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)') 1676 return False 1677 for setting in settings: 1678 _log.debug(u'PG option [%s]: %s', setting[0], setting[1]) 1679 return True
1680 # =======================================================================
1681 -def extract_msg_from_pg_exception(exc=None):
1682 1683 try: 1684 msg = exc.args[0] 1685 except (AttributeError, IndexError, TypeError): 1686 return u'cannot extract message from exception' 1687 1688 return unicode(msg, gmI18N.get_encoding(), 'replace')
1689 # =======================================================================
1690 -class cAuthenticationError(dbapi.OperationalError):
1691
1692 - def __init__(self, dsn=None, prev_val=None):
1693 self.dsn = dsn 1694 self.prev_val = prev_val
1695
1696 - def __str__(self):
1697 _log.warning('%s.__str__() called', self.__class__.__name__) 1698 tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn) 1699 _log.error(tmp) 1700 return tmp.encode(gmI18N.get_encoding(), 'replace')
1701
1702 - def __unicode__(self):
1703 return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1704 1705 # ======================================================================= 1706 # custom psycopg2 extensions 1707 # =======================================================================
1708 -class cEncodingError(dbapi.OperationalError):
1709
1710 - def __init__(self, encoding=None, prev_val=None):
1711 self.encoding = encoding 1712 self.prev_val = prev_val
1713
1714 - def __str__(self):
1715 _log.warning('%s.__str__() called', self.__class__.__name__) 1716 return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))
1717
1718 - def __unicode__(self):
1719 return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1720 1721 # ----------------------------------------------------------------------- 1722 # Python -> PostgreSQL 1723 # ----------------------------------------------------------------------- 1724 # test when Squeeze (and thus psycopg2 2.2 becomes Stable
1725 -class cAdapterPyDateTime(object):
1726
1727 - def __init__(self, dt):
1728 if dt.tzinfo is None: 1729 raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat()) 1730 self.__dt = dt
1731
1732 - def getquoted(self):
1733 return _timestamp_template % self.__dt.isoformat()

## remove for 0.9
## ----------------------------------------------------------------------
##class cAdapterMxDateTime(object):
##
##	def __init__(self, dt):
##		if dt.tz == '???':
##			_log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
##		self.__dt = dt
##
##	def getquoted(self):
##		# under some locale settings the mx.DateTime ISO formatter
##		# will insert "," into the ISO string,
##		# while this is allowed per the ISO8601 spec PostgreSQL
##		# cannot currently handle that,
##		# so map those "," to "." to make things work:
##		return mxDT.ISO.str(self.__dt).replace(',', '.')
##
## ----------------------------------------------------------------------
## PostgreSQL -> Python
## ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# make sure psycopg2 knows how to handle unicode ...
# intended to become standard
# test when Squeeze (and thus psycopg2 2.2 becomes Stable
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
# check in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

## remove for 0.9
#try:
#	import mx.DateTime as mxDT
##	psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
#except ImportError:
#	_log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (*) ..." syntax because we want
# them adapted to "... ARRAY[]..." so we can support PG arrays

#=======================================================================
# self-test entry point: run as "python gmPG2.py test"
if __name__ == "__main__":

	if len(sys.argv) < 2:
		sys.exit()

	if sys.argv[1] != 'test':
		sys.exit()

	logging.basicConfig(level=logging.DEBUG)
	#--------------------------------------------------------------------
1791 - def test_file2bytea():
1792 run_rw_queries(queries = [ 1793 {'cmd': u'create table test_bytea (data bytea)'} 1794 ]) 1795 1796 cmd = u'insert into test_bytea values (%(data)s::bytea)' 1797 try: 1798 file2bytea(query = cmd, filename = sys.argv[2]) 1799 except: 1800 _log.exception('error') 1801 1802 run_rw_queries(queries = [ 1803 {'cmd': u'drop table test_bytea'} 1804 ])
1805 #--------------------------------------------------------------------
1806 - def test_get_connection():
1807 print "testing get_connection()" 1808 1809 dsn = 'foo' 1810 try: 1811 conn = get_connection(dsn=dsn) 1812 except dbapi.OperationalError, e: 1813 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1814 t, v = sys.exc_info()[:2] 1815 print ' ', t 1816 print ' ', v 1817 1818 dsn = 'dbname=gnumed_v9' 1819 try: 1820 conn = get_connection(dsn=dsn) 1821 except cAuthenticationError: 1822 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1823 t, v = sys.exc_info()[:2] 1824 print ' ', t 1825 print ' ', v 1826 1827 dsn = 'dbname=gnumed_v9 user=abc' 1828 try: 1829 conn = get_connection(dsn=dsn) 1830 except cAuthenticationError: 1831 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1832 t, v = sys.exc_info()[:2] 1833 print ' ', t 1834 print ' ', v 1835 1836 dsn = 'dbname=gnumed_v9 user=any-doc' 1837 try: 1838 conn = get_connection(dsn=dsn) 1839 except cAuthenticationError: 1840 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1841 t, v = sys.exc_info()[:2] 1842 print ' ', t 1843 print ' ', v 1844 1845 dsn = 'dbname=gnumed_v9 user=any-doc password=abc' 1846 try: 1847 conn = get_connection(dsn=dsn) 1848 except cAuthenticationError: 1849 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1850 t, v = sys.exc_info()[:2] 1851 print ' ', t 1852 print ' ', v 1853 1854 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1855 conn = get_connection(dsn=dsn, readonly=True) 1856 1857 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1858 conn = get_connection(dsn=dsn, readonly=False) 1859 1860 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1861 encoding = 'foo' 1862 try: 1863 conn = get_connection(dsn=dsn, encoding=encoding) 1864 except cEncodingError: 1865 print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding) 1866 t, v = sys.exc_info()[:2] 1867 print ' ', t 1868 print ' ', v
1869 #--------------------------------------------------------------------
1870 - def test_exceptions():
1871 print "testing exceptions" 1872 1873 try: 1874 raise cAuthenticationError('no dsn', 'no previous exception') 1875 except cAuthenticationError: 1876 t, v, tb = sys.exc_info() 1877 print t 1878 print v 1879 print tb 1880 1881 try: 1882 raise cEncodingError('no dsn', 'no previous exception') 1883 except cEncodingError: 1884 t, v, tb = sys.exc_info() 1885 print t 1886 print v 1887 print tb
1888 #--------------------------------------------------------------------
1889 - def test_ro_queries():
1890 print "testing run_ro_queries()" 1891 1892 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1893 conn = get_connection(dsn, readonly=True) 1894 1895 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True) 1896 print data 1897 print idx 1898 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True) 1899 print data 1900 print idx 1901 1902 curs = conn.cursor() 1903 1904 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True) 1905 print data 1906 print idx 1907 1908 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True, verbose=True) 1909 print data 1910 print idx 1911 1912 try: 1913 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True) 1914 print data 1915 print idx 1916 except psycopg2.ProgrammingError: 1917 print 'SUCCESS: run_ro_queries("selec 1") failed as expected' 1918 t, v = sys.exc_info()[:2] 1919 print ' ', t 1920 print ' ', v 1921 1922 curs.close()
1923 #--------------------------------------------------------------------
1924 - def test_request_dsn():
1925 conn = get_connection() 1926 print conn 1927 conn.close()
1928 #--------------------------------------------------------------------
1929 - def test_set_encoding():
1930 print "testing set_default_client_encoding()" 1931 1932 enc = 'foo' 1933 try: 1934 set_default_client_encoding(enc) 1935 print "SUCCESS: encoding [%s] worked" % enc 1936 except ValueError: 1937 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1938 t, v = sys.exc_info()[:2] 1939 print ' ', t 1940 print ' ', v 1941 1942 enc = '' 1943 try: 1944 set_default_client_encoding(enc) 1945 print "SUCCESS: encoding [%s] worked" % enc 1946 except ValueError: 1947 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1948 t, v = sys.exc_info()[:2] 1949 print ' ', t 1950 print ' ', v 1951 1952 enc = 'latin1' 1953 try: 1954 set_default_client_encoding(enc) 1955 print "SUCCESS: encoding [%s] worked" % enc 1956 except ValueError: 1957 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1958 t, v = sys.exc_info()[:2] 1959 print ' ', t 1960 print ' ', v 1961 1962 enc = 'utf8' 1963 try: 1964 set_default_client_encoding(enc) 1965 print "SUCCESS: encoding [%s] worked" % enc 1966 except ValueError: 1967 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1968 t, v = sys.exc_info()[:2] 1969 print ' ', t 1970 print ' ', v 1971 1972 enc = 'unicode' 1973 try: 1974 set_default_client_encoding(enc) 1975 print "SUCCESS: encoding [%s] worked" % enc 1976 except ValueError: 1977 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1978 t, v = sys.exc_info()[:2] 1979 print ' ', t 1980 print ' ', v 1981 1982 enc = 'UNICODE' 1983 try: 1984 set_default_client_encoding(enc) 1985 print "SUCCESS: encoding [%s] worked" % enc 1986 except ValueError: 1987 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1988 t, v = sys.exc_info()[:2] 1989 print ' ', t 1990 print ' ', v
1991 #--------------------------------------------------------------------
1992 - def test_connection_pool():
1993 dsn = get_default_dsn() 1994 pool = cConnectionPool(minconn=1, maxconn=2, dsn=None, verbose=False) 1995 print pool 1996 print pool.getconn() 1997 print pool.getconn() 1998 print pool.getconn() 1999 print type(pool.getconn())
2000 #--------------------------------------------------------------------
2001 - def test_list_args():
2002 dsn = get_default_dsn() 2003 conn = get_connection(dsn, readonly=True) 2004 curs = conn.cursor() 2005 curs.execute('select * from clin.clin_narrative where narrative = %s', ['a'])
2006 #--------------------------------------------------------------------
2007 - def test_sanitize_pg_regex():
2008 tests = [ 2009 ['(', '\\('] 2010 , ['[', '\\['] 2011 , [')', '\\)'] 2012 ] 2013 for test in tests: 2014 result = sanitize_pg_regex(test[0]) 2015 if result != test[1]: 2016 print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
2017 #--------------------------------------------------------------------
2018 - def test_is_pg_interval():
2019 status = True 2020 tests = [ 2021 [None, True], # None == NULL == succeeds ! 2022 [1, True], 2023 ['1', True], 2024 ['abc', False] 2025 ] 2026 2027 if not is_pg_interval(): 2028 print 'ERROR: is_pg_interval() returned "False", expected "True"' 2029 status = False 2030 2031 for test in tests: 2032 result = is_pg_interval(test[0]) 2033 if result != test[1]: 2034 print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 2035 status = False 2036 2037 return status
2038 #--------------------------------------------------------------------
	def test_sanity_check_time_skew():
		# thin wrapper: just invoke the module's client/server
		# clock-skew sanity check (requires a database connection)
		sanity_check_time_skew()
2041 #--------------------------------------------------------------------
2042 - def test_keyword_expansion():
2043 print "keywords, from database:" 2044 print get_text_expansion_keywords() 2045 print "keywords, cached:" 2046 print get_text_expansion_keywords() 2047 print "'$keyword' expands to:" 2048 print expand_keyword(keyword = u'$dvt')
2049 #--------------------------------------------------------------------
2050 - def test_get_foreign_key_details():
2051 for row in get_foreign_keys2column ( 2052 schema = u'dem', 2053 table = u'identity', 2054 column = u'pk' 2055 ): 2056 print '%s.%s references %s.%s.%s' % ( 2057 row['referencing_table'], 2058 row['referencing_column'], 2059 row['referenced_schema'], 2060 row['referenced_table'], 2061 row['referenced_column'] 2062 )
2063 #--------------------------------------------------------------------
2064 - def test_set_user_language():
2065 # (user, language, result, exception type) 2066 tests = [ 2067 # current user 2068 [None, 'de_DE', True], 2069 [None, 'lang_w/o_tx', False], 2070 [None, None, True], 2071 # valid user 2072 ['any-doc', 'de_DE', True], 2073 ['any-doc', 'lang_w/o_tx', False], 2074 ['any-doc', None, True], 2075 # invalid user 2076 ['invalid user', 'de_DE', None], 2077 ['invalid user', 'lang_w/o_tx', False], # lang checking happens before user checking 2078 ['invalid user', None, True] 2079 ] 2080 for test in tests: 2081 try: 2082 result = set_user_language(user = test[0], language = test[1]) 2083 if result != test[2]: 2084 print "test:", test 2085 print "result:", result, "expected:", test[2] 2086 except psycopg2.IntegrityError, e: 2087 if test[2] is None: 2088 continue 2089 print "test:", test 2090 print "expected exception" 2091 print "result:", e
2092 #--------------------------------------------------------------------
2093 - def test_get_schema_revision_history():
2094 for line in get_schema_revision_history(): 2095 print u' - '.join(line)
2096 #--------------------------------------------------------------------
2097 - def test_run_query():
2098 gmDateTime.init() 2099 args = {'dt': gmDateTime.pydt_max_here()} 2100 cmd = u"select %(dt)s" 2101 2102 #cmd = u"select 'infinity'::timestamp with time zone" 2103 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False) 2104 print rows
2105 #-------------------------------------------------------------------- 2106 # run tests 2107 #test_file2bytea() 2108 #test_get_connection() 2109 #test_exceptions() 2110 #test_ro_queries() 2111 #test_request_dsn() 2112 #test_set_encoding() 2113 #test_connection_pool() 2114 #test_list_args() 2115 #test_sanitize_pg_regex() 2116 #test_is_pg_interval() 2117 #test_sanity_check_time_skew() 2118 #test_keyword_expansion() 2119 #test_get_foreign_key_details() 2120 #test_set_user_language() 2121 #test_get_schema_revision_history() 2122 test_run_query() 2123 2124 # ====================================================================== 2125