1 """GNUmed PostgreSQL connection handling.
2
3 TODO: iterator/generator batch fetching:
4 - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad
5 - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4"
6
7 winner:
8 def resultset_functional_batchgenerator(cursor, size=100):
9 for results in iter(lambda: cursor.fetchmany(size), []):
10 for rec in results:
11 yield rec
12 """
13
14
15 __version__ = "$Revision: 1.127 $"
16 __author__ = "K.Hilbert <Karsten.Hilbert@gmx.net>"
17 __license__ = 'GPL (details at http://www.gnu.org)'
18
19
20
# stdlib imports - 'regex' and 'pydt' are this project's conventional
# aliases for re and datetime; note: the original line imported 'locale' twice
import time, locale, sys, re as regex, os, codecs, types, datetime as pydt, logging
22
23
24 if __name__ == '__main__':
25 sys.path.insert(0, '../../')
26 from Gnumed.pycommon import gmLoginInfo, gmExceptions, gmDateTime, gmBorg, gmI18N, gmLog2
27
28 _log = logging.getLogger('gm.db')
29 _log.info(__version__)
30
31
# psycopg2 is a hard dependency - fail early and loudly if it is missing
try:
	import psycopg2 as dbapi
except ImportError:
	_log.exception("Python database adapter psycopg2 not found.")
	print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server."
	raise


# verify that the psycopg2 build we found is capable enough,
# refuse to start up with an unsuitable one
_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
if not (float(dbapi.apilevel) >= 2.0):
	raise ImportError('gmPG2: supported DB-API level too low')
if not (dbapi.threadsafety > 0):
	raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
if not (dbapi.paramstyle == 'pyformat'):
	raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
# psycopg2 embeds feature flags ('dt', 'ext', 'pq3') in its
# version string - require the ones GNUmed depends on
try:
	dbapi.__version__.index('dt')
except ValueError:
	raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
	dbapi.__version__.index('ext')
except ValueError:
	raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
	dbapi.__version__.index('pq3')
except ValueError:
	raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')
61
62 import psycopg2.extras
63 import psycopg2.extensions
64 import psycopg2.pool
65 import psycopg2.errorcodes as sql_error_codes
66
67
# client (wire) encoding used for new connections unless overridden
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# client time zone for new connections; detected/set at runtime
# elsewhere in this module (not visible in this chunk)
_default_client_timezone = None
# SQL command used to apply the detected time zone - set at runtime
_sql_set_timezone = None
# SQL template for casting a timestamp literal to "timestamp with time zone"
_timestamp_template = "cast('%s' as timestamp with time zone)"
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

# DSN/login used when callers do not pass one explicitly
_default_dsn = None
_default_login = None

# server version info, cached on first connection
postgresql_version_string = None
postgresql_version = None			# numeric major.minor

# lazily instantiated pool of read-only connections
__ro_conn_pool = None
85
86
87
88
# md5 hash of the expected database schema structure, per released
# schema version - used to verify a database matches the client
known_schema_hashes = {
	'devel': 'not released, testing only',
	'v2': 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	'v3': 'e73718eaf230d8f1d2d01afa8462e176',
	'v4': '4428ccf2e54c289136819e701bb095ea',
	'v5': '7e7b093af57aea48c288e76632a382e5',
	'v6': '90e2026ac2efd236da9c8608b8685b2d',
	'v7': '6c9f6d3981483f8e9433df99d1947b27',
	'v8': '89b13a7af83337c3aad153b717e52360',
	'v9': '641a9b2be3c378ffc2bb2f0b1c9f051d',
	'v10': '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	'v11': '03042ae24f3f92877d986fb0a6184d76',
	'v12': '06183a6616db62257e22814007a8ed07'
}

# reverse lookup: schema structure hash -> schema version
map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 'v2',
	'e73718eaf230d8f1d2d01afa8462e176': 'v3',
	'4428ccf2e54c289136819e701bb095ea': 'v4',
	'7e7b093af57aea48c288e76632a382e5': 'v5',
	'90e2026ac2efd236da9c8608b8685b2d': 'v6',
	'6c9f6d3981483f8e9433df99d1947b27': 'v7',
	'89b13a7af83337c3aad153b717e52360': 'v8',
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 'v9',
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 'v10',
	'03042ae24f3f92877d986fb0a6184d76': 'v11',
	'06183a6616db62257e22814007a8ed07': 'v12'
}

# client branch -> database schema version that branch requires
map_client_branch2required_db_version = {
	u'CVS HEAD': u'devel',
	u'0.3': u'v9',
	u'0.4': u'v10',
	u'0.5': u'v11',
	u'0.6': u'v12'
}

# SQL: column names and udt types of a table, in definition order;
# args: (schema, table)
query_table_col_defs = u"""select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# SQL: column names of a table, in definition order; args: (schema, table)
query_table_attributes = u"""select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""
149
150
151
152
154
155 if encoding not in psycopg2.extensions.encodings:
156 raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
157
158 py_enc = psycopg2.extensions.encodings[encoding]
159 try:
160 codecs.lookup(py_enc)
161 except LookupError:
162 _log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
163 raise
164
165
166
167
168 global _default_client_encoding
169 _log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
170 _default_client_encoding = encoding
171 return True
172
184
186
187 _log.debug(u'validating time zone [%s]', timezone)
188
189 cmd = u'set timezone to %(tz)s'
190 args = {u'tz': timezone}
191
192 conn.commit()
193 curs = conn.cursor()
194 is_valid = False
195 try:
196 curs.execute(cmd, args)
197 _log.info(u'time zone [%s] is settable', timezone)
198
199 cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
200 try:
201 curs.execute(cmd)
202 curs.fetchone()
203 _log.info(u'time zone [%s] is usable', timezone)
204 is_valid = True
205 except:
206 _log.error('error using time zone [%s]', timezone)
207 except dbapi.DataError:
208 _log.warning(u'time zone [%s] is not settable', timezone)
209 except:
210 _log.error(u'failed to set time zone to [%s]', timezone)
211 _log.exception(u'')
212
213 curs.close()
214 conn.rollback()
215
216 return is_valid
217
219 """some timezone defs are abbreviations so try to expand
220 them because "set time zone" doesn't take abbreviations"""
221
222 cmd = u"""
223 select distinct on (abbrev) name
224 from pg_timezone_names
225 where
226 abbrev = %(tz)s and
227 name ~ '^[^/]+/[^/]+$' and
228 name !~ '^Etc/'
229 """
230 args = {u'tz': timezone}
231
232 conn.commit()
233 curs = conn.cursor()
234
235 result = timezone
236 try:
237 curs.execute(cmd, args)
238 rows = curs.fetchall()
239 if len(rows) > 0:
240 result = rows[0][0]
241 _log.debug(u'[%s] maps to [%s]', timezone, result)
242 except:
243 _log.exception(u'cannot expand timezone abbreviation [%s]', timezone)
244
245 curs.close()
246 conn.rollback()
247
248 return result
249
295
296
297
303
305 """Text mode request of database login parameters"""
306 import getpass
307 login = gmLoginInfo.LoginInfo()
308
309 print "\nPlease enter the required login parameters:"
310 try:
311 login.host = __prompted_input("host ['' = non-TCP/IP]: ", '')
312 login.database = __prompted_input("database [gnumed_v13]: ", 'gnumed_v13')
313 login.user = __prompted_input("user name: ", '')
314 tmp = 'password for "%s" (not shown): ' % login.user
315 login.password = getpass.getpass(tmp)
316 login.port = __prompted_input("port [5432]: ", 5432)
317 except KeyboardInterrupt:
318 _log.warning("user cancelled text mode login dialog")
319 print "user cancelled text mode login dialog"
320 raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))
321
322 return login
323
325 """GUI (wx) input request for database login parameters.
326
327 Returns gmLoginInfo.LoginInfo object
328 """
329 import wx
330
331
332 if wx.GetApp() is None:
333 raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))
334
335
336
337 import gmAuthWidgets
338 dlg = gmAuthWidgets.cLoginDialog(None, -1)
339 dlg.ShowModal()
340 login = dlg.panel.GetLoginInfo()
341 dlg.Destroy()
342
343
344 if login is None:
345 raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))
346
347 return login
348
350 """Request login parameters for database connection.
351 """
352
353
354
355 if os.environ.has_key('DISPLAY'):
356
357 try:
358 return __request_login_params_gui_wx()
359 except:
360 pass
361
362
363 return __request_login_params_tui()
364
365
366
367
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	"""Assemble a libpq-style DSN string from individual parts.

	Parts which are None or blank are simply left out of the DSN
	(an empty host means a non-TCP/IP, ie. socket, connection).
	'sslmode=prefer' is always appended so SSL gets used whenever
	the server supports it.
	"""
	def _has_content(value):
		return (value is not None) and (value.strip() != '')

	fragments = []
	if _has_content(database):
		fragments.append('dbname=%s' % database)
	if _has_content(host):
		fragments.append('host=%s' % host)
	# port may legitimately be an integer, hence str() it first
	if (port is not None) and (str(port).strip() != ''):
		fragments.append('port=%s' % port)
	if _has_content(user):
		fragments.append('user=%s' % user)
	if _has_content(password):
		fragments.append('password=%s' % password)
	fragments.append('sslmode=prefer')

	return ' '.join(fragments)
389
394
404
424
425
426
428 expected_hash = known_schema_hashes[version]
429 if version == 'devel':
430 args = {'ver': '9999'}
431 else:
432 args = {'ver': version.strip('v')}
433 rows, idx = run_ro_queries (
434 link_obj = link_obj,
435 queries = [{
436 'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
437 'args': args
438 }]
439 )
440 if rows[0]['md5'] != expected_hash:
441 _log.error('database schema version mismatch')
442 _log.error('expected: %s (%s)' % (version, expected_hash))
443 _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
444 if verbose:
445 _log.debug('schema dump follows:')
446 for line in get_schema_structure(link_obj=link_obj).split():
447 _log.debug(line)
448 _log.debug('schema revision history dump follows:')
449 for line in get_schema_revision_history(link_obj=link_obj):
450 _log.debug(u' - '.join(line))
451 return False
452 _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
453 return True
454
456 rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
457 try:
458 return map_schema_hash2version[rows[0]['md5']]
459 except KeyError:
460 return u'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']
461
463 rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select gm.concat_table_structure()'}])
464 return rows[0][0]
465
467 cmd = u"""
468 select
469 imported::text,
470 version,
471 filename
472 from gm.schema_revision
473 order by imported
474 """
475 rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': cmd}])
476 return rows
477
479 rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
480 return rows[0][0]
481
483 """Get the foreign keys pointing to schema.table.column.
484
485 Does not properly work with multi-column FKs.
486 GNUmed doesn't use any, however.
487 """
488 cmd = u"""
489 select
490 %(schema)s as referenced_schema,
491 %(tbl)s as referenced_table,
492 %(col)s as referenced_column,
493 pgc.confkey as referenced_column_list,
494 pgc.conrelid::regclass as referencing_table,
495 pgc.conkey as referencing_column_list,
496 (select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
497 from
498 pg_constraint pgc
499 where
500 pgc.contype = 'f'
501 and
502 pgc.confrelid = (
503 select oid from pg_class where relname = %(tbl)s and relnamespace = (
504 select oid from pg_namespace where nspname = %(schema)s
505 )
506 ) and
507 (
508 select attnum
509 from pg_attribute
510 where
511 attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
512 select oid from pg_namespace where nspname = %(schema)s
513 ))
514 and
515 attname = %(col)s
516 ) = any(pgc.confkey)
517 """
518
519 args = {
520 'schema': schema,
521 'tbl': table,
522 'col': column
523 }
524
525 rows, idx = run_ro_queries (
526 link_obj = link_obj,
527 queries = [
528 {'cmd': cmd, 'args': args}
529 ]
530 )
531
532 return rows
533
535 """Return child tables of <table>."""
536 cmd = u"""
537 select
538 pgn.nspname as namespace,
539 pgc.relname as table
540 from
541 pg_namespace pgn,
542 pg_class pgc
543 where
544 pgc.relnamespace = pgn.oid
545 and
546 pgc.oid in (
547 select inhrelid from pg_inherits where inhparent = (
548 select oid from pg_class where
549 relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
550 relname = %(table)s
551 )
552 )"""
553 rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
554 return rows
555
557 """Returns false, true."""
558 cmd = u"""
559 select exists (
560 select 1 from information_schema.tables
561 where
562 table_schema = %s and
563 table_name = %s and
564 table_type = 'BASE TABLE'
565 )"""
566 rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
567 return rows[0][0]
568
570 if cursor.description is None:
571 _log.error('no result description available: unused cursor or last query did not select rows')
572 return None
573 col_indices = {}
574 col_index = 0
575 for col_desc in cursor.description:
576 col_name = col_desc[0]
577
578
579
580 if col_indices.has_key(col_name):
581 col_name = '%s_%s' % (col_name, col_index)
582 col_indices[col_name] = col_index
583 col_index += 1
584
585 return col_indices
586
def get_col_defs(link_obj=None, schema='public', table=None):
	"""Return [ordered column names, {column name: type string}] for a table.

	PostgreSQL reports array types with a leading underscore on the
	element type (eg. '_int4') - translate that into 'int4[]' notation.
	"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	names = [ r[0] for r in rows ]
	types_by_name = {}
	for row in rows:
		udt = row[1]
		if udt.startswith('_'):
			types_by_name[row[0]] = udt[1:] + '[]'
		else:
			types_by_name[row[0]] = udt
	return [names, types_by_name]
602
604 """Return column attributes of table"""
605 rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
606 cols = []
607 for row in rows:
608 cols.append(row[0])
609 return cols
610
612 rows, idx = run_ro_queries (
613 queries = [{'cmd': u'select distinct lang from i18n.translations'}]
614 )
615 return [ r[0] for r in rows ]
616
618 cmd = u'select i18n.get_curr_lang()'
619 rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
620 return rows[0][0]
621
623 """Set the user language in the database.
624
625 user = None: current db user
626 language = None: unset
627 """
628 _log.info('setting database language for user [%s] to [%s]', user, language)
629
630 args = {
631 'usr': user,
632 'lang': language
633 }
634
635 if language is None:
636 if user is None:
637 queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
638 else:
639 queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
640 queries.append({'cmd': u'select True'})
641 else:
642 if user is None:
643 queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
644 else:
645 queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]
646
647 rows, idx = run_rw_queries(queries = queries, return_data = True)
648
649 if not rows[0][0]:
650 _log.error('cannot set database language to [%s] for user [%s]', language, user)
651
652 return rows[0][0]
653
655 """Set the user language in the database.
656
657 - regardless of whether there is any translation available.
658 - only for the current user
659 """
660 _log.info('forcing database language for current db user to [%s]', language)
661
662 run_rw_queries(queries = [{
663 'cmd': u'select i18n.force_curr_lang(%(lang)s)',
664 'args': {'lang': language}
665 }])
666
667
668 text_expansion_keywords = None
669
671 global text_expansion_keywords
672 if text_expansion_keywords is not None:
673 return text_expansion_keywords
674
675 cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions"""
676 rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
677 text_expansion_keywords = rows
678
679 _log.info('retrieved %s text expansion keywords', len(text_expansion_keywords))
680
681 return text_expansion_keywords
682
684
685
686 if keyword == u'$$steffi':
687 return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'
688
689 cmd = u"""select expansion from clin.v_your_keyword_expansions where keyword = %(kwd)s"""
690 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
691
692 if len(rows) == 0:
693 return None
694
695 return rows[0]['expansion']
696
698
699 if keyword is None:
700 return []
701
702 get_text_expansion_keywords()
703
704 candidates = []
705 for kwd in text_expansion_keywords:
706 if kwd['keyword'].startswith(keyword):
707 candidates.append(kwd['keyword'])
708
709 return candidates
710
def add_text_expansion(keyword=None, expansion=None, public=None):
	"""Add a keyword -> expansion mapping.

	public=True creates a system-wide expansion, otherwise one
	private to the current staff member.

	Returns False if an expansion for <keyword> already exists at
	the requested visibility, True after inserting the new one.
	"""
	# duplicate check at the requested visibility
	if public:
		dupe_check_cmd = u"select 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
	else:
		dupe_check_cmd = u"select 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"
	rows, idx = run_ro_queries(queries = [{'cmd': dupe_check_cmd, 'args': {'kwd': keyword}}])
	if len(rows) != 0:
		return False

	# fk_staff = null marks a public expansion
	if public:
		insert_cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, null)"""
	else:
		insert_cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))"""
	run_rw_queries(queries = [{'cmd': insert_cmd, 'args': {'kwd': keyword, 'exp': expansion}}])

	# invalidate the module-level keyword cache
	global text_expansion_keywords
	text_expansion_keywords = None

	return True
737
739 cmd = u"""
740 delete from clin.keyword_expansion where
741 keyword = %(kwd)s and (
742 (fk_staff = (select pk from dem.staff where db_user = current_user))
743 or
744 (fk_staff is null and owner = current_user)
745 )"""
746 rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
747
748 global text_expansion_keywords
749 text_expansion_keywords = None
750
def edit_text_expansion(keyword, expansion):
	"""Replace the current staff member's private expansion for <keyword>.

	Implemented as delete-then-insert, run as one transaction.
	"""
	delete_cmd = u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and
	fk_staff = (select pk from dem.staff where db_user = current_user)"""

	insert_cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (select pk from dem.staff where db_user = current_user))"""

	queries = [
		{'cmd': delete_cmd, 'args': {'kwd': keyword}},
		{'cmd': insert_cmd, 'args': {'kwd': keyword, 'exp': expansion}}
	]
	run_rw_queries(queries = queries)

	# invalidate the module-level keyword cache
	global text_expansion_keywords
	text_expansion_keywords = None
769
770
771
773 cmd = u'notify "db_maintenance_warning:"'
774 run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
775
777 cmd = u'notify "db_maintenance_disconnect:"'
778 run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
779
781 cmd = u'select %(candidate)s::interval'
782 try:
783 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
784 return True
785 except:
786 cmd = u'select %(candidate)s::text::interval'
787 try:
788 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
789 return True
790 except:
791 return False
792
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Retrieve bytea data and store it into <filename>.

	Thin wrapper around bytea2file_object() - see there for the
	meaning of the arguments.

	Returns the result of bytea2file_object().
	"""
	outfile = open(filename, 'wb')
	try:
		result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
	finally:
		# previously the file handle leaked if bytea2file_object() raised
		outfile.close()
	return result
798
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict, its 'start'/'size' keys are (over)written here
	- must return one row with one field of type bytea
	<file_obj>
	- must be a file like Python object
	<data_size>
	- integer of the total size of the expected data or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- cmd must be unicode
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None

	Returns True on success; re-raises on retrieval errors.
	"""
	if data_size == 0:
		return True

	# a raw connection is desirable here: no encoding/timezone
	# decoration is needed for moving binary data
	conn = get_raw_connection(readonly=True)

	# determine the data size if the caller did not provide it
	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
	if data_size in [None, 0]:
		conn.rollback()
		return True

	_log.debug('expecting bytea data of size: [%s] bytes' % data_size)
	_log.debug('using chunk size of: [%s] bytes' % chunk_size)

	# chunk_size of 0 means "retrieve everything in one go"
	if chunk_size == 0:
		chunk_size = data_size
		_log.debug('chunk size [0] bytes: retrieving all data at once')

	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('chunks to retrieve: [%s]' % needed_chunks)
	_log.debug('remainder to retrieve: [%s] bytes' % remainder)

	# retrieve the full-sized chunks; note that SQL substring()
	# positions are 1-based, hence the "+ 1"
	for chunk_id in range(needed_chunks):
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		# str() converts the bytea buffer into a plain byte string
		file_obj.write(str(rows[0][0]))

	# retrieve the trailing partial chunk, if any
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
		except:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		file_obj.write(str(rows[0][0]))

	# read-only work: rollback is all that is needed
	conn.rollback()
	return True
890
def file2bytea(query=None, filename=None, args=None, conn=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec %(data)s::bytea

	<conn>
	- if None a new read-write connection is created and
	  closed after the query ran
	"""
	# read the file all at once - bytea payloads are expected
	# to fit into memory
	infile = open(filename, "rb")
	try:
		data_as_byte_string = infile.read()
	finally:
		# previously the file handle leaked if read() raised
		infile.close()
	if args is None:
		args = {}
	# wrap in buffer() so psycopg2 transmits it as binary data
	args['data'] = buffer(data_as_byte_string)
	del(data_as_byte_string)

	if conn is None:
		conn = get_raw_connection(readonly=False)
		close_conn = True
	else:
		# caller owns the connection, do not close it
		close_conn = False

	run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)

	if close_conn:
		conn.close()

	return
922
924 """Escape input for use in a PostgreSQL regular expression.
925
926 If a fragment comes from user input and is to be used
927 as a regular expression we need to make sure it doesn't
928 contain invalid regex patterns such as unbalanced ('s.
929
930 <escape_all>
931 True: try to escape *all* metacharacters
932 False: only escape those which render the regex invalid
933 """
934 return expression.replace (
935 '(', '\('
936 ).replace (
937 ')', '\)'
938 ).replace (
939 '[', '\['
940 ).replace (
941 '+', '\+'
942 ).replace (
943 '.', '\.'
944 ).replace (
945 '*', '\*'
946 )
947
948
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<link_obj>
	- a cursor: used as-is, neither closed nor is the
	  transaction rolled back afterwards
	- a connection: a cursor is created and closed on it,
	  the transaction is rolled back at the end
	- None: a (pooled) read-only connection is obtained

	<queries> must be a list of dicts:
	[
		{'cmd': <string>, 'args': <dict> or <tuple>},
		{...},
		...
	]

	Returns a tuple (data, col_idx):
	- <data>: fetchall() result of the last query if
	  <return_data> is true, else None
	- <col_idx>: column name -> index mapping if
	  <get_col_idx> is true, else None
	"""
	# decide cleanup strategy depending on what the caller handed in:
	# caller-owned objects must not be closed/rolled back here
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = __noop
		tx_rollback = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
	elif link_obj is None:
		conn = get_connection(readonly=True, verbose=verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		# queries are expected to be unicode - warn loudly if not
		if type(query['cmd']) is not types.UnicodeType:
			print "run_ro_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug('ran query: [%s]', curs.query)
				_log.debug('PG status message: %s', curs.statusmessage)
				_log.debug('cursor description: %s', str(curs.description))
		except:
			# clean up (close cursor, roll back) before re-raising
			# so pooled connections are not left in ABORT state
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()
			_log.error('query failed: [%s]', curs.query)
			_log.error('PG status message: %s', curs.statusmessage)
			raise

	data = None
	col_idx = None
	if return_data:
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', str(curs.description))
		if get_col_idx:
			col_idx = get_col_indices(curs)

	# read-only work: rollback suffices and leaves no
	# transaction dangling on pooled connections
	curs_close()
	tx_rollback()
	return (data, col_idx)
1016
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
	can be either:
	- a cursor
	- a connection

	<queries>
	is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>)
	to be executed as a single transaction, the last
	query may usefully return rows (such as a
	"select currval('some_sequence')" statement)

	<end_tx>
	- controls whether the transaction is finalized (eg.
	  committed/rolled back) or not, this allows the
	  call to run_rw_queries() to be part of a framing
	  transaction
	- if link_obj is a connection then <end_tx> will
	  default to False unless it is explicitly set to
	  True which is taken to mean "yes, you do have full
	  control over the transaction" in which case the
	  transaction is properly finalized
	- if link_obj is a cursor we CANNOT finalize the
	  transaction because we would need the connection for that
	- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
	- if true, the returned data will include the rows
	  the last query selected
	- if false, it returns None instead

	<get_col_idx>
	- if true, the returned data will include a dictionary
	  mapping field names to column positions
	- if false, the returned data returns None instead

	method result:
	- returns a tuple (data, idx)
	- <data>:
		* (None, None) if last query did not return rows
		* ("fetchall() result", <index>) if last query returned any rows
		* for <index> see <get_col_idx>
	"""
	# decide commit/rollback/close strategy based on what the
	# caller handed in - caller-owned objects are left alone
	if isinstance(link_obj, dbapi._psycopg.cursor):
		conn_close = __noop
		conn_commit = __noop
		conn_rollback = __noop
		curs = link_obj
		curs_close = __noop
	elif isinstance(link_obj, dbapi._psycopg.connection):
		conn_close = __noop
		# only finalize the transaction when explicitly asked to
		if end_tx:
			conn_commit = link_obj.commit
			conn_rollback = link_obj.rollback
		else:
			conn_commit = __noop
			conn_rollback = __noop
		curs = link_obj.cursor()
		curs_close = curs.close
	elif link_obj is None:
		conn = get_connection(readonly=False)
		conn_close = conn.close
		conn_commit = conn.commit
		conn_rollback = conn.rollback
		curs = conn.cursor()
		curs_close = curs.close
	else:
		raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

	for query in queries:
		# queries are expected to be unicode - warn loudly if not
		if type(query['cmd']) is not types.UnicodeType:
			print "run_rw_queries(): non-unicode query"
			print query['cmd']
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
		except:
			# roll back and close before re-raising so no
			# half-done transaction is left behind
			_log.exception('error running RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				conn_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except:
			# fetchall() raises when the last query did not return rows
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				conn_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	# success: finalize (commit) if we own the transaction
	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)
1135
1136
1137
1139 """
1140 GNUmed database connection pool.
1141
1142 Extends psycopg2's PersistentConnectionPool with
1143 a custom _connect() function. Supports one connection
1144 per thread - which also ties it to one particular DSN.
1145 """
1146
1148
1149 conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)
1150
1151 conn.original_close = conn.close
1152 conn.close = _raise_exception_on_ro_conn_close
1153
1154 if key is not None:
1155 self._used[key] = conn
1156 self._rused[id(conn)] = key
1157 else:
1158 self._pool.append(conn)
1159
1160 return conn
1161
1163 for conn_key in self._used.keys():
1164 _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
1165 self._used[conn_key].original_close()
1166
1168 """Get a raw, unadorned connection.
1169
1170 - this will not set any parameters such as encoding, timezone, datestyle
1171 - the only requirement is a valid DSN
1172 - hence it can be used for "service" connections
1173 for verifying encodings etc
1174 """
1175
1176 if dsn is None:
1177 dsn = get_default_dsn()
1178
1179 try:
1180 conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
1181 except dbapi.OperationalError, e:
1182
1183 t, v, tb = sys.exc_info()
1184 try:
1185 msg = e.args[0]
1186 except (AttributeError, IndexError, TypeError):
1187 raise
1188
1189 msg = unicode(msg, gmI18N.get_encoding(), 'replace')
1190
1191 if msg.find('fe_sendauth') != -1:
1192 raise cAuthenticationError, (dsn, msg), tb
1193
1194 if regex.search('user ".*" does not exist', msg) is not None:
1195 raise cAuthenticationError, (dsn, msg), tb
1196
1197 if msg.find('uthenti') != -1:
1198 raise cAuthenticationError, (dsn, msg), tb
1199
1200 raise
1201
1202 _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)
1203
1204
1205 global postgresql_version
1206 if postgresql_version is None:
1207 curs = conn.cursor()
1208 curs.execute ("""
1209 select
1210 (split_part(setting, '.', 1) || '.' || split_part(setting, '.', 2))::numeric as version
1211 from pg_settings
1212 where name='server_version'"""
1213 )
1214 postgresql_version = curs.fetchone()['version']
1215 _log.info('PostgreSQL version (numeric): %s' % postgresql_version)
1216 try:
1217 curs.execute("select pg_size_pretty(pg_database_size(current_database()))")
1218 _log.info('database size: %s', curs.fetchone()[0])
1219 except:
1220 pass
1221 if verbose:
1222 __log_PG_settings(curs=curs)
1223 curs.close()
1224 conn.commit()
1225
1226 if _default_client_timezone is None:
1227 __detect_client_timezone(conn = conn)
1228
1229 curs = conn.cursor()
1230
1231
1232 if readonly:
1233 _log.debug('access mode [READ ONLY]')
1234 cmd = 'set session characteristics as transaction READ ONLY'
1235 curs.execute(cmd)
1236 cmd = 'set default_transaction_read_only to on'
1237 curs.execute(cmd)
1238 else:
1239 _log.debug('access mode [READ WRITE]')
1240 cmd = 'set session characteristics as transaction READ WRITE'
1241 curs.execute(cmd)
1242 cmd = 'set default_transaction_read_only to off'
1243 curs.execute(cmd)
1244
1245 curs.close()
1246 conn.commit()
1247
1248 conn.is_decorated = False
1249
1250 return conn
1251
1252 -def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
1253 """Get a new connection.
1254
1255 This assumes the locale system has been initialzied
1256 unless an encoding is specified.
1257 """
1258
1259
1260 if pooled and readonly and (dsn is None):
1261 global __ro_conn_pool
1262 if __ro_conn_pool is None:
1263 __ro_conn_pool = cConnectionPool (
1264 minconn = 1,
1265 maxconn = 2,
1266 dsn = dsn,
1267 verbose = verbose
1268 )
1269 conn = __ro_conn_pool.getconn()
1270 else:
1271 conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)
1272
1273 if conn.is_decorated:
1274 return conn
1275
1276 if encoding is None:
1277 encoding = _default_client_encoding
1278 if encoding is None:
1279 encoding = gmI18N.get_encoding()
1280 _log.warning('client encoding not specified')
1281 _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
1282 _log.warning('for this to work properly the application MUST have called locale.setlocale() before')
1283
1284
1285
1286 _log.debug('client string encoding [%s]' % encoding)
1287 try:
1288 conn.set_client_encoding(encoding)
1289 except dbapi.OperationalError:
1290 t, v, tb = sys.exc_info()
1291 if str(v).find("can't set encoding to") != -1:
1292 raise cEncodingError, (encoding, v), tb
1293 raise
1294
1295
1296 if readonly:
1297 conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
1298 _log.debug('isolation level [read committed]')
1299 else:
1300 conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
1301 _log.debug('isolation level [serializable]')
1302
1303 curs = conn.cursor()
1304
1305
1306 _log.debug('time zone [%s]' % _default_client_timezone)
1307 curs.execute(_sql_set_timezone, [_default_client_timezone])
1308
1309
1310
1311
1312 _log.debug('datestyle [ISO]')
1313 cmd = "set datestyle to 'ISO'"
1314 curs.execute(cmd)
1315
1316
1317 _log.debug('sql_inheritance [on]')
1318 cmd = 'set sql_inheritance to on'
1319 curs.execute(cmd)
1320
1321
1322 global postgresql_version_string
1323 if postgresql_version_string is None:
1324 curs.execute('select version()')
1325 postgresql_version_string = curs.fetchone()['version']
1326 _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)
1327
1328 curs.close()
1329 conn.commit()
1330
1331 conn.is_decorated = True
1332
1333 return conn
1334
1339
1340
1341
1344
1346 raise TypeError(u'close() called on read-only connection')
1347
1349 """Check server time and local time to be within
1350 the given tolerance of each other.
1351
1352 tolerance: seconds
1353 """
1354 _log.debug('maximum skew tolerance (seconds): %s', tolerance)
1355
1356 cmd = u"select now() at time zone 'UTC'"
1357 conn = get_raw_connection(readonly=True)
1358 curs = conn.cursor()
1359
1360 start = time.time()
1361 rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
1362 end = time.time()
1363 client_now_as_utc = pydt.datetime.utcnow()
1364
1365 curs.close()
1366 conn.commit()
1367
1368 server_now_as_utc = rows[0][0]
1369 query_duration = end - start
1370 _log.info('server "now" (UTC): %s', server_now_as_utc)
1371 _log.info('client "now" (UTC): %s', client_now_as_utc)
1372 _log.debug('wire roundtrip (seconds): %s', query_duration)
1373
1374 if query_duration > tolerance:
1375 _log.error('useless to check client/server time skew, wire roundtrip > tolerance')
1376 return False
1377
1378 if server_now_as_utc > client_now_as_utc:
1379 real_skew = server_now_as_utc - client_now_as_utc
1380 else:
1381 real_skew = client_now_as_utc - server_now_as_utc
1382
1383 _log.debug('client/server time skew: %s', real_skew)
1384
1385 if real_skew > pydt.timedelta(seconds = tolerance):
1386 _log.error('client/server time skew > tolerance')
1387 return False
1388
1389 return True
1390
1392 """Checks database settings.
1393
1394 returns (status, message)
1395 status:
1396 0: no problem
1397 1: non-fatal problem
1398 2: fatal problem
1399 """
1400 _log.debug('checking database settings')
1401 settings = {
1402
1403 u'allow_system_table_mods': [u'off', u'system breakage', False],
1404 u'check_function_bodies': [u'on', u'suboptimal error detection', False],
1405 u'default_transaction_read_only': [u'on', u'accidental database writes', False],
1406 u'fsync': [u'on', u'data loss/corruption', True],
1407 u'full_page_writes': [u'on', u'data loss/corruption', False],
1408 u'lc_messages': [u'C', u'suboptimal error detection', False],
1409 u'password_encryption': [u'on', u'breach of confidentiality', False],
1410 u'regex_flavor': [u'advanced', u'query breakage', False],
1411 u'synchronous_commit': [u'on', u'data loss/corruption', False],
1412 u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
1413 }
1414
1415 from Gnumed.pycommon import gmCfg2
1416 _cfg = gmCfg2.gmCfgData()
1417 if _cfg.get(option = u'hipaa'):
1418 settings[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
1419 settings[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
1420 else:
1421 settings[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
1422 settings[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]
1423
1424 cmd = u"select name, setting from pg_settings where name in %(settings)s"
1425 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'settings': tuple(settings.keys())}}])
1426
1427 found_error = False
1428 found_problem = False
1429 msg = []
1430 for row in rows:
1431 if row[1] != settings[row[0]][0]:
1432 if settings[row[0]][2] is True:
1433 found_error = True
1434 elif settings[row[0]][2] is False:
1435 found_problem = True
1436 elif settings[row[0]][2] is None:
1437 pass
1438 else:
1439 _log.error(settings[row[0]])
1440 raise ValueError(u'invalid database configuration sanity check')
1441 msg.append(_(' option [%s]: %s') % (row[0], row[1]))
1442 msg.append(_(' risk: %s') % settings[row[0]][1])
1443 _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (row[0], row[1], settings[row[0]][0], settings[row[0]][1]))
1444
1445 if found_error:
1446 return 2, u'\n'.join(msg)
1447
1448 if found_problem:
1449 return 1, u'\n'.join(msg)
1450
1451 return 0, u''
1452
def __log_PG_settings(curs=None):
    """Dump all PostgreSQL runtime settings to the log.

    <curs> must be an open cursor on the connection to inspect.
    Returns True on success, False when the settings could not be read.
    """
    try:
        curs.execute(u'show all')
    except Exception:
        # narrowed from a bare except: never swallow SystemExit/KeyboardInterrupt
        _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
        return False
    settings = curs.fetchall()
    if settings is None:
        _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
        return False
    for setting in settings:
        _log.debug(u'PG option [%s]: %s', setting[0], setting[1])
    return True
1469
1471
1472 try:
1473 msg = exc.args[0]
1474 except (AttributeError, IndexError, TypeError):
1475 return u'cannot extract message from exception'
1476
1477 return unicode(msg, gmI18N.get_encoding(), 'replace')
1478
class cAuthenticationError(dbapi.OperationalError):
    """Raised when connecting fails for authentication-related reasons.

    Carries the DSN that failed and the original backend error message.
    """
    def __init__(self, dsn=None, prev_val=None):
        self.dsn = dsn              # the DSN the connect attempt used
        self.prev_val = prev_val    # the original error message

    def __str__(self):
        _log.warning('%s.__str__() called', self.__class__.__name__)
        tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
        _log.error(tmp)
        # py2 __str__ must return bytes -> encode in the locale encoding
        return tmp.encode(gmI18N.get_encoding(), 'replace')

    def __unicode__(self):
        return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1493
1494
1495
1496
class cEncodingError(dbapi.OperationalError):
    """Raised when the requested client encoding cannot be set.

    Carries the rejected encoding and the original backend error message.
    """
    def __init__(self, encoding=None, prev_val=None):
        self.encoding = encoding    # the encoding that was rejected
        self.prev_val = prev_val    # the original error message

    def __str__(self):
        _log.warning('%s.__str__() called', self.__class__.__name__)
        # py2 __str__ must return bytes -> encode both parts in the locale encoding
        return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

    def __unicode__(self):
        return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1509
1510
1511
1512
1514
1516 if dt.tzinfo is None:
1517 raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
1518 self.__dt = dt
1519
1522
1523
1525
1527 if dt.tz == '???':
1528 _log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
1529 self.__dt = dt
1530
1532
1533
1534
1535
1536
1537 return mxDT.ISO.str(self.__dt).replace(',', '.')
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1552
1553 try:
1554 return dbapi.DATETIME(string_value, cursor)
1555 except (dbapi.DataError,), exc:
1556 _log.error('unable to parse [%s]' % string_value)
1557
1558 if string_value is None:
1559 raise
1560
1561 if exc.message != "unable to parse time":
1562 raise
1563
1564 _log.debug('unable to parse as <timestamp with time zone>')
1565
1566 if regex.match('(\+|-)\d\d:\d\d:\d\d', string_value[-9:]) is None:
1567 raise
1568
1569 if regex.match('-\d\d:\d\d:\d\d', string_value[-9:]) is not None:
1570 if string_value[-5:-3] != '00':
1571 _log.debug('psycopg2 versions < 2.0.8 may misinterpret this time zone: [%s]', string_value[-9:])
1572
1573
1574
1575 _log.debug('time zone with seconds detected (true local time ?): %s', string_value[-9:])
1576 truncated_string_value = string_value[:-3]
1577 _log.warning('truncating to [%s] and trying again', truncated_string_value)
1578 _log.warning('value will be off by %s seconds', string_value[-2:])
1579 return dbapi.DATETIME(truncated_string_value, cursor)
1580
1581
# --- register a forgiving caster for <timestamp with time zone> ------------
# sanity check: the stock DATETIME caster must already cover the
# timestamptz OID, otherwise our override would be pointless
TIMESTAMPTZ_OID = 1184
if TIMESTAMPTZ_OID not in dbapi.DATETIME.values:
    raise ImportError('TIMESTAMPTZ_OID <%s> not in psycopg2.DATETIME.values [%s]' % (TIMESTAMPTZ_OID, dbapi.DATETIME.values))

# a typecaster wrapping convert_ts_with_odd_tz() around the stock parser
DT_W_ODD_TZ = psycopg2.extensions.new_type((TIMESTAMPTZ_OID,), 'DT_W_ODD_TZ', convert_ts_with_odd_tz)
1587
1588
1589
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
1600
1601
1602
1603
# --- global psycopg2 type/adapter setup ------------------------------------
# have text columns come back as unicode objects, not byte strings
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# teach psycopg2 how to adapt datetime values going *into* the database
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)
try:
    import mx.DateTime as mxDT
    psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
except ImportError:
    # mx.DateTime support is optional
    _log.warning('cannot import mx.DateTime')
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625 if __name__ == "__main__":
1626
1627 logging.basicConfig(level=logging.DEBUG)
1628
1630 run_rw_queries(queries = [
1631 {'cmd': u'create table test_bytea (data bytea)'}
1632 ])
1633
1634 cmd = u'insert into test_bytea values (%(data)s::bytea)'
1635 try:
1636 file2bytea(query = cmd, filename = sys.argv[2])
1637 except:
1638 _log.exception('error')
1639
1640 run_rw_queries(queries = [
1641 {'cmd': u'drop table test_bytea'}
1642 ])
1643
1645 print "testing get_connection()"
1646
1647 dsn = 'foo'
1648 try:
1649 conn = get_connection(dsn=dsn)
1650 except dbapi.OperationalError, e:
1651 print "SUCCESS: get_connection(%s) failed as expected" % dsn
1652 t, v = sys.exc_info()[:2]
1653 print ' ', t
1654 print ' ', v
1655
1656 dsn = 'dbname=gnumed_v9'
1657 try:
1658 conn = get_connection(dsn=dsn)
1659 except cAuthenticationError:
1660 print "SUCCESS: get_connection(%s) failed as expected" % dsn
1661 t, v = sys.exc_info()[:2]
1662 print ' ', t
1663 print ' ', v
1664
1665 dsn = 'dbname=gnumed_v9 user=abc'
1666 try:
1667 conn = get_connection(dsn=dsn)
1668 except cAuthenticationError:
1669 print "SUCCESS: get_connection(%s) failed as expected" % dsn
1670 t, v = sys.exc_info()[:2]
1671 print ' ', t
1672 print ' ', v
1673
1674 dsn = 'dbname=gnumed_v9 user=any-doc'
1675 try:
1676 conn = get_connection(dsn=dsn)
1677 except cAuthenticationError:
1678 print "SUCCESS: get_connection(%s) failed as expected" % dsn
1679 t, v = sys.exc_info()[:2]
1680 print ' ', t
1681 print ' ', v
1682
1683 dsn = 'dbname=gnumed_v9 user=any-doc password=abc'
1684 try:
1685 conn = get_connection(dsn=dsn)
1686 except cAuthenticationError:
1687 print "SUCCESS: get_connection(%s) failed as expected" % dsn
1688 t, v = sys.exc_info()[:2]
1689 print ' ', t
1690 print ' ', v
1691
1692 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
1693 conn = get_connection(dsn=dsn, readonly=True)
1694
1695 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
1696 conn = get_connection(dsn=dsn, readonly=False)
1697
1698 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
1699 encoding = 'foo'
1700 try:
1701 conn = get_connection(dsn=dsn, encoding=encoding)
1702 except cEncodingError:
1703 print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding)
1704 t, v = sys.exc_info()[:2]
1705 print ' ', t
1706 print ' ', v
1707
1709 print "testing exceptions"
1710
1711 try:
1712 raise cAuthenticationError('no dsn', 'no previous exception')
1713 except cAuthenticationError:
1714 t, v, tb = sys.exc_info()
1715 print t
1716 print v
1717 print tb
1718
1719 try:
1720 raise cEncodingError('no dsn', 'no previous exception')
1721 except cEncodingError:
1722 t, v, tb = sys.exc_info()
1723 print t
1724 print v
1725 print tb
1726
1728 print "testing run_ro_queries()"
1729
1730 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
1731 conn = get_connection(dsn, readonly=True)
1732
1733 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True)
1734 print data
1735 print idx
1736 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True)
1737 print data
1738 print idx
1739
1740 curs = conn.cursor()
1741
1742 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select version()'}], return_data=True, get_col_idx=True, verbose=True)
1743 print data
1744 print idx
1745
1746 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'select 1'}], return_data=True, get_col_idx=True, verbose=True)
1747 print data
1748 print idx
1749
1750 try:
1751 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True)
1752 print data
1753 print idx
1754 except psycopg2.ProgrammingError:
1755 print 'SUCCESS: run_ro_queries("selec 1") failed as expected'
1756 t, v = sys.exc_info()[:2]
1757 print ' ', t
1758 print ' ', v
1759
1760 curs.close()
1761
1766
1768 print "testing set_default_client_encoding()"
1769
1770 enc = 'foo'
1771 try:
1772 set_default_client_encoding(enc)
1773 print "SUCCESS: encoding [%s] worked" % enc
1774 except ValueError:
1775 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
1776 t, v = sys.exc_info()[:2]
1777 print ' ', t
1778 print ' ', v
1779
1780 enc = ''
1781 try:
1782 set_default_client_encoding(enc)
1783 print "SUCCESS: encoding [%s] worked" % enc
1784 except ValueError:
1785 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
1786 t, v = sys.exc_info()[:2]
1787 print ' ', t
1788 print ' ', v
1789
1790 enc = 'latin1'
1791 try:
1792 set_default_client_encoding(enc)
1793 print "SUCCESS: encoding [%s] worked" % enc
1794 except ValueError:
1795 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
1796 t, v = sys.exc_info()[:2]
1797 print ' ', t
1798 print ' ', v
1799
1800 enc = 'utf8'
1801 try:
1802 set_default_client_encoding(enc)
1803 print "SUCCESS: encoding [%s] worked" % enc
1804 except ValueError:
1805 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
1806 t, v = sys.exc_info()[:2]
1807 print ' ', t
1808 print ' ', v
1809
1810 enc = 'unicode'
1811 try:
1812 set_default_client_encoding(enc)
1813 print "SUCCESS: encoding [%s] worked" % enc
1814 except ValueError:
1815 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
1816 t, v = sys.exc_info()[:2]
1817 print ' ', t
1818 print ' ', v
1819
1820 enc = 'UNICODE'
1821 try:
1822 set_default_client_encoding(enc)
1823 print "SUCCESS: encoding [%s] worked" % enc
1824 except ValueError:
1825 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc
1826 t, v = sys.exc_info()[:2]
1827 print ' ', t
1828 print ' ', v
1829
1838
1840 dsn = get_default_dsn()
1841 conn = get_connection(dsn, readonly=True)
1842 curs = conn.cursor()
1843 curs.execute('select * from clin.clin_narrative where narrative = %s', ['a'])
1844
1846 tests = [
1847 ['(', '\\(']
1848 , ['[', '\\[']
1849 , [')', '\\)']
1850 ]
1851 for test in tests:
1852 result = sanitize_pg_regex(test[0])
1853 if result != test[1]:
1854 print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
1855
1857 status = True
1858 tests = [
1859 [None, True],
1860 [1, True],
1861 ['1', True],
1862 ['abc', False]
1863 ]
1864
1865 if not is_pg_interval():
1866 print 'ERROR: is_pg_interval() returned "False", expected "True"'
1867 status = False
1868
1869 for test in tests:
1870 result = is_pg_interval(test[0])
1871 if result != test[1]:
1872 print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
1873 status = False
1874
1875 return status
1876
1879
1887
1889 for row in get_foreign_keys2column (
1890 schema = u'dem',
1891 table = u'identity',
1892 column = u'pk'
1893 ):
1894 print '%s.%s references %s.%s.%s' % (
1895 row['referencing_table'],
1896 row['referencing_column'],
1897 row['referenced_schema'],
1898 row['referenced_table'],
1899 row['referenced_column']
1900 )
1901
1903
1904 tests = [
1905
1906 [None, 'de_DE', True],
1907 [None, 'lang_w/o_tx', False],
1908 [None, None, True],
1909
1910 ['any-doc', 'de_DE', True],
1911 ['any-doc', 'lang_w/o_tx', False],
1912 ['any-doc', None, True],
1913
1914 ['invalid user', 'de_DE', None],
1915 ['invalid user', 'lang_w/o_tx', False],
1916 ['invalid user', None, True]
1917 ]
1918 for test in tests:
1919 try:
1920 result = set_user_language(user = test[0], language = test[1])
1921 if result != test[2]:
1922 print "test:", test
1923 print "result:", result, "expected:", test[2]
1924 except psycopg2.IntegrityError, e:
1925 if test[2] is None:
1926 continue
1927 print "test:", test
1928 print "expected exception"
1929 print "result:", e
1930
1932 for line in get_schema_revision_history():
1933 print u' - '.join(line)
1934
1935 if len(sys.argv) > 1 and sys.argv[1] == 'test':
1936
1937
1938
1939
1940
1941
1942
1943
1944
1945
1946
1947
1948
1949
1950
1951 test_get_schema_revision_history()
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968
1969
1970
1971
1972
1973
1974
1975
1976
1977
1978
1979
1980
1981
1982
1983
1984
1985
1986
1987
1988
1989
1990
1991
1992
1993
1994
1995
1996
1997
1998
1999
2000
2001
2002
2003
2004
2005
2006
2007
2008
2009
2010
2011
2012
2013
2014
2015
2016
2017
2018
2019
2020
2021
2022
2023
2024
2025
2026
2027
2028
2029
2030
2031
2032
2033
2034
2035
2036
2037
2038
2039
2040
2041
2042
2043
2044
2045
2046
2047
2048
2049
2050
2051
2052
2053
2054
2055
2056
2057
2058
2059
2060
2061
2062
2063
2064
2065
2066
2067
2068
2069
2070
2071
2072
2073
2074
2075
2076
2077
2078
2079
2080
2081
2082
2083
2084
2085
2086
2087
2088
2089
2090
2091
2092
2093
2094
2095
2096
2097
2098
2099
2100
2101
2102
2103
2104
2105
2106
2107
2108
2109
2110
2111
2112
2113
2114
2115
2116
2117
2118
2119
2120
2121
2122
2123
2124
2125
2126
2127
2128
2129
2130
2131
2132
2133
2134
2135
2136
2137
2138
2139
2140
2141
2142
2143
2144
2145
2146
2147
2148
2149
2150
2151
2152
2153
2154
2155
2156
2157
2158
2159
2160
2161
2162
2163
2164
2165
2166
2167
2168
2169
2170
2171
2172
2173
2174
2175
2176
2177
2178
2179
2180
2181
2182
2183
2184
2185
2186
2187
2188
2189
2190
2191
2192
2193
2194
2195
2196
2197
2198
2199
2200
2201
2202
2203
2204
2205
2206
2207
2208
2209
2210
2211
2212
2213
2214
2215
2216
2217
2218
2219
2220
2221
2222
2223
2224
2225
2226
2227
2228
2229
2230
2231
2232
2233
2234
2235
2236
2237
2238
2239
2240
2241
2242
2243
2244
2245
2246
2247
2248
2249
2250
2251
2252
2253
2254
2255
2256
2257
2258
2259
2260
2261
2262
2263
2264
2265
2266
2267
2268
2269
2270
2271
2272
2273
2274
2275
2276
2277
2278
2279
2280
2281
2282
2283
2284
2285
2286
2287
2288
2289
2290
2291
2292
2293
2294
2295
2296
2297
2298
2299
2300
2301
2302
2303
2304
2305
2306
2307
2308
2309
2310
2311
2312
2313
2314
2315
2316
2317
2318
2319
2320
2321
2322
2323
2324
2325
2326
2327
2328
2329
2330
2331
2332
2333
2334
2335
2336
2337
2338
2339
2340
2341
2342
2343
2344
2345
2346
2347
2348
2349
2350
2351
2352
2353
2354
2355
2356
2357
2358
2359
2360
2361
2362
2363
2364
2365
2366
2367
2368
2369
2370
2371
2372
2373
2374
2375
2376
2377
2378
2379
2380
2381
2382
2383
2384
2385
2386
2387
2388
2389
2390
2391
2392
2393
2394
2395
2396
2397
2398
2399
2400
2401
2402
2403
2404
2405