# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import logging
import random
import string
from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union

import ibis.expr.datatypes as dt
import yaml

from data_validation import clients, consts, gcs_helper, state_manager
from data_validation.result_handlers.factory import build_result_handler
from data_validation.validation_builder import ValidationBuilder

if TYPE_CHECKING:
    import ibis.expr.types.Table
class ConfigManager(object):
    _config: dict = None
    _source_conn = None
    _target_conn = None
    _state_manager = None
    source_client = None
    target_client = None

    def __init__(self, config, source_client=None, target_client=None, verbose=False):
        """Initialize a ConfigManager client which supplies the
        source and target queries to run.

        Args:
            config (Dict): The validation config supplied.
            source_client (IbisClient): The Ibis client for the source DB.
            target_client (IbisClient): The Ibis client for the target DB.
            verbose (Bool): If verbose, the Data Validation client will print queries run.
        """
        self._state_manager = state_manager.StateManager()
        self._config = config
        self.source_client = source_client or clients.get_data_client(
            self.get_source_connection()
        )
        self.target_client = target_client or clients.get_data_client(
            self.get_target_connection()
        )
        self.verbose = verbose
        if self.validation_type not in consts.CONFIG_TYPES:
            raise ValueError(f"Unknown Configuration Type: {self.validation_type}")
        self._comparison_max_col_length = None
        # For some engines we need to know the actual raw data type rather than
        # the Ibis canonical type.
        self._source_raw_data_types = None
        self._target_raw_data_types = None
    @property
    def config(self):
        """Return config object."""
        return self._config

    def get_source_connection(self):
        """Return source connection object."""
        if not self._source_conn:
            if self._config.get(consts.CONFIG_SOURCE_CONN):
                self._source_conn = self._config.get(consts.CONFIG_SOURCE_CONN)
            else:
                conn_name = self._config.get(consts.CONFIG_SOURCE_CONN_NAME)
                self._source_conn = self._state_manager.get_connection_config(conn_name)
        return self._source_conn

    def get_target_connection(self):
        """Return target connection object."""
        if not self._target_conn:
            if self._config.get(consts.CONFIG_TARGET_CONN):
                self._target_conn = self._config.get(consts.CONFIG_TARGET_CONN)
            else:
                conn_name = self._config.get(consts.CONFIG_TARGET_CONN_NAME)
                self._target_conn = self._state_manager.get_connection_config(conn_name)
        return self._target_conn
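    # A hedged sketch of the two ways a connection can be supplied (the dict
    # values below are illustrative placeholders, not the full connection spec):
    #
    #   # 1. Inline connection details:
    #   config = {consts.CONFIG_SOURCE_CONN: {"source_type": "BigQuery"}}
    #
    #   # 2. A saved connection name, resolved via StateManager:
    #   config = {consts.CONFIG_SOURCE_CONN_NAME: "my_bq_conn"}
    #
    # get_source_connection() prefers the inline details and only falls back
    # to the named lookup when they are absent.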
    def get_source_raw_data_types(self) -> Dict[str, Tuple]:
        """Return raw data type information from the source system.

        The raw data type is the source/target engine type, for example it might
        be "NCLOB" or "char" when the Ibis type simply states "string".
        The data is cached in state when fetched for the first time.
        The return value is keyed on the casefolded column name and the tuple is
        the remaining 6 elements of the DB API cursor description specification."""
        if self._source_raw_data_types is None:
            if hasattr(self.source_client, "raw_column_metadata"):
                raw_data_types = self.source_client.raw_column_metadata(
                    database=self.source_schema,
                    table=self.source_table,
                    query=self.source_query,
                )
                self._source_raw_data_types = {
                    _[0].casefold(): _[1:] for _ in raw_data_types
                }
            else:
                self._source_raw_data_types = {}
        return self._source_raw_data_types

    def get_target_raw_data_types(self) -> Dict[str, Tuple]:
        """Return raw data type information from the target system.

        The raw data type is the source/target engine type, for example it might
        be "NCLOB" or "char" when the Ibis type simply states "string".
        The data is cached in state when fetched for the first time.
        The return value is keyed on the casefolded column name and the tuple is
        the remaining 6 elements of the DB API cursor description specification."""
        if self._target_raw_data_types is None:
            if hasattr(self.target_client, "raw_column_metadata"):
                raw_data_types = self.target_client.raw_column_metadata(
                    database=self.target_schema,
                    table=self.target_table,
                    query=self.target_query,
                )
                self._target_raw_data_types = {
                    _[0].casefold(): _[1:] for _ in raw_data_types
                }
            else:
                self._target_raw_data_types = {}
        return self._target_raw_data_types
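    # Illustrative sketch of the mapping built above. A DB API cursor
    # description row is a 7-tuple (PEP 249):
    #   (name, type_code, display_size, internal_size, precision, scale, null_ok)
    # so a hypothetical row ("NOTES", "NCLOB", None, 4000, None, None, 1)
    # would be cached as:
    #   {"notes": ("NCLOB", None, 4000, None, None, 1)}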
    def close_client_connections(self):
        """Attempt to clean up any source/target connections, based on the client types.

        Not all clients are covered here; we at least have Oracle and PostgreSQL, for
        which we have seen connections accumulating.
        https://github.com/GoogleCloudPlatform/professional-services-data-validator/issues/1195
        """
        try:
            if self.source_client and self.source_client.name in ("oracle", "postgres"):
                self.source_client.con.dispose()
            if self.target_client and self.target_client.name in ("oracle", "postgres"):
                self.target_client.con.dispose()
        except Exception as exc:
            # No need to reraise, we can silently fail if exiting throws up an issue.
            logging.warning("Exception closing connections: %s", str(exc))
    @property
    def validation_type(self):
        """Return string validation type (Column|Schema)."""
        return self._config[consts.CONFIG_TYPE]

    def use_random_rows(self):
        """Return whether the validation should use a random row filter."""
        return self._config.get(consts.CONFIG_USE_RANDOM_ROWS) or False

    def random_row_batch_size(self):
        """Return batch size for random row filter."""
        return int(
            self._config.get(consts.CONFIG_RANDOM_ROW_BATCH_SIZE)
            or consts.DEFAULT_NUM_RANDOM_ROWS
        )

    def get_random_row_batch_size(self):
        """Return number of random rows or None."""
        return self.random_row_batch_size() if self.use_random_rows() else None

    def trim_string_pks(self):
        """Return whether the validation should trim string primary keys."""
        return self._config.get(consts.CONFIG_TRIM_STRING_PKS) or False

    def case_insensitive_match(self):
        """Return whether the validation should perform a case insensitive match."""
        return self._config.get(consts.CONFIG_CASE_INSENSITIVE_MATCH) or False
    @property
    def max_recursive_query_size(self):
        """Return maximum recursive query size from config."""
        return self._config.get(consts.CONFIG_MAX_RECURSIVE_QUERY_SIZE, 50000)

    @property
    def aggregates(self):
        """Return aggregates from config."""
        return self._config.get(consts.CONFIG_AGGREGATES, [])

    def append_aggregates(self, aggregate_configs):
        """Append aggregate configs to existing config."""
        self._config[consts.CONFIG_AGGREGATES] = self.aggregates + aggregate_configs

    @property
    def calculated_fields(self):
        """Return calculated fields from config."""
        return self._config.get(consts.CONFIG_CALCULATED_FIELDS, [])

    def append_calculated_fields(self, calculated_configs):
        """Append calculated field configs to existing config."""
        self._config[consts.CONFIG_CALCULATED_FIELDS] = (
            self.calculated_fields + calculated_configs
        )

    @property
    def query_groups(self):
        """Return query groups from config."""
        return self._config.get(consts.CONFIG_GROUPED_COLUMNS, [])

    def append_query_groups(self, grouped_column_configs):
        """Append grouped column configs to existing config."""
        self._config[consts.CONFIG_GROUPED_COLUMNS] = (
            self.query_groups + grouped_column_configs
        )

    @property
    def custom_query_type(self):
        """Return custom query type from config."""
        return self._config.get(consts.CONFIG_CUSTOM_QUERY_TYPE, "")

    def append_custom_query_type(self, custom_query_type):
        """Append custom query type config to existing config."""
        self._config[consts.CONFIG_CUSTOM_QUERY_TYPE] = (
            self.custom_query_type + custom_query_type
        )
    @property
    def source_query_file(self):
        """Return source SQL query file from config."""
        return self._config.get(consts.CONFIG_SOURCE_QUERY_FILE, [])

    def append_source_query_file(self, query_file_configs):
        """Append source query file configs to existing config."""
        self._config[consts.CONFIG_SOURCE_QUERY_FILE] = (
            self.source_query_file + query_file_configs
        )

    @property
    def target_query_file(self):
        """Return target SQL query file from config."""
        return self._config.get(consts.CONFIG_TARGET_QUERY_FILE, [])

    def append_target_query_file(self, query_file_configs):
        """Append target query file configs to existing config."""
        self._config[consts.CONFIG_TARGET_QUERY_FILE] = (
            self.target_query_file + query_file_configs
        )
    @property
    def primary_keys(self):
        """Return primary key configs from config."""
        return self._config.get(consts.CONFIG_PRIMARY_KEYS, [])

    def append_primary_keys(self, primary_key_configs):
        """Append primary key configs to existing config."""
        self._config[consts.CONFIG_PRIMARY_KEYS] = (
            self.primary_keys + primary_key_configs
        )

    def get_primary_keys_list(self):
        """Return list of primary key column names."""
        return [key[consts.CONFIG_SOURCE_COLUMN] for key in self.primary_keys]

    @property
    def comparison_fields(self):
        """Return comparison field configs from config."""
        return self._config.get(consts.CONFIG_COMPARISON_FIELDS, [])

    def append_comparison_fields(self, field_configs):
        """Append comparison field configs to existing config."""
        self._config[consts.CONFIG_COMPARISON_FIELDS] = (
            self.comparison_fields + field_configs
        )

    @property
    def concat(self):
        """Return row concat setting from config."""
        return self._config.get(consts.CONFIG_ROW_CONCAT, [])

    @property
    def hash(self):
        """Return row hash setting from config."""
        return self._config.get(consts.CONFIG_ROW_HASH, [])

    @property
    def run_id(self):
        """Return run ID from config."""
        return self._config.get(consts.CONFIG_RUN_ID, None)

    @property
    def filters(self):
        """Return filters from config."""
        return self._config.get(consts.CONFIG_FILTERS, [])
    @property
    def source_schema(self):
        """Return string value of source schema."""
        if self.source_client._source_type == "FileSystem":
            return None
        return self._config.get(consts.CONFIG_SCHEMA_NAME, None)

    @property
    def source_table(self):
        """Return string value of source table."""
        return self._config[consts.CONFIG_TABLE_NAME]

    @property
    def target_schema(self):
        """Return string value of target schema."""
        if self.target_client._source_type == "FileSystem":
            return None
        return self._config.get(consts.CONFIG_TARGET_SCHEMA_NAME, self.source_schema)

    @property
    def target_table(self):
        """Return string value of target table."""
        return self._config.get(
            consts.CONFIG_TARGET_TABLE_NAME, self._config[consts.CONFIG_TABLE_NAME]
        )

    @property
    def full_target_table(self):
        """Return string value of fully qualified target table."""
        if self.target_schema:
            return self.target_schema + "." + self.target_table
        else:
            return self.target_table

    @property
    def full_source_table(self):
        """Return string value of fully qualified source table."""
        if self.source_table and self.source_schema:
            return self.source_schema + "." + self.source_table
        elif self.source_table:
            return self.source_table
        else:
            # Custom queries have no table name, so generate a random identifier.
            return f"custom.{''.join(random.choice(string.ascii_lowercase) for _ in range(5))}"

    @property
    def labels(self):
        """Return labels."""
        return self._config.get(consts.CONFIG_LABELS, [])
    @property
    def result_handler_config(self):
        """Return result handler config dict."""
        return self._config.get(consts.CONFIG_RESULT_HANDLER) or {}

    @property
    def query_limit(self):
        """Return int limit for query executions."""
        return self._config.get(consts.CONFIG_LIMIT)

    @property
    def threshold(self):
        """Return threshold from config."""
        return self._config.get(consts.CONFIG_THRESHOLD, 0.0)

    @property
    def source_query(self):
        """Return source custom query from config."""
        return self._config.get(consts.CONFIG_SOURCE_QUERY, None)

    def append_source_query(self, source_query):
        """Add source custom query to config."""
        self._config[consts.CONFIG_SOURCE_QUERY] = source_query

    @property
    def target_query(self):
        """Return target custom query from config."""
        return self._config.get(consts.CONFIG_TARGET_QUERY, None)

    def append_target_query(self, target_query):
        """Add target custom query to config."""
        self._config[consts.CONFIG_TARGET_QUERY] = target_query

    @property
    def exclusion_columns(self):
        """Return the exclusion columns from config."""
        return self._config.get(consts.CONFIG_EXCLUSION_COLUMNS, [])

    @property
    def allow_list(self):
        """Return the allow_list from config."""
        return self._config.get(consts.CONFIG_ALLOW_LIST, "")

    @property
    def filter_status(self):
        """Return filter status list from config."""
        return self._config.get(consts.CONFIG_FILTER_STATUS, None)
    def append_exclusion_columns(self, column_configs):
        """Append exclusion columns to existing config."""
        self._config[consts.CONFIG_EXCLUSION_COLUMNS] = (
            self.exclusion_columns + column_configs
        )

    def append_allow_list(
        self, allow_list: Union[str, None], allow_list_file: Union[str, None]
    ):
        """Append datatype allow_list to existing config."""
        full_allow_list = []
        if allow_list:
            allow_list = allow_list.replace(" ", "")
            full_allow_list.append(allow_list)
        if allow_list_file:
            try:
                allow_list_yaml = gcs_helper.read_file(allow_list_file)
            except FileNotFoundError as e:
                raise ValueError(
                    f"Cannot locate --allow-list-file: {allow_list_file}"
                ) from e
            allow_list_dict = yaml.safe_load(allow_list_yaml)
            full_allow_list.append(
                ",".join([f"{_[0]}:{_[1]}" for _ in allow_list_dict.items()])
            )
        self._config[consts.CONFIG_ALLOW_LIST] = ",".join(full_allow_list)
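    # Illustrative sketch of the allow-list merge above (the type pairs and
    # file path are hypothetical):
    #
    #   # --allow-list string plus a YAML file containing {int32: int64}:
    #   cm.append_allow_list("decimal(38,0):int64", "gs://bucket/allow.yaml")
    #   # config now holds: "decimal(38,0):int64,int32:int64"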
    def get_source_ibis_table(self):
        """Return IbisTable from source."""
        if not hasattr(self, "_source_ibis_table"):
            self._source_ibis_table = clients.get_ibis_table(
                self.source_client, self.source_schema, self.source_table
            )
        return self._source_ibis_table

    def get_source_ibis_table_from_query(self):
        """Return IbisTable from source custom query."""
        if not hasattr(self, "_source_ibis_table"):
            self._source_ibis_table = clients.get_ibis_query(
                self.source_client, self.source_query
            )
        return self._source_ibis_table

    def get_source_ibis_calculated_table(self, depth=None):
        """Return mutated IbisTable from source.

        depth: Int, the depth of subquery requested."""
        if self.validation_type == consts.CUSTOM_QUERY:
            table = self.get_source_ibis_table_from_query()
        else:
            table = self.get_source_ibis_table()
        vb = ValidationBuilder(self)
        calculated_table = table.mutate(
            vb.source_builder.compile_calculated_fields(table, n=depth)
        )
        return calculated_table

    def get_target_ibis_table(self):
        """Return IbisTable from target."""
        if not hasattr(self, "_target_ibis_table"):
            self._target_ibis_table = clients.get_ibis_table(
                self.target_client, self.target_schema, self.target_table
            )
        return self._target_ibis_table

    def get_target_ibis_table_from_query(self):
        """Return IbisTable from target custom query."""
        if not hasattr(self, "_target_ibis_table"):
            self._target_ibis_table = clients.get_ibis_query(
                self.target_client, self.target_query
            )
        return self._target_ibis_table

    def get_target_ibis_calculated_table(self, depth=None):
        """Return mutated IbisTable from target.

        depth: Int, the depth of subquery requested."""
        if self.validation_type == consts.CUSTOM_QUERY:
            table = self.get_target_ibis_table_from_query()
        else:
            table = self.get_target_ibis_table()
        vb = ValidationBuilder(self)
        calculated_table = table.mutate(
            vb.target_builder.compile_calculated_fields(table, n=depth)
        )
        return calculated_table
    def get_yaml_validation_block(self):
        """Return Dict object formatted for a YAML file."""
        config = copy.deepcopy(self.config)
        config.pop(consts.CONFIG_SOURCE_CONN, None)
        config.pop(consts.CONFIG_TARGET_CONN, None)
        config.pop(consts.CONFIG_SOURCE_CONN_NAME, None)
        config.pop(consts.CONFIG_TARGET_CONN_NAME, None)
        config.pop(consts.CONFIG_RESULT_HANDLER, None)
        return config

    def get_result_handler(self):
        """Return ResultHandler instance from supplied config."""
        return build_result_handler(
            self.result_handler_config,
            self.config[consts.CONFIG_TYPE],
            self.filter_status,
            text_format=self._config.get(
                consts.CONFIG_FORMAT, consts.FORMAT_TYPE_TABLE
            ),
        )
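    # Sketch of why connection and result-handler details are popped in
    # get_yaml_validation_block(): the YAML block is meant to be shareable, so
    # environment-specific settings are stripped and only the validation
    # itself remains, e.g. (hypothetical field names for illustration):
    #
    #   validations:
    #   - type: Column
    #     table_name: my_table
    #     aggregates: [...]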
    @staticmethod
    def build_config_manager(
        config_type,
        source_conn_name,
        target_conn_name,
        table_obj,
        labels,
        threshold,
        format,
        use_random_rows=None,
        random_row_batch_size=None,
        source_client=None,
        target_client=None,
        result_handler_config=None,
        filter_config=None,
        filter_status=None,
        trim_string_pks=None,
        case_insensitive_match=None,
        concat=None,
        hash=None,
        run_id=None,
        verbose=False,
    ):
        """Return a ConfigManager instance with available config."""
        if isinstance(filter_config, dict):
            filter_config = [filter_config]
        config = {
            consts.CONFIG_TYPE: config_type,
            consts.CONFIG_SOURCE_CONN_NAME: source_conn_name,
            consts.CONFIG_TARGET_CONN_NAME: target_conn_name,
            consts.CONFIG_TABLE_NAME: table_obj.get(consts.CONFIG_TABLE_NAME, None),
            consts.CONFIG_SCHEMA_NAME: table_obj.get(consts.CONFIG_SCHEMA_NAME, None),
            consts.CONFIG_TARGET_SCHEMA_NAME: table_obj.get(
                consts.CONFIG_TARGET_SCHEMA_NAME,
                table_obj.get(consts.CONFIG_SCHEMA_NAME, None),
            ),
            consts.CONFIG_TARGET_TABLE_NAME: table_obj.get(
                consts.CONFIG_TARGET_TABLE_NAME,
                table_obj.get(consts.CONFIG_TABLE_NAME, None),
            ),
            consts.CONFIG_LABELS: labels,
            consts.CONFIG_THRESHOLD: threshold,
            consts.CONFIG_FORMAT: format,
            consts.CONFIG_RESULT_HANDLER: result_handler_config,
            consts.CONFIG_FILTERS: filter_config,
            consts.CONFIG_USE_RANDOM_ROWS: use_random_rows,
            consts.CONFIG_RANDOM_ROW_BATCH_SIZE: random_row_batch_size,
            consts.CONFIG_FILTER_STATUS: filter_status,
            consts.CONFIG_TRIM_STRING_PKS: trim_string_pks,
            consts.CONFIG_CASE_INSENSITIVE_MATCH: case_insensitive_match,
            consts.CONFIG_ROW_CONCAT: concat,
            consts.CONFIG_ROW_HASH: hash,
            consts.CONFIG_RUN_ID: run_id,
        }
        return ConfigManager(
            config,
            source_client=source_client,
            target_client=target_client,
            verbose=verbose,
        )
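    # A minimal, hypothetical usage sketch (connection names, table name and
    # format value are placeholders, not guaranteed to match your environment):
    #
    #   cm = ConfigManager.build_config_manager(
    #       config_type="Column",
    #       source_conn_name="my_source_conn",
    #       target_conn_name="my_target_conn",
    #       table_obj={consts.CONFIG_TABLE_NAME: "orders"},
    #       labels=[],
    #       threshold=0.0,
    #       format="table",
    #   )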
    def add_rstrip_to_comp_fields(self, comparison_fields: List[str]) -> List[str]:
        """As per #1190, add an rstrip calculated field for Teradata string comparison fields.

        Parameters:
            comparison_fields: List[str] of comparison field columns
        Returns:
            comp_fields_with_aliases: List[str] of comparison field columns with rstrip aliases
        """
        source_table = self.get_source_ibis_calculated_table()
        target_table = self.get_target_ibis_calculated_table()
        source_table_schema = {k: v for k, v in source_table.schema().items()}
        target_table_schema = {k: v for k, v in target_table.schema().items()}
        casefold_source_columns = {x.casefold(): str(x) for x in source_table.columns}
        casefold_target_columns = {x.casefold(): str(x) for x in target_table.columns}
        comp_fields_with_aliases = []
        calculated_configs = []
        for field in comparison_fields:
            if field.casefold() not in casefold_source_columns:
                raise ValueError(f"Column DNE in source: {field}")
            if field.casefold() not in casefold_target_columns:
                raise ValueError(f"Column DNE in target: {field}")
            source_ibis_type = source_table[
                casefold_source_columns[field.casefold()]
            ].type()
            target_ibis_type = target_table[
                casefold_target_columns[field.casefold()]
            ].type()
            if (
                source_ibis_type.is_string() or target_ibis_type.is_string()
            ) and not self._comp_field_cast(
                # Do not add rstrip if the column is a bool or UUID hiding in a string.
                source_table_schema,
                target_table_schema,
                field,
            ):
                logging.info(
                    f"Adding rtrim() to string comparison field `{field.casefold()}` due to Teradata CHAR padding."
                )
                alias = f"rstrip__{field.casefold()}"
                calculated_configs.append(
                    self.build_config_calculated_fields(
                        [casefold_source_columns[field.casefold()]],
                        [casefold_target_columns[field.casefold()]],
                        "rstrip",
                        alias,
                        0,
                    )
                )
                comp_fields_with_aliases.append(alias)
            else:
                comp_fields_with_aliases.append(field)
        self.append_calculated_fields(calculated_configs)
        return comp_fields_with_aliases
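    # Worked example of the aliasing above (hypothetical column): a Teradata
    # CHAR(10) column "NAME" holding 'abc       ' is compared via a calculated
    # field so that trailing pad spaces do not fail the validation:
    #
    #   cm.add_rstrip_to_comp_fields(["NAME"])  # -> ["rstrip__name"]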
    def _comp_field_cast(
        self, source_table_schema: dict, target_table_schema: dict, field: str
    ) -> Optional[str]:
        """Return a cast type ("bool" or UUID string cast) for a comparison field, or None."""
        # We check below if the field exists because sometimes it is a computed name
        # like "concat__all" which is not in the real table.
        source_type = (
            source_table_schema[field] if field in source_table_schema else None
        )
        target_type = (
            target_table_schema[field] if field in target_table_schema else None
        )
        if self._is_bool(source_type, target_type):
            return "bool"
        elif self._is_uuid(source_type, target_type):
            return consts.CONFIG_CAST_UUID_STRING
        return None

    def _is_bool(
        self, source_type: Union[str, dt.DataType], target_type: Union[str, dt.DataType]
    ) -> bool:
        """Return whether the column is BOOLEAN based on either source or target data type.

        We do this because some engines don't have a BOOLEAN type, therefore BOOLEAN
        on one side means both sides need to be BOOLEAN aware."""
        if isinstance(source_type, str):
            return any(_ in ["bool", "!bool"] for _ in [source_type, target_type])
        else:
            return bool(
                isinstance(source_type, dt.Boolean)
                or isinstance(target_type, dt.Boolean)
            )

    def _is_uuid(
        self, source_type: Union[str, dt.DataType], target_type: Union[str, dt.DataType]
    ) -> bool:
        """Return whether the column is UUID based on either source or target data type.

        We do this because some engines don't have a UUID type, therefore UUID on one
        side means both sides are UUID, i.e. we use any() not all()."""
        if isinstance(source_type, str):
            return any(_ in ["uuid", "!uuid"] for _ in [source_type, target_type])
        else:
            return bool(
                isinstance(source_type, dt.UUID) or isinstance(target_type, dt.UUID)
            )
    def build_config_comparison_fields(self, fields, depth=None):
        """Return list of field config objects."""
        field_configs = []
        source_table = self.get_source_ibis_calculated_table()
        target_table = self.get_target_ibis_calculated_table()
        source_table_schema = {k: v for k, v in source_table.schema().items()}
        target_table_schema = {k: v for k, v in target_table.schema().items()}
        casefold_source_columns = {x.casefold(): str(x) for x in source_table.columns}
        casefold_target_columns = {x.casefold(): str(x) for x in target_table.columns}
        for field in fields:
            cast_type = self._comp_field_cast(
                source_table_schema, target_table_schema, field
            )
            column_config = {
                consts.CONFIG_SOURCE_COLUMN: casefold_source_columns.get(
                    field.casefold(), field
                ),
                consts.CONFIG_TARGET_COLUMN: casefold_target_columns.get(
                    field.casefold(), field
                ),
                consts.CONFIG_FIELD_ALIAS: field,
                consts.CONFIG_CAST: cast_type,
            }
            field_configs.append(column_config)
        return field_configs
    def build_column_configs(self, columns):
        """Return list of column config objects."""
        column_configs = []
        source_table = self.get_source_ibis_calculated_table()
        target_table = self.get_target_ibis_calculated_table()
        casefold_source_columns = {x.casefold(): str(x) for x in source_table.columns}
        casefold_target_columns = {x.casefold(): str(x) for x in target_table.columns}
        for column in columns:
            if column.casefold() not in casefold_source_columns:
                raise ValueError(f"Column DNE in source: {column}")
            if column.casefold() not in casefold_target_columns:
                raise ValueError(f"Column DNE in target: {column}")
            source_ibis_type = source_table[
                casefold_source_columns[column.casefold()]
            ].type()
            target_ibis_type = target_table[
                casefold_target_columns[column.casefold()]
            ].type()
            cast_type = self._key_column_needs_casting_to_string(
                source_ibis_type, target_ibis_type
            )
            column_config = {
                consts.CONFIG_SOURCE_COLUMN: casefold_source_columns[column.casefold()],
                consts.CONFIG_TARGET_COLUMN: casefold_target_columns[column.casefold()],
                consts.CONFIG_FIELD_ALIAS: column,
                consts.CONFIG_CAST: cast_type,
            }
            column_configs.append(column_config)
        return column_configs

    def build_config_count_aggregate(self):
        """Return dict aggregate for COUNT(*)."""
        aggregate_config = {
            consts.CONFIG_SOURCE_COLUMN: None,
            consts.CONFIG_TARGET_COLUMN: None,
            consts.CONFIG_FIELD_ALIAS: "count",
            consts.CONFIG_TYPE: "count",
        }
        return aggregate_config
    def _prefix_calc_col_name(
        self, column_name: str, prefix: str, column_number: int
    ) -> str:
        """Prefix a column name but protect the final string from overflowing the
        SQL engine's identifier length limit."""
        new_name = f"{prefix}__{column_name}"
        if len(new_name) > self._get_comparison_max_col_length():
            # Use an abstract name for the calculated column to avoid composing invalid SQL.
            new_name = f"{prefix}__dvt_calc_col_{column_number}"
        return new_name
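    # Worked example (assuming a hypothetical 30-character identifier limit):
    # for column "a_very_long_descriptive_column_name" and prefix "length",
    # "length__a_very_long_descriptive_column_name" exceeds the limit, so
    # column number 7 would instead yield "length__dvt_calc_col_7".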
    def build_and_append_pre_agg_calc_config(
        self,
        source_column,
        target_column,
        calc_func,
        column_position,
        cast_type: str = None,
        depth: int = 0,
    ):
        """Create calculated field config used as a pre-aggregation step.

        Appends to calculated fields if it does not already exist and returns the
        created config."""
        calculated_config = {
            consts.CONFIG_CALCULATED_SOURCE_COLUMNS: [source_column],
            consts.CONFIG_CALCULATED_TARGET_COLUMNS: [target_column],
            consts.CONFIG_FIELD_ALIAS: self._prefix_calc_col_name(
                source_column, calc_func, column_position
            ),
            consts.CONFIG_TYPE: calc_func,
            consts.CONFIG_DEPTH: depth,
        }
        if calc_func == consts.CONFIG_CAST and cast_type is not None:
            calculated_config[consts.CONFIG_DEFAULT_CAST] = cast_type
            calculated_config[consts.CONFIG_FIELD_ALIAS] = self._prefix_calc_col_name(
                source_column, f"{calc_func}_{cast_type}", column_position
            )
        existing_calc_fields = [
            config[consts.CONFIG_FIELD_ALIAS] for config in self.calculated_fields
        ]
        if calculated_config[consts.CONFIG_FIELD_ALIAS] not in existing_calc_fields:
            self.append_calculated_fields([calculated_config])
        return calculated_config
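    # Hedged sketch of the config produced for a cast step, assuming
    # consts.CONFIG_CAST == "cast" and the other const values resolve to the
    # key strings shown (illustrative only):
    #
    #   build_and_append_pre_agg_calc_config("col_a", "col_a", "cast", 0,
    #                                        cast_type="string")
    #   # -> {"source_calculated_columns": ["col_a"],
    #   #     "target_calculated_columns": ["col_a"],
    #   #     "field_alias": "cast_string__col_a",
    #   #     "type": "cast", "depth": 0, "default_cast": "string"}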
    def append_pre_agg_calc_field(
        self,
        source_column: str,
        target_column: str,
        agg_type: str,
        column_type: str,
        target_column_type: str,
        column_position: int,
    ) -> dict:
        """Append calculated field for length() or epoch_seconds(timestamp) for
        preprocessing before column validation aggregation."""
        depth = 0
        cast_type = None
        final_cast_type = None
        if any(_ in ["json", "!json"] for _ in [column_type, target_column_type]):
            # JSON data which needs casting to string before we apply a length function.
            pre_calculated_config = self.build_and_append_pre_agg_calc_config(
                source_column,
                target_column,
                consts.CONFIG_CAST,
                column_position,
                cast_type="string",
                depth=depth,
            )
            source_column = target_column = pre_calculated_config[
                consts.CONFIG_FIELD_ALIAS
            ]
            depth = 1
            calc_func = "length"
        elif column_type in ["string", "!string"]:
            calc_func = "length"
        elif self._is_uuid(column_type, target_column_type):
            calc_func = consts.CONFIG_CAST
            cast_type = consts.CONFIG_CAST_UUID_STRING
        elif column_type in ["binary", "!binary"]:
            calc_func = "byte_length"
        elif column_type in ["timestamp", "!timestamp", "date", "!date"]:
            if (
                self.source_client.name == "bigquery"
                or self.target_client.name == "bigquery"
            ):
                pre_calculated_config = self.build_and_append_pre_agg_calc_config(
                    source_column,
                    target_column,
                    consts.CONFIG_CAST,
                    column_position,
                    cast_type="timestamp",
                    depth=depth,
                )
                source_column = target_column = pre_calculated_config[
                    consts.CONFIG_FIELD_ALIAS
                ]
                depth = 1
            calc_func = "epoch_seconds"
            if agg_type == consts.CONFIG_TYPE_SUM:
                # It is possible to exceed int64 when summing epoch_seconds,
                # therefore cast to string. See issue 1391 for details.
                final_cast_type = "string"
        elif column_type == "int32" or column_type == "!int32":
            calc_func = consts.CONFIG_CAST
            cast_type = "int64"
        else:
            raise ValueError(f"Unsupported column type: {column_type}")
        calculated_config = self.build_and_append_pre_agg_calc_config(
            source_column,
            target_column,
            calc_func,
            column_position,
            cast_type=cast_type,
            depth=depth,
        )
        aggregate_config = {
            consts.CONFIG_SOURCE_COLUMN: f"{calculated_config[consts.CONFIG_FIELD_ALIAS]}",
            consts.CONFIG_TARGET_COLUMN: f"{calculated_config[consts.CONFIG_FIELD_ALIAS]}",
            consts.CONFIG_FIELD_ALIAS: self._prefix_calc_col_name(
                calculated_config[consts.CONFIG_FIELD_ALIAS],
                f"{agg_type}",
                column_position,
            ),
            consts.CONFIG_TYPE: agg_type,
        }
        if final_cast_type:
            # Adding to dict this way to avoid adding a lot of empty cast attributes.
            aggregate_config[consts.CONFIG_CAST] = final_cast_type
        return aggregate_config
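    # Hedged walk-through (hypothetical column "payload" of JSON type): the
    # JSON branch first appends a depth-0 cast-to-string calculated field,
    # then a depth-1 length() field on top of it, and finally returns an
    # aggregate config over the length alias, e.g. a sum over
    # "length__cast_string__payload" (alias strings assume the const values
    # sketched earlier).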
    def _decimal_column_too_big_for_pandas(
        self,
        source_column_ibis_type: dt.DataType,
        target_column_ibis_type: dt.DataType,
        margin: int = 0,
    ) -> bool:
        """Identify numeric columns that will cause problems in a Pandas DataFrame,
        i.e. are of greater precision than a 64-bit int/real can hold.

        margin: Allows us to lower the precision threshold. This is helpful when
        summing column values that are okay by themselves but cumulatively could
        overflow a 64-bit value.
        """
        return bool(
            (
                (isinstance(source_column_ibis_type, dt.Int64) and margin > 0)
                or (
                    isinstance(source_column_ibis_type, dt.Decimal)
                    and (
                        source_column_ibis_type.precision is None
                        or source_column_ibis_type.precision > (18 - margin)
                    )
                )
            )
            and (
                (isinstance(target_column_ibis_type, dt.Int64) and margin > 0)
                or (
                    isinstance(target_column_ibis_type, dt.Decimal)
                    and (
                        target_column_ibis_type.precision is None
                        or target_column_ibis_type.precision > (18 - margin)
                    )
                )
            )
        )
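    # Worked example: a NUMBER(38,9) column maps to an Ibis Decimal with
    # precision=38, and 38 > 18, so if both sides look like this the method
    # returns True and the key column is cast to string rather than risking
    # overflow or precision loss in a 64-bit Pandas dtype.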
    def _key_column_needs_casting_to_string(
        self,
        source_column_ibis_type: dt.DataType,
        target_column_ibis_type: dt.DataType,
    ) -> Optional[str]:
        """Return a string cast if the datatype combination requires it, otherwise None."""
        if self._is_uuid(source_column_ibis_type, target_column_ibis_type):
            # This needs to come before the binary check because Oracle
            # stores UUIDs (GUIDs) in binary columns.
            return consts.CONFIG_CAST_UUID_STRING
        elif (
            self._decimal_column_too_big_for_pandas(
                source_column_ibis_type, target_column_ibis_type
            )
            or isinstance(source_column_ibis_type, dt.Binary)
            or isinstance(target_column_ibis_type, dt.Binary)
        ):
            return "string"
        else:
            return None

    def _type_is_supported_for_agg_validation(
        self, source_type: str, target_type: str, supported_types: list
    ) -> bool:
        """Return whether the source/target type pair is supported for aggregate validation."""
        if self._is_uuid(source_type, target_type):
            return bool("uuid" in supported_types)
        return bool(source_type in supported_types and target_type in supported_types)
    def build_config_column_aggregates(
        self, agg_type, arg_value, exclude_cols, supported_types, cast_to_bigint=False
    ):
        """Return list of aggregate objects of given agg_type."""

        def require_pre_agg_calc_field(
            column_type: str,
            target_column_type: str,
            agg_type: str,
            cast_to_bigint: bool,
        ) -> bool:
            if all(
                _ in ["string", "!string", "json", "!json"]
                for _ in [column_type, target_column_type]
            ):
                # These data types are aggregated using their lengths.
                return True
            elif self._is_uuid(column_type, target_column_type):
                return True
            elif column_type in ["binary", "!binary"]:
                if agg_type == "count":
                    # Oracle BLOB is invalid for use with the SQL COUNT function.
                    # The expression below returns True if the client is Oracle, which
                    # has the effect of triggering use of the byte_length transformation.
                    return bool(
                        self.source_client.name == "oracle"
                        or self.target_client.name == "oracle"
                    )
                else:
                    # Convert to length for any min/max/sum on binary columns.
                    return True
            elif cast_to_bigint and column_type in ["int32", "!int32"]:
                return True
            elif column_type in [
                "timestamp",
                "!timestamp",
                "date",
                "!date",
            ] and agg_type in (
                "sum",
                "avg",
                "bit_xor",
            ):
                # For timestamps: do not convert to epoch seconds for min/max.
                return True
            return False

        aggregate_configs = []
        source_table = self.get_source_ibis_calculated_table()
        target_table = self.get_target_ibis_calculated_table()
        casefold_source_columns = {x.casefold(): str(x) for x in source_table.columns}
        casefold_target_columns = {x.casefold(): str(x) for x in target_table.columns}
        if arg_value: