@@ -164,92 +164,21 @@ def load(self, force: bool = True) -> None:
164164 # Build schema list for IN clause
165165 schemas_list = ", " .join (adapter .quote_string (s ) for s in self ._conn .schemas )
166166
167- # Backend-specific queries for primary keys and foreign keys
168- # Note: Both PyMySQL and psycopg2 use %s placeholders, so escape % as %%
167+ # Load primary keys and foreign keys via adapter methods
168+ # Note: Both PyMySQL and psycopg use %s placeholders, so escape % as %%
169169 like_pattern = "'~%%'"
170170
171- if adapter .backend == "mysql" :
172- # MySQL: use concat() and MySQL-specific information_schema columns
173- tab_expr = "concat('`', table_schema, '`.`', table_name, '`')"
174-
175- # load primary key info (MySQL uses constraint_name='PRIMARY')
176- keys = self ._conn .query (
177- f"""
178- SELECT { tab_expr } as tab, column_name
179- FROM information_schema.key_column_usage
180- WHERE table_name NOT LIKE { like_pattern }
181- AND table_schema in ({ schemas_list } )
182- AND constraint_name='PRIMARY'
183- """
184- )
185- pks = defaultdict (set )
186- for key in keys :
187- pks [key [0 ]].add (key [1 ])
188-
189- # load foreign keys (MySQL has referenced_* columns)
190- ref_tab_expr = "concat('`', referenced_table_schema, '`.`', referenced_table_name, '`')"
191- fk_keys = self ._conn .query (
192- f"""
193- SELECT constraint_name,
194- { tab_expr } as referencing_table,
195- { ref_tab_expr } as referenced_table,
196- column_name, referenced_column_name
197- FROM information_schema.key_column_usage
198- WHERE referenced_table_name NOT LIKE { like_pattern }
199- AND (referenced_table_schema in ({ schemas_list } )
200- OR referenced_table_schema is not NULL AND table_schema in ({ schemas_list } ))
201- """ ,
202- as_dict = True ,
203- )
204- else :
205- # PostgreSQL: use || concatenation and different query structure
206- tab_expr = "'\" ' || kcu.table_schema || '\" .\" ' || kcu.table_name || '\" '"
207-
208- # load primary key info (PostgreSQL uses constraint_type='PRIMARY KEY')
209- keys = self ._conn .query (
210- f"""
211- SELECT { tab_expr } as tab, kcu.column_name
212- FROM information_schema.key_column_usage kcu
213- JOIN information_schema.table_constraints tc
214- ON kcu.constraint_name = tc.constraint_name
215- AND kcu.table_schema = tc.table_schema
216- WHERE kcu.table_name NOT LIKE { like_pattern }
217- AND kcu.table_schema in ({ schemas_list } )
218- AND tc.constraint_type = 'PRIMARY KEY'
219- """
220- )
221- pks = defaultdict (set )
222- for key in keys :
223- pks [key [0 ]].add (key [1 ])
224-
225- # load foreign keys using pg_constraint system catalogs
226- # The information_schema approach creates a Cartesian product for composite FKs
227- # because constraint_column_usage doesn't have ordinal_position.
228- # Using pg_constraint with unnest(conkey, confkey) WITH ORDINALITY gives correct mapping.
229- fk_keys = self ._conn .query (
230- f"""
231- SELECT
232- c.conname as constraint_name,
233- '"' || ns1.nspname || '"."' || cl1.relname || '"' as referencing_table,
234- '"' || ns2.nspname || '"."' || cl2.relname || '"' as referenced_table,
235- a1.attname as column_name,
236- a2.attname as referenced_column_name
237- FROM pg_constraint c
238- JOIN pg_class cl1 ON c.conrelid = cl1.oid
239- JOIN pg_namespace ns1 ON cl1.relnamespace = ns1.oid
240- JOIN pg_class cl2 ON c.confrelid = cl2.oid
241- JOIN pg_namespace ns2 ON cl2.relnamespace = ns2.oid
242- CROSS JOIN LATERAL unnest(c.conkey, c.confkey) WITH ORDINALITY AS cols(conkey, confkey, ord)
243- JOIN pg_attribute a1 ON a1.attrelid = cl1.oid AND a1.attnum = cols.conkey
244- JOIN pg_attribute a2 ON a2.attrelid = cl2.oid AND a2.attnum = cols.confkey
245- WHERE c.contype = 'f'
246- AND cl1.relname NOT LIKE { like_pattern }
247- AND (ns2.nspname in ({ schemas_list } )
248- OR ns1.nspname in ({ schemas_list } ))
249- ORDER BY c.conname, cols.ord
250- """ ,
251- as_dict = True ,
252- )
171+ # load primary key info
172+ keys = self ._conn .query (adapter .load_primary_keys_sql (schemas_list , like_pattern ))
173+ pks = defaultdict (set )
174+ for key in keys :
175+ pks [key [0 ]].add (key [1 ])
176+
177+ # load foreign keys
178+ fk_keys = self ._conn .query (
179+ adapter .load_foreign_keys_sql (schemas_list , like_pattern ),
180+ as_dict = True ,
181+ )
253182
254183 # add nodes to the graph
255184 for n , pk in pks .items ():