From 1dd90268b5aefcf5ac9b5ef9d7d114b068138683 Mon Sep 17 00:00:00 2001 From: Anthony Ly Date: Thu, 27 Jun 2024 17:46:12 +0200 Subject: [PATCH 1/5] feat: oracle connector --- libs/connector-oracle/.gitignore | 5 + libs/connector-oracle/.npmignore | 7 + libs/connector-oracle/README.md | 26 + libs/connector-oracle/jest.config.js | 6 + libs/connector-oracle/package.json | 40 ++ libs/connector-oracle/src/connect.test.ts | 30 + libs/connector-oracle/src/connect.ts | 130 ++++ libs/connector-oracle/src/constants.test.ts | 14 + libs/connector-oracle/src/helpers.test.ts | 16 + libs/connector-oracle/src/helpers.ts | 11 + libs/connector-oracle/src/history.test.ts | 33 + libs/connector-oracle/src/history.ts | 50 ++ libs/connector-oracle/src/index.ts | 83 +++ libs/connector-oracle/src/integration.test.ts | 35 + libs/connector-oracle/src/oracle.test.ts | 91 +++ libs/connector-oracle/src/oracle.ts | 672 ++++++++++++++++++ libs/connector-oracle/src/query.ts | 46 ++ libs/connector-oracle/src/stats.test.ts | 37 + libs/connector-oracle/src/stats.ts | 129 ++++ libs/connector-oracle/tsconfig.json | 105 +++ libs/models/src/databaseUrl.ts | 9 + pnpm-lock.yaml | 58 +- 22 files changed, 1627 insertions(+), 6 deletions(-) create mode 100644 libs/connector-oracle/.gitignore create mode 100644 libs/connector-oracle/.npmignore create mode 100644 libs/connector-oracle/README.md create mode 100644 libs/connector-oracle/jest.config.js create mode 100644 libs/connector-oracle/package.json create mode 100644 libs/connector-oracle/src/connect.test.ts create mode 100644 libs/connector-oracle/src/connect.ts create mode 100644 libs/connector-oracle/src/constants.test.ts create mode 100644 libs/connector-oracle/src/helpers.test.ts create mode 100644 libs/connector-oracle/src/helpers.ts create mode 100644 libs/connector-oracle/src/history.test.ts create mode 100644 libs/connector-oracle/src/history.ts create mode 100644 libs/connector-oracle/src/index.ts create mode 100644 
libs/connector-oracle/src/integration.test.ts create mode 100644 libs/connector-oracle/src/oracle.test.ts create mode 100644 libs/connector-oracle/src/oracle.ts create mode 100644 libs/connector-oracle/src/query.ts create mode 100644 libs/connector-oracle/src/stats.test.ts create mode 100644 libs/connector-oracle/src/stats.ts create mode 100644 libs/connector-oracle/tsconfig.json diff --git a/libs/connector-oracle/.gitignore b/libs/connector-oracle/.gitignore new file mode 100644 index 000000000..173047170 --- /dev/null +++ b/libs/connector-oracle/.gitignore @@ -0,0 +1,5 @@ +node_modules +out +local +src/**/*.js +*.tgz diff --git a/libs/connector-oracle/.npmignore b/libs/connector-oracle/.npmignore new file mode 100644 index 000000000..ceefdda91 --- /dev/null +++ b/libs/connector-oracle/.npmignore @@ -0,0 +1,7 @@ +local +resources +src/*.js +src/*.test.ts +jest.config.js +tsconfig.json +*.tgz diff --git a/libs/connector-oracle/README.md b/libs/connector-oracle/README.md new file mode 100644 index 000000000..7d0978d21 --- /dev/null +++ b/libs/connector-oracle/README.md @@ -0,0 +1,26 @@ +# Oracle connector + +This library allows to connect to [Oracle](https://www.oracle.com/database), extract its schema and more... + +It lists all schemas, tables, columns, relations and types and format them in a JSON Schema. + +This library is made by [Azimutt](https://azimutt.app) to allow people to explore their Oracle database. +It's accessible through the [Desktop app](../../desktop) (soon), the [CLI](https://www.npmjs.com/package/azimutt) or even the website using the [gateway](../../gateway) server. + +**Feel free to use it and even submit PR to improve it:** + +## Publish + +- update `package.json` version +- update lib versions (`pnpm -w run update` + manual) +- test with `pnpm run dry-publish` and check `azimutt-connector-oracle-x.y.z.tgz` content +- launch `pnpm publish --access public` + +View it on [npm](https://www.npmjs.com/package/@azimutt/connector-oracle). 
+ +## Dev + +If you need to develop on multiple libs at the same time (ex: want to update a connector and try it through the CLI), depend on local libs but publish & revert before commit. + +- Depend on a local lib: `pnpm add `, ex: `pnpm add @azimutt/models` +- "Publish" lib locally by building it: `pnpm run build` diff --git a/libs/connector-oracle/jest.config.js b/libs/connector-oracle/jest.config.js new file mode 100644 index 000000000..4f32262e0 --- /dev/null +++ b/libs/connector-oracle/jest.config.js @@ -0,0 +1,6 @@ +module.exports = { + transform: {'^.+\\.ts?$': 'ts-jest'}, + testEnvironment: 'node', + testRegex: '/src/.+\\.test?\\.(ts|tsx)$', + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'] +} diff --git a/libs/connector-oracle/package.json b/libs/connector-oracle/package.json new file mode 100644 index 000000000..6d24426e9 --- /dev/null +++ b/libs/connector-oracle/package.json @@ -0,0 +1,40 @@ +{ + "name": "@azimutt/connector-oracle", + "version": "0.1.0", + "description": "Connect to Oracle, extract schema, run analysis and queries", + "keywords": [], + "homepage": "https://azimutt.app", + "author": { + "name": "Anthony Ly", + "email": "anthonyly.dev@gmail.com", + "url": "https://anthonyly.dev" + }, + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/azimuttapp/azimutt.git", + "directory": "libs/connector-oracle" + }, + "main": "./out/index.js", + "types": "./out/index.d.ts", + "scripts": { + "test": "jest", + "build": "rm -rf ./out && tsc", + "build:docker": "npx tsc", + "dry-publish": "pnpm run build && pnpm test && pnpm pack" + }, + "dependencies": { + "@azimutt/models": "workspace:^", + "@azimutt/utils": "workspace:^", + "oracledb": "^6.5.1" + }, + "devDependencies": { + "@jest/globals": "^29.7.0", + "@types/jest": "^29.5.12", + "@types/node": "^20.14.5", + "@types/oracledb": "^6.5.1", + "jest": "^29.7.0", + "ts-jest": "^29.1.3", + "typescript": "^5.4.5" + } +} diff --git 
a/libs/connector-oracle/src/connect.test.ts b/libs/connector-oracle/src/connect.test.ts new file mode 100644 index 000000000..49009fce0 --- /dev/null +++ b/libs/connector-oracle/src/connect.test.ts @@ -0,0 +1,30 @@ +import { describe, test } from "@jest/globals" +import { parseDatabaseUrl } from "@azimutt/models" +import { connect } from "./connect" +import { execQuery } from "./query" +import { application, logger } from "./constants.test" + +// Use this test to troubleshoot database connection errors. +// If you don't succeed with the first one (Azimutt `connect`), try with the second one (raw node lib) and once you found a way, tell us how to fix ;) +// Of course, you can contact us (issues or contact@azimutt.app) to do it together. +// More documentation available at: https://azimutt.notion.site/Database-connection-troubleshooting-c4c19ed28c7040ef9aaaeec96ce6ba8d +describe("connect", () => { + // TODO 1: replace this with your own connection string, but don't commit it! + const url = "jdbc:oracle:thin:sys/oracle@//localhost:1521/FREE" + + // TODO 2: write a valid query for your database + const query = "SELECT * FROM C##AZIMUTT.USERS" + const parameters: any[] = [] + + // TODO 3: unskip this test first and run it (`npm run test -- src/connect.test.ts`) + test.skip("Azimutt should connect", async () => { + const parsedUrl = parseDatabaseUrl(url) + const results = await connect( + application, + parsedUrl, + execQuery(query, parameters), + { logger, logQueries: true } + ) + console.log("results", results) + }) +}) diff --git a/libs/connector-oracle/src/connect.ts b/libs/connector-oracle/src/connect.ts new file mode 100644 index 000000000..f5a388447 --- /dev/null +++ b/libs/connector-oracle/src/connect.ts @@ -0,0 +1,130 @@ +import { + Connection, + ConnectionAttributes, + getConnection, + SYSDBA, +} from "oracledb" +import { AnyError, errorToString } from "@azimutt/utils" +import { + AttributeValue, + ConnectorDefaultOpts, + DatabaseUrlParsed, + logQueryIfNeeded, 
+ queryError, +} from "@azimutt/models" + +export async function connect( + application: string, + url: DatabaseUrlParsed, + exec: (c: Conn) => Promise, + opts: ConnectorDefaultOpts +): Promise { + const client = await createConnection(buildConfig(application, url)).catch( + (err) => Promise.reject(connectionError(err)) + ) + let queryCpt = 1 + const conn: Conn = { + url, + query( + sql: string, + parameters: [] = [], + name?: string + ): Promise { + return logQueryIfNeeded( + queryCpt++, + name, + sql, + parameters, + (sql, parameters) => { + return client.execute(sql, parameters).then( + (res) => res.rows ?? [], + (err) => Promise.reject(queryError(name, sql, err)) + ) + }, + (r) => r?.length ?? 0, + opts + ) + }, + queryArrayMode( + sql: string, + parameters: any[] = [], + name?: string + ): Promise { + return logQueryIfNeeded( + queryCpt++, + name, + sql, + parameters, + (sql, parameters) => { + return client.execute(sql, parameters).then( + (res) => { + const { metaData, rows } = res + const fields = metaData?.map((meta) => ({ + name: meta.name, + })) + return { fields: fields ?? [], rows: (rows as any[]) ?? 
[] } + }, + (err) => Promise.reject(queryError(name, sql, err)) + ) + }, + (r) => r.rows.length, + opts + ) + }, + } + return exec(conn).then( + (res) => client.close().then((_) => res), + (err) => client.close().then((_) => Promise.reject(err)) + ) +} + +export interface Conn { + url: DatabaseUrlParsed + + query( + sql: string, + parameters?: any[], + name?: string + ): Promise + + queryArrayMode( + sql: string, + parameters?: any[], + name?: string + ): Promise +} + +export type QueryResultValue = AttributeValue +export type QueryResultRow = QueryResultValue[] +export type QueryResultField = { + name: string +} +export type QueryResultRowArray = QueryResultValue[] +export type QueryResultArrayMode = { + fields: QueryResultField[] + rows: QueryResultRowArray[] +} + +async function createConnection( + config: ConnectionAttributes +): Promise { + const client = await getConnection(config) + return client +} + +function buildConfig( + application: string, + url: DatabaseUrlParsed +): ConnectionAttributes { + return { + connectionString: `${url.host}:${url.port}/${url.db}`, + user: "sys", + password: url.pass || undefined, + privilege: SYSDBA, + } +} + +function connectionError(err: AnyError): AnyError { + const msg = errorToString(err) + return err +} diff --git a/libs/connector-oracle/src/constants.test.ts b/libs/connector-oracle/src/constants.test.ts new file mode 100644 index 000000000..0102d7807 --- /dev/null +++ b/libs/connector-oracle/src/constants.test.ts @@ -0,0 +1,14 @@ +import {expect, test} from "@jest/globals"; +import {Logger} from "@azimutt/utils"; + +export const logger: Logger = { + debug: (text: string): void => console.debug(text), + log: (text: string): void => console.log(text), + warn: (text: string): void => console.warn(text), + error: (text: string): void => console.error(text) +} +export const application = 'azimutt-tests' + +test('dummy', () => { + expect(application).toEqual('azimutt-tests') +}) diff --git 
a/libs/connector-oracle/src/helpers.test.ts b/libs/connector-oracle/src/helpers.test.ts new file mode 100644 index 000000000..393536031 --- /dev/null +++ b/libs/connector-oracle/src/helpers.test.ts @@ -0,0 +1,16 @@ +import { describe, expect, test } from "@jest/globals" +import { buildSqlColumn, buildSqlTable } from "./helpers" + +describe("helpers", () => { + test("buildSqlTable", () => { + expect(buildSqlTable({ entity: "events" })).toEqual(`"events"`) + expect(buildSqlTable({ schema: "", entity: "events" })).toEqual(`"events"`) + expect(buildSqlTable({ schema: "public", entity: "events" })).toEqual( + `"public"."events"` + ) + }) + test("buildSqlColumn", () => { + expect(buildSqlColumn(["name"])).toEqual(`"name"`) + expect(buildSqlColumn(["data", "email"])).toEqual(`"data"->'email'`) + }) +}) diff --git a/libs/connector-oracle/src/helpers.ts b/libs/connector-oracle/src/helpers.ts new file mode 100644 index 000000000..2aa74bb78 --- /dev/null +++ b/libs/connector-oracle/src/helpers.ts @@ -0,0 +1,11 @@ +import { AttributePath, EntityRef, SqlFragment } from "@azimutt/models" + +export function buildSqlTable(ref: EntityRef): SqlFragment { + const sqlSchema = ref.schema ? 
`"${ref.schema}".` : "" + return `${sqlSchema}"${ref.entity}"` +} + +export function buildSqlColumn(path: AttributePath): SqlFragment { + const [head, ...tail] = path + return `"${head}"${tail.map((t) => `->'${t}'`).join("")}` +} diff --git a/libs/connector-oracle/src/history.test.ts b/libs/connector-oracle/src/history.test.ts new file mode 100644 index 000000000..47023d7dd --- /dev/null +++ b/libs/connector-oracle/src/history.test.ts @@ -0,0 +1,33 @@ +import { describe, test } from "@jest/globals" +import { + ConnectorQueryHistoryOpts, + DatabaseQuery, + DatabaseUrlParsed, + parseDatabaseUrl, + zodParseAsync, +} from "@azimutt/models" +import { connect } from "./connect" +import { getQueryHistory } from "./history" +import { application, logger } from "./constants.test" + +describe("history", () => { + // local url, install db or replace it to test + const url: DatabaseUrlParsed = parseDatabaseUrl( + "jdbc:oracle:thin:sys/oracle@//localhost:1521/FREE" + ) + const opts: ConnectorQueryHistoryOpts = { + logger, + logQueries: false, + database: "C##AZIMUTT.USERS", + } + + test.skip("getQueryHistory", async () => { + const queries: DatabaseQuery[] = await connect( + application, + url, + getQueryHistory(opts), + opts + ).then(zodParseAsync(DatabaseQuery.array())) + console.log(`${queries.length} queries`, queries) + }) +}) diff --git a/libs/connector-oracle/src/history.ts b/libs/connector-oracle/src/history.ts new file mode 100644 index 000000000..8eee3c21f --- /dev/null +++ b/libs/connector-oracle/src/history.ts @@ -0,0 +1,50 @@ +import { + ConnectorQueryHistoryOpts, + DatabaseQuery, + handleError, +} from "@azimutt/models" +import { Conn } from "./connect" + +export type RawQuery = { + id: string + query: string + rows: number + database: string + user: string +} + +export const getQueryHistory = + (opts: ConnectorQueryHistoryOpts) => + async (conn: Conn): Promise => { + return conn + .query( + ` + SELECT + h.sql_id, + i.instance_name AS DATABASE_NAME, + 
s.rows_processed, + u.username + FROM + dba_hist_sqlstat h + JOIN + v$sql s ON s.sql_id = h.sql_id + JOIN + dba_hist_active_sess_history a ON h.sql_id = a.sql_id + JOIN + dba_users u ON a.user_id = u.user_id + JOIN + gv$instance i ON h.instance_number = i.instance_number + ORDER BY + h.sql_id`, + [], + "getQueryHistory" + ) + .then((queries) => { + return queries.reduce((acc, row) => { + const [id, databaseName, rows, user] = row as any[] + acc.push({ id, query: "", database: databaseName, rows, user }) + return acc + }, []) + }) + .catch(handleError(`Failed to get historical queries`, [], opts)) + } diff --git a/libs/connector-oracle/src/index.ts b/libs/connector-oracle/src/index.ts new file mode 100644 index 000000000..6f23acf74 --- /dev/null +++ b/libs/connector-oracle/src/index.ts @@ -0,0 +1,83 @@ +import { + AttributeRef, + Connector, + ConnectorAttributeStats, + ConnectorDefaultOpts, + ConnectorEntityStats, + ConnectorQueryHistoryOpts, + ConnectorSchemaOpts, + Database, + DatabaseQuery, + DatabaseUrlParsed, + EntityRef, + QueryAnalyze, + QueryResults, + zodParseAsync, +} from "@azimutt/models" +import { connect } from "./connect" +import { execQuery } from "./query" +import { getSchema } from "./oracle" +import { getQueryHistory } from "./history" +import { getColumnStats, getTableStats } from "./stats" + +export const oracle: Connector = { + name: "Oracle", + getSchema: ( + application: string, + url: DatabaseUrlParsed, + opts: ConnectorSchemaOpts + ): Promise => { + const urlOptions = url.options || {} + const options: ConnectorSchemaOpts = { + ...opts, + schema: opts.schema || urlOptions["schema"], + entity: opts.entity || urlOptions["table"], + } + return connect(application, url, getSchema(options), options).then( + zodParseAsync(Database) + ) + }, + getQueryHistory: ( + application: string, + url: DatabaseUrlParsed, + opts: ConnectorQueryHistoryOpts + ): Promise => + connect(application, url, getQueryHistory(opts), opts).then( + 
zodParseAsync(DatabaseQuery.array()) + ), + execute: ( + application: string, + url: DatabaseUrlParsed, + query: string, + parameters: any[], + opts: ConnectorDefaultOpts + ): Promise => + connect(application, url, execQuery(query, parameters), opts).then( + zodParseAsync(QueryResults) + ), + analyze: ( + application: string, + url: DatabaseUrlParsed, + query: string, + parameters: any[], + opts: ConnectorDefaultOpts + ): Promise => Promise.reject("Not implemented"), + getEntityStats: ( + application: string, + url: DatabaseUrlParsed, + ref: EntityRef, + opts: ConnectorDefaultOpts + ): Promise => + connect(application, url, getTableStats(ref), opts).then( + zodParseAsync(ConnectorEntityStats) + ), + getAttributeStats: ( + application: string, + url: DatabaseUrlParsed, + ref: AttributeRef, + opts: ConnectorDefaultOpts + ): Promise => + connect(application, url, getColumnStats(ref), opts).then( + zodParseAsync(ConnectorAttributeStats) + ), +} diff --git a/libs/connector-oracle/src/integration.test.ts b/libs/connector-oracle/src/integration.test.ts new file mode 100644 index 000000000..e0803e3ac --- /dev/null +++ b/libs/connector-oracle/src/integration.test.ts @@ -0,0 +1,35 @@ +import { describe, expect, test } from "@jest/globals" +import { parseDatabaseUrl, ConnectorSchemaOpts } from "@azimutt/models" +import { connect } from "./connect" +import { getSchema } from "./oracle" +import { application, logger } from "./constants.test" + +// run these test with a postgres db loaded with `integration/postgres.sql` script, you can use the `integration/compose.yaml` +describe("integration", () => { + const url = parseDatabaseUrl( + "jdbc:oracle:thin:sys/oracle@//localhost:1521/FREE" + ) + + test.skip("getSchema", async () => { + const schemaOpts: ConnectorSchemaOpts = { + schema: undefined, + sampleSize: 10, + inferRelations: true, + ignoreErrors: false, + logQueries: true, + logger, + } + const schema = await connect(application, url, getSchema(schemaOpts), { + logger, + 
logQueries: true, + }) + expect(schema.entities?.length).toEqual(8) + expect(schema.relations?.length).toEqual(8) + // polymorphic relation + expect( + schema.entities + ?.find((t) => t.name === "C##AZIMUTT.USERS") + ?.attrs.find((c) => c.name === "item_type")?.stats?.distinctValues + ).toEqual(["ID", "NAME"]) + }) +}) diff --git a/libs/connector-oracle/src/oracle.test.ts b/libs/connector-oracle/src/oracle.test.ts new file mode 100644 index 000000000..0a73092f0 --- /dev/null +++ b/libs/connector-oracle/src/oracle.test.ts @@ -0,0 +1,91 @@ +import { describe, expect, test } from "@jest/globals" +import { + ConnectorSchemaOpts, + DatabaseUrlParsed, + parseDatabaseUrl, +} from "@azimutt/models" +import { connect } from "./connect" +import { execQuery } from "./query" +import { + getBlockSize, + getColumns, + getDatabase, + getDistinctValues, + getSchema, + getTables, + getTypes, +} from "./oracle" +import { application, logger } from "./constants.test" + +describe("oracle", () => { + // local url, install db or replace it to test + const url: DatabaseUrlParsed = parseDatabaseUrl( + "jdbc:oracle:thin:sys/oracle@//localhost:1521/FREE" + ) + + const opts: ConnectorSchemaOpts = { + logger, + logQueries: false, + inferJsonAttributes: true, + inferPolymorphicRelations: true, + } + + test.skip("execQuery", async () => { + const query = + "SELECT u.id, e.id, o.id FROM users u JOIN events e ON u.id = e.created_by JOIN organizations o on o.id = e.organization_id FETCH FIRST 10 ROWS ONLY" + const results = await connect(application, url, execQuery(query, []), opts) + expect(results.attributes).toEqual([ + { + name: "id", + ref: { schema: "public", entity: "users", attribute: ["id"] }, + }, + { + name: "id_2", + ref: { schema: "public", entity: "events", attribute: ["id"] }, + }, + { + name: "id_3", + ref: { schema: "public", entity: "organizations", attribute: ["id"] }, + }, + ]) + }) + test.skip("getSchema", async () => { + const schema = await connect(application, url, 
getSchema(opts), opts) + console.log("schema", schema) + // console.log('schema', schema.entities?.find(e => e.name == 'events')?.attrs?.find(a => a.name == 'name')?.stats) + expect(schema.entities?.length).toEqual(14) + }) + test.skip("getBlockSize", async () => { + const blockSize = await connect(application, url, getBlockSize(opts), opts) + console.log(`blockSize`, blockSize) + }) + test.skip("getDatabase", async () => { + const database = await connect(application, url, getDatabase(opts), opts) + console.log(`database`, database) + }) + test.skip("getTables", async () => { + const tables = await connect(application, url, getTables(opts), opts) + console.log(`${tables.length} tables`, tables) + }) + test.skip("getColumns", async () => { + const columns = await connect(application, url, getColumns(opts), opts) + console.log(`${columns.length} columns`, columns) + }) + test.skip("getTypes", async () => { + const types = await connect(application, url, getTypes(opts), opts) + console.log(`${types.length} types`, types) + }) + test.skip("getDistinctValues", async () => { + const values = await connect( + application, + url, + getDistinctValues( + { schema: "C##AZIMUTT", entity: "USERS" }, + ["NAME"], + opts + ), + opts + ) + console.log(`${values.length} values`, values) + }) +}) diff --git a/libs/connector-oracle/src/oracle.ts b/libs/connector-oracle/src/oracle.ts new file mode 100644 index 000000000..17c7ed58a --- /dev/null +++ b/libs/connector-oracle/src/oracle.ts @@ -0,0 +1,672 @@ +import { + groupBy, + mapEntriesAsync, + mapValues, + mapValuesAsync, + pluralizeL, + removeEmpty, + removeUndefined, +} from "@azimutt/utils" +import { + Attribute, + AttributeName, + AttributePath, + attributeRefToId, + AttributeValue, + Check, + ConnectorSchemaOpts, + connectorSchemaOptsDefaults, + Database, + DatabaseKind, + Entity, + EntityId, + EntityRef, + entityRefFromId, + entityRefToId, + formatConnectorScope, + handleError, + Index, + isPolymorphic, + PrimaryKey, + 
Relation, + schemaToAttributes, + Type, + ValueSchema, + valuesToSchema, +} from "@azimutt/models" +import { buildSqlColumn, buildSqlTable } from "./helpers" +import { Conn } from "./connect" + +export const getSchema = + (opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + const start = Date.now() + const scope = formatConnectorScope( + { schema: "schema", entity: "table" }, + opts + ) + opts.logger.log( + `Connected to the database${scope ? `, exporting for ${scope}` : ""} ...` + ) + + // access system tables only + const blockSize: number = await getBlockSize(opts)(conn) + const database: RawDatabase = await getDatabase(opts)(conn) + const tables: RawTable[] = await getTables(opts)(conn) + opts.logger.log(`Found ${pluralizeL(tables, "table")} ...`) + const columns: RawColumn[] = await getColumns(opts)(conn) + opts.logger.log(`Found ${pluralizeL(columns, "column")} ...`) + const constraints: RawConstraint[] = await getConstraints(opts)(conn) + opts.logger.log(`Found ${pluralizeL(constraints, "constraint")} ...`) + const indexes: RawIndex[] = await getIndexes(opts)(conn) + opts.logger.log(`Found ${pluralizeL(indexes, "index")} ...`) + const relations: RawRelation[] = await getRelations(opts)(conn) + opts.logger.log(`Found ${pluralizeL(relations, "relation")} ...`) + const types: RawType[] = await getTypes(opts)(conn) + opts.logger.log(`Found ${pluralizeL(types, "type")} ...`) + + // access table data when options are requested + const columnsByTable = groupByEntity(columns) + const jsonColumns: Record< + EntityId, + Record + > = opts.inferJsonAttributes + ? await getJsonColumns(columnsByTable, opts)(conn) + : {} + const polyColumns: Record< + EntityId, + Record + > = opts.inferPolymorphicRelations + ? await getPolyColumns(columnsByTable, opts)(conn) + : {} + // TODO: pii, join relations... 
+ + // build the database + const columnsByIndex: Record = mapValues( + columnsByTable, + (cols) => + cols.reduce( + (acc, col) => ({ ...acc, [col.column_index]: col.column_name }), + {} + ) + ) + const constraintsByTable = groupByEntity(constraints) + const indexesByTable = groupByEntity(indexes) + opts.logger.log( + `✔︎ Exported ${pluralizeL(tables, "table")}, ${pluralizeL(relations, "relation")} and ${pluralizeL(types, "type")} from the database!` + ) + return removeUndefined({ + entities: tables + .map((table) => [toEntityId(table), table] as const) + .map(([id, table]) => + buildEntity( + blockSize, + table, + columnsByTable[id] || [], + columnsByIndex[id] || {}, + constraintsByTable[id] || [], + indexesByTable[id] || [], + jsonColumns[id] || {}, + polyColumns[id] || {} + ) + ), + relations: relations + .map((r) => buildRelation(r, columnsByIndex)) + .filter((rel): rel is Relation => !!rel), + types: types.map(buildType), + doc: undefined, + stats: removeUndefined({ + name: conn.url.db || database.database, + kind: DatabaseKind.Enum.postgres, + version: database.version, + doc: undefined, + extractedAt: new Date().toISOString(), + extractionDuration: Date.now() - start, + size: database.blks_read * blockSize, + }), + extra: undefined, + }) + } + +// 👇️ Private functions, some are exported only for tests +// If you use them, beware of breaking changes! 
+ +const toEntityId = ( + value: T +): EntityId => + entityRefToId({ schema: value.table_schema, entity: value.table_name }) +const groupByEntity = ( + values: T[] +): Record => groupBy(values, toEntityId) + +export type RawDatabase = { + version: string + database: string + blks_read: number +} + +export const getDatabase = + (opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + const data: RawDatabase = { + version: "", + database: "", + blks_read: 0, + } + + await conn.query(`SELECT BANNER FROM V$VERSION`).then((res) => { + data.version = res?.[0]?.[0] as string + }) + + await conn.query(`select name from v$database`).then((res) => { + data.database = res?.[0]?.[0] as string + }) + + await conn + .query(`select value from v$sysstat where name = 'physical reads'`) + .then((res) => { + data.blks_read = res?.[0]?.[0] ? Number(res[0][0]) : 0 + }) + + return data + } + +export const getBlockSize = + (opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + return conn + .query( + `select distinct bytes/blocks AS block_size from user_segments`, + [], + "getBlockSize" + ) + .then((res) => (res?.[0]?.[0] ? 
Number(res[0][0]) : 8192)) + .catch(handleError(`Failed to get block size`, 0, opts)) + } + +export type RawTable = { + table_schema: string + table_name: string +} + +export const getTables = + (opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + return conn + .query(`SELECT owner as table_schema, table_name from ALL_ALL_TABLES`) + .then((res) => + res.reduce((acc, row) => { + const [table_schema, table_name] = row as string[] + acc.push({ table_schema, table_name }) + return acc + }, []) + ) + .catch(handleError(`Failed to get tables`, [], opts)) + } + +function buildEntity( + blockSize: number, + table: RawTable, + columns: RawColumn[], + columnsByIndex: { [i: number]: string }, + constraints: RawConstraint[], + indexes: RawIndex[], + jsonColumns: Record, + polyColumns: Record +): Entity { + return removeEmpty({ + schema: table.table_schema, + name: table.table_name, + + attrs: columns + .slice(0) + .sort((a, b) => a.column_index - b.column_index) + .map((c) => buildAttribute(c, jsonColumns[c.column_name])), + pk: + constraints + .filter((c) => c.constraint_type === "P") + .map((c) => buildPrimaryKey(c, columnsByIndex))[0] || undefined, + indexes: indexes.map((i) => buildIndex(blockSize, i, columnsByIndex)), + checks: constraints + .filter((c) => c.constraint_type === "C") + .map((c) => buildCheck(c, columnsByIndex)), + extra: undefined, + }) +} + +export type RawColumn = { + column_index: number + table_schema: string + table_name: string + column_name: string + column_type: string + column_type_len: number + column_nullable: boolean +} + +export const getColumns = + (opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + return conn + .query( + `select column_id, + owner as schema_name, + table_name, + column_name, + data_type, + data_length, + nullable + from sys.dba_tab_columns`, + [], + "getColumns" + ) + .then((res) => + res.reduce((acc, row) => { + const [ + column_index, + table_schema, + table_name, + column_name, + 
column_type, + column_type_len, + column_nullable, + ] = row as any[] + acc.push({ + column_index, + table_schema, + table_name, + column_name, + column_type, + column_type_len, + column_nullable: column_nullable === "Y", + }) + return acc + }, []) + ) + .catch(handleError(`Failed to get columns`, [], opts)) + } + +function buildAttribute( + c: RawColumn, + jsonColumn: ValueSchema | undefined +): Attribute { + return removeEmpty({ + name: c.column_name, + type: c.column_type, + null: c.column_nullable || undefined, + attrs: jsonColumn ? schemaToAttributes(jsonColumn) : undefined, + }) +} + +type RawConstraint = { + table_schema: string + table_name: string + column_name: string + constraint_name: string + constraint_type: "P" | "C" // P: primary key, C: Check, + deferrable: boolean +} + +export const getConstraints = + (opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + // https://docs.oracle.com/en/database/oracle/oracle-database/21/refrn/ALL_CONSTRAINTS.html + // `constraint_type IN ('P', 'C')`: get only primary key and check constraints + return conn + .query( + `SELECT uc.owner AS table_schema, + uc.table_name, + acc.COLUMN_NAME, + uc.constraint_name, + uc.constraint_type, + uc.DEFERRABLE + FROM user_constraints uc JOIN all_cons_columns acc ON uc.CONSTRAINT_NAME = acc.CONSTRAINT_NAME + WHERE constraint_type IN ('P', 'C') + ORDER BY table_schema, table_name, constraint_name`, + [], + "getConstraints" + ) + .then((res) => + res.reduce((acc, row) => { + const [ + table_schema, + table_name, + column_name, + constraint_name, + constraint_type, + deferrable, + ] = row as any[] + + acc.push({ + table_schema, + table_name, + column_name, + constraint_name, + constraint_type, + deferrable: deferrable !== "NOT DEFFERRABLE", + }) + return acc + }, []) + ) + .catch(handleError(`Failed to get constraints`, [], opts)) + } + +function buildPrimaryKey( + c: RawConstraint, + columns: { [i: number]: string } +): PrimaryKey { + return removeUndefined({ + name: 
c.constraint_name, + attrs: [[c.column_name]], + }) +} + +function buildCheck(c: RawConstraint, columns: { [i: number]: string }): Check { + return removeUndefined({ + name: c.constraint_name, + attrs: [[c.column_name]], + predicate: "", + }) +} + +type RawIndex = { + table_schema: string + table_name: string + index_name: string + columns: string[] + is_unique: boolean +} + +export const getIndexes = + (opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + return conn + .query( + `SELECT idx.table_owner AS table_schema, + idx.table_name, + idx.index_name, + LISTAGG(col.column_name, ', ') WITHIN GROUP (ORDER BY col.column_position) AS columns, + CASE + WHEN idx.uniqueness = 'UNIQUE' THEN 1 + ELSE 0 + END AS is_unique + FROM all_indexes idx + JOIN all_ind_columns col + ON + idx.index_name = col.index_name + AND idx.table_owner = col.table_owner + AND idx.table_name = col.table_name + GROUP BY + idx.index_name, idx.table_owner, idx.table_name, idx.uniqueness` + ) + .then((res) => + res.reduce((acc, row) => { + const [table_schema, table_name, index_name, columns, is_unique] = + row as any + + acc.push({ + table_schema, + table_name, + index_name, + columns: columns.split(", "), + is_unique: Boolean(is_unique), + }) + return acc + }, []) + ) + .catch(handleError(`Failed to get indexes`, [], opts)) + } + +function buildIndex( + blockSize: number, + index: RawIndex, + columns: { [i: number]: string } +): Index { + return removeUndefined({ + name: index.index_name, + attrs: [index.columns], + unique: index.is_unique || undefined, + }) +} + +type RawRelation = { + constraint_name: string + table_schema: string + table_name: string + table_column: string + target_schema: string + target_table: string + target_column: string + is_deferrable: boolean + on_delete: string +} + +export const getRelations = + (opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + return conn + .query( + ` + SELECT + a.constraint_name, + a.owner AS table_schema, + 
a.table_name AS table_name, + ac.column_name AS table_column, + cc.owner AS target_schema, + cc.table_name AS target_table, + cc.column_name AS target_column, + CASE + WHEN a.deferrable = 'DEFERRABLE' THEN 1 + ELSE 0 + END AS is_deferable, + a.delete_rule AS on_delete_action + FROM + all_constraints a + JOIN + all_cons_columns ac ON a.constraint_name = ac.constraint_name AND a.owner = ac.owner + JOIN + all_constraints c ON a.r_constraint_name = c.constraint_name AND a.r_owner = c.owner + JOIN + all_cons_columns cc ON c.constraint_name = cc.constraint_name AND c.owner = cc.owner AND ac.position = cc.position + WHERE + a.constraint_type = 'R' + ORDER BY + a.table_name, a.constraint_name, ac.position`, + [], + "getRelations" + ) + .then((res) => + res.reduce((acc, row) => { + const [ + constraint_name, + table_schema, + table_name, + table_column, + target_schema, + target_table, + target_column, + is_deferrable, + on_delete, + ] = row as any[] + + acc.push({ + constraint_name, + table_schema, + table_name, + table_column, + target_schema, + target_table, + target_column, + is_deferrable: Boolean(is_deferrable), + on_delete, + }) + return acc + }, []) + ) + .catch(handleError(`Failed to get relations`, [], opts)) + } + +function buildRelation( + r: RawRelation, + columnsByIndex: Record +): Relation | undefined { + const src = { schema: r.table_schema, entity: r.table_name } + const ref = { schema: r.target_schema, entity: r.target_table } + const rel: Relation = { + name: r.constraint_name, + kind: undefined, // 'many-to-one' when not specified + origin: undefined, // 'fk' when not specified + src, + ref, + attrs: [ + { + src: [r.table_column], + ref: [r.target_column], + }, + ], + } + // don't keep relation if columns are not found :/ + // should not happen if errors are not skipped + return rel.attrs.length > 0 ? 
removeUndefined(rel) : undefined +} + +export type RawType = { + type_schema: string + type_name: string +} + +export const getTypes = + (opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + return conn + .query( + ` + SELECT + t.owner AS type_schema, + t.type_name + FROM + all_types t + ORDER BY type_schema, type_name`, + [], + "getTypes" + ) + .then((res) => + res.reduce((acc, row) => { + const [type_schema, type_name] = row as string[] + acc.push({ type_schema, type_name }) + return acc + }, []) + ) + .catch(handleError(`Failed to get types`, [], opts)) + } + +function buildType(t: RawType): Type { + return removeUndefined({ + schema: t.type_schema, + name: t.type_name, + }) +} + +const getJsonColumns = + (columns: Record, opts: ConnectorSchemaOpts) => + async ( + conn: Conn + ): Promise>> => { + opts.logger.log("Inferring JSON columns ...") + return mapEntriesAsync(columns, (entityId, tableCols) => { + const ref = entityRefFromId(entityId) + const jsonCols = tableCols.filter((c) => c.column_type === "JSON") + return mapValuesAsync( + Object.fromEntries(jsonCols.map((c) => [c.column_name, c.column_name])), + (c) => getSampleValues(ref, [c], opts)(conn).then(valuesToSchema) + ) + }) + } + +const getSampleValues = + (ref: EntityRef, attribute: AttributePath, opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + const sqlTable = buildSqlTable(ref) + const sqlColumn = buildSqlColumn(attribute) + const sampleSize = opts.sampleSize || connectorSchemaOptsDefaults.sampleSize + return conn + .query( + `SELECT ${sqlColumn} AS value FROM ${sqlTable} WHERE ${sqlColumn} IS NOT NULL FETCH FIRST ${sampleSize} ROWS ONLY`, + [], + "getSampleValues" + ) + .then((rows) => + rows.reduce<{ value: AttributeValue }[]>((acc, row) => { + const [value] = row as any[] + acc.push({ value }) + return acc + }, []) + ) + .catch( + handleError( + `Failed to get sample values for '${attributeRefToId({ ...ref, attribute })}'`, + [], + opts + ) + ) + } + +const
getPolyColumns = + (columns: Record, opts: ConnectorSchemaOpts) => + async ( + conn: Conn + ): Promise>> => { + opts.logger.log("Inferring polymorphic relations ...") + return mapEntriesAsync(columns, (entityId, tableCols) => { + const ref = entityRefFromId(entityId) + const colNames = tableCols.map((c) => c.column_name) + const polyCols = tableCols.filter((c) => + isPolymorphic(c.column_name, colNames) + ) + return mapValuesAsync( + Object.fromEntries(polyCols.map((c) => [c.column_name, c.column_name])), + (c) => + getDistinctValues( + ref, + [c], + opts + )(conn).then((values) => + values.filter((v): v is string => typeof v === "string") + ) + ) + }) + } + +export const getDistinctValues = + (ref: EntityRef, attribute: AttributePath, opts: ConnectorSchemaOpts) => + async (conn: Conn): Promise => { + const sqlTable = buildSqlTable(ref) + const sqlColumn = buildSqlColumn(attribute) + const sampleSize = opts.sampleSize || connectorSchemaOptsDefaults.sampleSize + return conn + .query( + `SELECT DISTINCT ${sqlColumn} AS value FROM ${sqlTable} WHERE ${sqlColumn} IS NOT NULL ORDER BY value FETCH FIRST ${sampleSize} ROWS ONLY`, + [], + "getDistinctValues" + ) + .then((rows) => + rows.reduce<{ value: AttributeValue }[]>((acc, row) => { + const [value] = row as any[] + acc.push({ value }) + return acc + }, []) + ) + .catch((err) => + err instanceof Error && + err.message.match(/materialized view "[^"]+" has not been populated/) + ? 
[] + : Promise.reject(err) + ) + .catch( + handleError( + `Failed to get distinct values for '${attributeRefToId({ ...ref, attribute })}'`, + [], + opts + ) + ) + } diff --git a/libs/connector-oracle/src/query.ts b/libs/connector-oracle/src/query.ts new file mode 100644 index 000000000..12dd66efa --- /dev/null +++ b/libs/connector-oracle/src/query.ts @@ -0,0 +1,46 @@ +import { AttributeRef, QueryResults } from "@azimutt/models" +import { Conn, QueryResultArrayMode, QueryResultField } from "./connect" + +export const execQuery = + (query: string, parameters: any[]) => + (conn: Conn): Promise => { + return conn + .queryArrayMode(query, parameters) + .then((result) => buildResults(conn, query, result)) + } + +async function buildResults( + conn: Conn, + query: string, + result: QueryResultArrayMode +): Promise { + const attributes = buildAttributes(result.fields) + const rows = result.rows.map((row) => + attributes.reduce((acc, col, i) => ({ ...acc, [col.name]: row[i] }), {}) + ) + return { query, attributes, rows } +} + +function buildAttributes( + fields: QueryResultField[] +): { name: string; ref?: AttributeRef }[] { + const keys: { [key: string]: true } = {} + return fields.map((f) => { + const name = uniqueName(f.name, keys) + keys[name] = true + return { name } + }) +} + +function uniqueName( + name: string, + currentNames: { [key: string]: true }, + cpt: number = 1 +): string { + const newName = cpt === 1 ? 
name : `${name}_${cpt}` + if (currentNames[newName]) { + return uniqueName(name, currentNames, cpt + 1) + } else { + return newName + } +} diff --git a/libs/connector-oracle/src/stats.test.ts b/libs/connector-oracle/src/stats.test.ts new file mode 100644 index 000000000..8fb404eec --- /dev/null +++ b/libs/connector-oracle/src/stats.test.ts @@ -0,0 +1,37 @@ +import { describe, expect, test } from "@jest/globals" +import { DatabaseUrlParsed, parseDatabaseUrl } from "@azimutt/models" +import { connect } from "./connect" +import { getColumnStats, getTableStats } from "./stats" +import { application, logger } from "./constants.test" + +describe("stats", () => { + // local url, install db or replace it to test + const url: DatabaseUrlParsed = parseDatabaseUrl( + "jdbc:oracle:thin:sys/oracle@//localhost:1521/FREE" + ) + + test.skip("getTableStats", async () => { + const stats = await connect( + application, + url, + getTableStats({ schema: "C##AZIMUTT", entity: "USERS" }), + { logger, logQueries: true } + ) + console.log("getTableStats", stats) + expect(stats.rows).toEqual(2) + }) + test.skip("getColumnStats", async () => { + const stats = await connect( + application, + url, + getColumnStats({ + schema: "C##AZIMUTT", + entity: "USERS", + attribute: ["NAME"], + }), + { logger, logQueries: true } + ) + console.log("getColumnStats", stats) + expect(stats.rows).toEqual(2) + }) +}) diff --git a/libs/connector-oracle/src/stats.ts b/libs/connector-oracle/src/stats.ts new file mode 100644 index 000000000..7e56be8d6 --- /dev/null +++ b/libs/connector-oracle/src/stats.ts @@ -0,0 +1,129 @@ +import { shuffle } from "@azimutt/utils" +import { + AttributeRef, + AttributeType, + AttributeValue, + ConnectorAttributeStats, + ConnectorAttributeStatsValue, + ConnectorEntityStats, + EntityRef, + SqlFragment, +} from "@azimutt/models" +import { buildSqlColumn, buildSqlTable } from "./helpers" +import { Conn } from "./connect" + +export const getTableStats = + (ref: EntityRef) => + async 
(conn: Conn): Promise => { + const sqlTable = buildSqlTable(ref) + const rows = await countRows(conn, sqlTable) + const sampleValues = await getSampleValues(conn, sqlTable) + return { ...ref, rows, sampleValues } + } + +export const getColumnStats = + (ref: AttributeRef) => + async (conn: Conn): Promise => { + const sqlTable = buildSqlTable(ref) + const sqlColumn = buildSqlColumn(ref.attribute) + const type = await getColumnType(conn, ref) + const basics = await getColumnBasics(conn, sqlTable, sqlColumn) + const commonValues = await getCommonValues(conn, sqlTable, sqlColumn) + return { ...ref, type, ...basics, commonValues } + } + +async function countRows(conn: Conn, sqlTable: SqlFragment): Promise { + const sql = `SELECT count(*) FROM ${sqlTable}` + const rows = await conn.query(sql) + return Number(rows[0][0]) +} + +async function getSampleValues( + conn: Conn, + sqlTable: SqlFragment +): Promise<{ [attribute: string]: AttributeValue }> { + // take several raws to minimize empty columns and randomize samples from several raws + const sql = `SELECT * FROM ${sqlTable} FETCH FIRST 10 ROWS ONLY` + const result = await conn.queryArrayMode(sql) + const samples = await Promise.all( + result.fields.map(async (field, fieldIndex) => { + const values = shuffle( + result.rows.map((row) => row[fieldIndex]).filter((v) => !!v) + ) + const value = await (values.length > 0 + ? Promise.resolve(values[0]) + : getSampleValue(conn, sqlTable, buildSqlColumn([field.name]))) + return [field.name, value] as [string, AttributeValue] + }) + ) + return Object.fromEntries(samples) +} + +async function getSampleValue( + conn: Conn, + sqlTable: SqlFragment, + sqlColumn: SqlFragment +): Promise { + // select several raws to and then shuffle results to avoid showing samples from the same raw + const sql = `SELECT ${sqlColumn} AS value FROM ${sqlTable} WHERE ${sqlColumn} IS NOT NULL FETCH FIRST 10 ROWS ONLY` + const rows = await conn.query(sql) + return rows.length > 0 ? 
shuffle(rows)[0][0] : null +} + +async function getColumnType( + conn: Conn, + ref: AttributeRef +): Promise { + const rows = await conn.query( + ` + SELECT + CASE + WHEN data_type IN ('VARCHAR2', 'CHAR') THEN data_type || '(' || data_length || ')' + WHEN data_type IN ('NUMBER') THEN data_type || '(' || data_precision || ', ' || data_scale || ')' + ELSE data_type + END AS formatted_type + FROM + all_tab_columns + WHERE + table_name = :table_name AND column_name = :column_name ${ref.schema ? ` AND owner = :owner` : ""}`, + [ref.entity, ref.attribute[0], ref.schema].filter(Boolean) + ) + return rows.length > 0 ? (rows[0][0] as string) : "unknown" +} + +type ColumnBasics = { rows: number; nulls: number; cardinality: number } + +async function getColumnBasics( + conn: Conn, + sqlTable: SqlFragment, + sqlColumn: SqlFragment +): Promise { + const queries = [ + `SELECT count(*) FROM ${sqlTable}`, + `SELECT count(*) FROM ${sqlTable} WHERE ${sqlColumn} IS NULL`, + `SELECT count(distinct ${sqlColumn}) FROM ${sqlTable}`, + ] + const [rows, nulls, cardinality] = await Promise.all( + queries.map((query) => conn.query(query).then((res) => Number(res[0][0]))) + ) + + return { + rows, + nulls, + cardinality, + } +} + +async function getCommonValues( + conn: Conn, + sqlTable: SqlFragment, + sqlColumn: SqlFragment +): Promise { + const sql = `SELECT ${sqlColumn} AS value, count(*) FROM ${sqlTable} GROUP BY ${sqlColumn} ORDER BY count(*) DESC FETCH FIRST 10 ROWS ONLY` + const res = await conn.query(sql) + return res.reduce((acc, row) => { + const [value, count] = row as any[] + acc.push({ count, value }) + return acc + }, []) +} diff --git a/libs/connector-oracle/tsconfig.json b/libs/connector-oracle/tsconfig.json new file mode 100644 index 000000000..84c48ed51 --- /dev/null +++ b/libs/connector-oracle/tsconfig.json @@ -0,0 +1,105 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + + /* Projects */ + // "incremental": true, /* Save 
.tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + + /* Language and Environment */ + "target": "ES2021", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + "lib": ["ES2021"], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. 
*/ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + + /* Modules */ + "module": "commonjs", /* Specify what module code is generated. */ + "rootDir": "./src", /* Specify the root folder within your source files. */ + "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + "resolveJsonModule": true, /* Enable importing .json files. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + + /* Emit */ + "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. 
*/ + "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./out", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. 
*/ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. 
*/ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ + }, + "exclude": ["node_modules", "out", "**/*.test.ts"], + "include": ["src"] +} diff --git a/libs/models/src/databaseUrl.ts b/libs/models/src/databaseUrl.ts index 7457853fa..232798400 100644 --- a/libs/models/src/databaseUrl.ts +++ b/libs/models/src/databaseUrl.ts @@ -26,6 +26,7 @@ const postgresRe = /^(?:jdbc:)?postgres(?:ql)?:\/\/(?:([^:]+):([^@]*)@)?([^:/?&] const sqlserver = /^(?:jdbc:)?sqlserver(?:ql)?:\/\/(?:([^:]+):([^@]*)@)?([^:/?&]+)(?::(\d+))?(?:\/([^?]+))?(?:\?(.+))?$/ const snowflakeRegexxx = /^(?:jdbc:)?snowflake:\/\/(?:([^:]+):([^@]*)@)?([^:/?&]+)(?::(\d+))?(?:\/([^?]+))?(?:\?(.+))?$/ const snowflakeRegexxxxxxxxxxxxxx = /^https:\/\/(?:([^:]+):([^@]*)@)?(.+?(?:\.privatelink)?\.snowflakecomputing\.com)(?::(\d+))?(?:\/([^?]+))?$/ +const oracleRegexxxxx = /^(?:jdbc:)?oracle:thin:(?:([^\/]+)\/([^@]+)@\/\/)?([^:\/]+)(?::(\d+))?(?:\/([^?]+))?$/ export function parseDatabaseUrl(rawUrl: DatabaseUrl): DatabaseUrlParsed { const url = rawUrl.trim() @@ -94,6 +95,14 @@ export function parseDatabaseUrl(rawUrl: DatabaseUrl): DatabaseUrlParsed { return removeUndefined({full: url, kind, user, pass, host, port: port ? parseInt(port) : undefined, db, options}) } + const oracleMatches = url.match(oracleRegexxxxx) + if (oracleMatches) { + const kind: DatabaseKind = 'oracle' + const [, user, pass, host, port, db, optionsStr] = oracleMatches + const options = optionsStr ? parseDatabaseOptions(optionsStr) : undefined + return removeUndefined({full: url, kind, user, pass, host, port: port ? 
parseInt(port) : undefined, db, options}) + } + return {full: url} } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e342bf56f..141ac0f30 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -516,6 +516,40 @@ importers: specifier: ^5.4.5 version: 5.4.5 + libs/connector-oracle: + dependencies: + '@azimutt/models': + specifier: workspace:^ + version: link:../models + '@azimutt/utils': + specifier: workspace:^ + version: link:../utils + oracledb: + specifier: ^6.5.1 + version: 6.5.1 + devDependencies: + '@jest/globals': + specifier: ^29.7.0 + version: 29.7.0 + '@types/jest': + specifier: ^29.5.12 + version: 29.5.12 + '@types/node': + specifier: ^20.14.5 + version: 20.14.5 + '@types/oracledb': + specifier: ^6.5.1 + version: 6.5.1 + jest: + specifier: ^29.7.0 + version: 29.7.0(@types/node@20.14.5)(ts-node@10.9.2(@types/node@20.14.5)(typescript@5.4.5)) + ts-jest: + specifier: ^29.1.3 + version: 29.1.4(@babel/core@7.24.6)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.6))(esbuild@0.21.4)(jest@29.7.0(@types/node@20.14.5)(ts-node@10.9.2(@types/node@20.14.5)(typescript@5.4.5)))(typescript@5.4.5) + typescript: + specifier: ^5.4.5 + version: 5.4.5 + libs/connector-postgres: dependencies: '@azimutt/models': @@ -2725,6 +2759,9 @@ packages: '@types/node@20.14.5': resolution: {integrity: sha512-aoRR+fJkZT2l0aGOJhuA8frnCSoNX6W7U2mpNq63+BxBIj5BQFt8rHy627kijCmm63ijdSdwvGgpUsU6MBsZZA==} + '@types/oracledb@6.5.1': + resolution: {integrity: sha512-Ll0bKGXmCZVngBL3juSaytA8Jeocx0VghDHTt+FEC2bs8fdl9pzoaBXYWXjBUxCCT8Y/69m5AzuTgBd79j24WA==} + '@types/pg@8.11.6': resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==} @@ -6521,6 +6558,10 @@ packages: resolution: {integrity: sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==} engines: {node: '>=10'} + oracledb@6.5.1: + resolution: {integrity: 
sha512-JzoSGei1wnvmqgKnAZK1W650mzHTZXx+7hClV4mwsbY/ZjUtrpnojNJMYJ2jkOhj7XG5oJPfXc4GqDKaNzkxqg==} + engines: {node: '>=14.6'} + os-tmpdir@1.0.2: resolution: {integrity: sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==} engines: {node: '>=0.10.0'} @@ -8586,7 +8627,7 @@ snapshots: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/core': 3.582.0 '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) '@aws-sdk/middleware-bucket-endpoint': 3.577.0 @@ -8647,7 +8688,7 @@ snapshots: dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/core': 3.582.0 '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) '@aws-sdk/middleware-host-header': 3.577.0 @@ -8732,7 +8773,7 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)': + '@aws-sdk/client-sts@3.583.0': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 @@ -8775,7 +8816,6 @@ snapshots: '@smithy/util-utf8': 3.0.0 tslib: 2.6.2 transitivePeerDependencies: - - '@aws-sdk/client-sso-oidc' - aws-crt '@aws-sdk/core@3.582.0': @@ -8809,7 +8849,7 @@ snapshots: '@aws-sdk/credential-provider-ini@3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0)': dependencies: - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/credential-provider-env': 3.577.0 '@aws-sdk/credential-provider-process': 3.577.0 
'@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) @@ -8866,7 +8906,7 @@ snapshots: '@aws-sdk/credential-provider-web-identity@3.577.0(@aws-sdk/client-sts@3.583.0)': dependencies: - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/types': 3.577.0 '@smithy/property-provider': 3.0.0 '@smithy/types': 3.0.0 @@ -11314,6 +11354,10 @@ snapshots: dependencies: undici-types: 5.26.5 + '@types/oracledb@6.5.1': + dependencies: + '@types/node': 20.14.5 + '@types/pg@8.11.6': dependencies: '@types/node': 20.14.5 @@ -16087,6 +16131,8 @@ snapshots: strip-ansi: 6.0.1 wcwidth: 1.0.1 + oracledb@6.5.1: {} + os-tmpdir@1.0.2: {} p-cancelable@2.1.1: {} From e5cd9b042b5297aef760bd45f866874b010566af Mon Sep 17 00:00:00 2001 From: Anthony Ly Date: Fri, 28 Jun 2024 11:11:09 +0200 Subject: [PATCH 2/5] chore: gateway oracle connector --- gateway/package.json | 1 + gateway/src/services/connector.ts | 3 ++- pnpm-lock.yaml | 42 +++++++++++++++++-------------- 3 files changed, 26 insertions(+), 20 deletions(-) diff --git a/gateway/package.json b/gateway/package.json index 80313a701..714f403c2 100644 --- a/gateway/package.json +++ b/gateway/package.json @@ -45,6 +45,7 @@ "@azimutt/connector-mariadb": "^0.1.1", "@azimutt/connector-mongodb": "^0.1.1", "@azimutt/connector-mysql": "^0.1.2", + "@azimutt/connector-oracle": "workspace:^", "@azimutt/connector-postgres": "^0.1.6", "@azimutt/connector-snowflake": "^0.1.1", "@azimutt/connector-sqlserver": "^0.1.1", diff --git a/gateway/src/services/connector.ts b/gateway/src/services/connector.ts index 0fbdeb588..ed4923d41 100644 --- a/gateway/src/services/connector.ts +++ b/gateway/src/services/connector.ts @@ -7,6 +7,7 @@ import {mysql} from "@azimutt/connector-mysql" import {postgres} from "@azimutt/connector-postgres" import {snowflake} from "@azimutt/connector-snowflake" import {sqlserver} from "@azimutt/connector-sqlserver" +import {oracle} 
from "@azimutt/connector-oracle" const connectors: Record = { bigquery: bigquery, @@ -17,7 +18,7 @@ const connectors: Record = { mariadb: mariadb, mongodb: mongodb, mysql: mysql, - oracle: undefined, + oracle: oracle, postgres: postgres, redis: undefined, snowflake: snowflake, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 141ac0f30..2ba8fd58c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -287,6 +287,9 @@ importers: '@azimutt/connector-mysql': specifier: ^0.1.2 version: 0.1.2 + '@azimutt/connector-oracle': + specifier: workspace:^ + version: link:../libs/connector-oracle '@azimutt/connector-postgres': specifier: ^0.1.6 version: 0.1.6(pg-native@3.0.1) @@ -8627,9 +8630,9 @@ snapshots: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) - '@aws-sdk/client-sts': 3.583.0 + '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) '@aws-sdk/core': 3.582.0 - '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)) '@aws-sdk/middleware-bucket-endpoint': 3.577.0 '@aws-sdk/middleware-expect-continue': 3.577.0 '@aws-sdk/middleware-flexible-checksums': 3.577.0 @@ -8688,9 +8691,9 @@ snapshots: dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sts': 3.583.0 + '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) '@aws-sdk/core': 3.582.0 - '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)) '@aws-sdk/middleware-host-header': 3.577.0 '@aws-sdk/middleware-logger': 3.577.0 
'@aws-sdk/middleware-recursion-detection': 3.577.0 @@ -8773,13 +8776,13 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sts@3.583.0': + '@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) '@aws-sdk/core': 3.582.0 - '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)) '@aws-sdk/middleware-host-header': 3.577.0 '@aws-sdk/middleware-logger': 3.577.0 '@aws-sdk/middleware-recursion-detection': 3.577.0 @@ -8816,6 +8819,7 @@ snapshots: '@smithy/util-utf8': 3.0.0 tslib: 2.6.2 transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' - aws-crt '@aws-sdk/core@3.582.0': @@ -8847,13 +8851,13 @@ snapshots: '@smithy/util-stream': 3.0.1 tslib: 2.6.2 - '@aws-sdk/credential-provider-ini@3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0)': + '@aws-sdk/credential-provider-ini@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0))': dependencies: - '@aws-sdk/client-sts': 3.583.0 + '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) '@aws-sdk/credential-provider-env': 3.577.0 '@aws-sdk/credential-provider-process': 3.577.0 - '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) - '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)) '@aws-sdk/types': 3.577.0 '@smithy/credential-provider-imds': 3.0.0 
'@smithy/property-provider': 3.0.0 @@ -8864,14 +8868,14 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt - '@aws-sdk/credential-provider-node@3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0)': + '@aws-sdk/credential-provider-node@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0))': dependencies: '@aws-sdk/credential-provider-env': 3.577.0 '@aws-sdk/credential-provider-http': 3.582.0 - '@aws-sdk/credential-provider-ini': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-ini': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)) '@aws-sdk/credential-provider-process': 3.577.0 - '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) - '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)) '@aws-sdk/types': 3.577.0 '@smithy/credential-provider-imds': 3.0.0 '@smithy/property-provider': 3.0.0 @@ -8891,10 +8895,10 @@ snapshots: '@smithy/types': 3.0.0 tslib: 2.6.2 - '@aws-sdk/credential-provider-sso@3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))': + '@aws-sdk/credential-provider-sso@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)': dependencies: '@aws-sdk/client-sso': 3.583.0 - '@aws-sdk/token-providers': 3.577.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) + '@aws-sdk/token-providers': 3.577.0(@aws-sdk/client-sso-oidc@3.583.0) '@aws-sdk/types': 3.577.0 '@smithy/property-provider': 3.0.0 '@smithy/shared-ini-file-loader': 3.0.0 @@ -8904,9 +8908,9 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt - 
'@aws-sdk/credential-provider-web-identity@3.577.0(@aws-sdk/client-sts@3.583.0)': + '@aws-sdk/credential-provider-web-identity@3.577.0(@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0))': dependencies: - '@aws-sdk/client-sts': 3.583.0 + '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) '@aws-sdk/types': 3.577.0 '@smithy/property-provider': 3.0.0 '@smithy/types': 3.0.0 @@ -9025,7 +9029,7 @@ snapshots: '@smithy/types': 3.0.0 tslib: 2.6.2 - '@aws-sdk/token-providers@3.577.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))': + '@aws-sdk/token-providers@3.577.0(@aws-sdk/client-sso-oidc@3.583.0)': dependencies: '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) '@aws-sdk/types': 3.577.0 From eeea7a16eed5a306c70804c9e1172b1589fbcafd Mon Sep 17 00:00:00 2001 From: Anthony Ly Date: Fri, 28 Jun 2024 11:21:06 +0200 Subject: [PATCH 3/5] fix: analyze --- libs/connector-oracle/src/oracle.ts | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/libs/connector-oracle/src/oracle.ts b/libs/connector-oracle/src/oracle.ts index 17c7ed58a..7882203ee 100644 --- a/libs/connector-oracle/src/oracle.ts +++ b/libs/connector-oracle/src/oracle.ts @@ -214,14 +214,15 @@ function buildEntity( jsonColumns: Record, polyColumns: Record ): Entity { - return removeEmpty({ + return { schema: table.table_schema, name: table.table_name, - attrs: columns - .slice(0) - .sort((a, b) => a.column_index - b.column_index) - .map((c) => buildAttribute(c, jsonColumns[c.column_name])), + attrs: + columns + ?.slice(0) + ?.sort((a, b) => a.column_index - b.column_index) + ?.map((c) => buildAttribute(c, jsonColumns[c.column_name])) ?? 
[], pk: constraints .filter((c) => c.constraint_type === "P") @@ -231,7 +232,7 @@ function buildEntity( .filter((c) => c.constraint_type === "C") .map((c) => buildCheck(c, columnsByIndex)), extra: undefined, - }) + } } export type RawColumn = { @@ -546,6 +547,7 @@ export const getTypes = t.type_name FROM all_types t + WHERE t.owner IS NOT NULL ORDER BY type_schema, type_name`, [], "getTypes" From b1bdf9b24adf6c13cf1c5759f0ff813f99eb2424 Mon Sep 17 00:00:00 2001 From: Anthony Ly Date: Fri, 28 Jun 2024 11:32:08 +0200 Subject: [PATCH 4/5] fix: user --- libs/connector-oracle/src/connect.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/connector-oracle/src/connect.ts b/libs/connector-oracle/src/connect.ts index f5a388447..418fc0d74 100644 --- a/libs/connector-oracle/src/connect.ts +++ b/libs/connector-oracle/src/connect.ts @@ -118,7 +118,7 @@ function buildConfig( ): ConnectionAttributes { return { connectionString: `${url.host}:${url.port}/${url.db}`, - user: "sys", + user: url.user, password: url.pass || undefined, privilege: SYSDBA, } From 7b501e311da80c70ac49eebe624eaf7ac23a720d Mon Sep 17 00:00:00 2001 From: Anthony Ly Date: Fri, 28 Jun 2024 13:55:15 +0200 Subject: [PATCH 5/5] doc: readme --- libs/connector-oracle/README.md | 49 +++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/libs/connector-oracle/README.md b/libs/connector-oracle/README.md index 7d0978d21..e79790b88 100644 --- a/libs/connector-oracle/README.md +++ b/libs/connector-oracle/README.md @@ -24,3 +24,52 @@ If you need to develop on multiple libs at the same time (ex: want to update a c - Depend on a local lib: `pnpm add `, ex: `pnpm add @azimutt/models` - "Publish" lib locally by building it: `pnpm run build` + +## Oracle Setup + +### Run in Docker + +You can use the free version of Oracle Database + +```bash +docker pull container-registry.oracle.com/database/free:latest +``` + +To launch a container, the needed configuration is the `ORACLE_PWD` 
of the `SYS` user. You can also map the default 1521 port to your computer. + +```bash +docker run -d --name oracle -p 1521:1521 -e ORACLE_PWD=oracle container-registry.oracle.com/database/free:latest +``` + +To connect, you can use a jdbc driver with the URL `jdbc:oracle:thin:/@//localhost:1521/FREE` + +### Setup a user + +Create a user + +```sql +CREATE USER "C##AZIMUTT" IDENTIFIED BY "azimutt"; +``` + +Grand permissions + +```sql +GRANT CONNECT, RESOURCE, DBA TO "C##AZIMUTT"; +``` + +Update user quota on `Users` tablespace + +```sql +ALTER USER "C##AZIMUTT" QUOTA UNLIMITED ON USERS; +``` + +### Create a table + +```sql +CREATE TABLE "C##AZIMUTT"."USERS"( + user_id NUMBER GENERATED BY DEFAULT AS IDENTITY, + first_name VARCHAR2(50) NOT NULL, + last_name VARCHAR2(50) NOT NULL, + PRIMARY KEY(user_id) +); +```