Skip to content

Commit

Permalink
Add Built-in RBAC support (lakesoul-io#292)
Browse files Browse the repository at this point in the history
* rebase main

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* fix compile error

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* fix rbac test bug

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* rebase main

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* rebase main

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* add domain for table and namespace completely

Signed-off-by: yuanf <yuanf@dmetasoul.com>

B

C

* add domain for PartitionInfo and DataCommitInfo

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* add test for DataCommitInfo

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* hide tests

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* fix env bug and table insert bug

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* rebase main

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* add cross-domain test

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* complete test for rbac in spark

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* fix default database domain bug and add base rbac code for lakesoul-flink

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* fix casbin version bug

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* rebase main

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* add flink test and fix some bugs

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* rebase main ok

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* rebase main ok

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* retest

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* add git actions

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* fix git actions bug

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* fix git actions bug again

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* fix git actions bug again2

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* fix git actions bug again3

Signed-off-by: yuanf <yuanf@dmetasoul.com>

* fix git actions bug again4

Signed-off-by: yuanf <yuanf@dmetasoul.com>

---------

Signed-off-by: yuanf <yuanf@dmetasoul.com>
Co-authored-by: yuanf <yuanf@dmetasoul.com>
Signed-off-by: ChenYunHey <1908166778@qq.com>
  • Loading branch information
2 people authored and ChenYunHey committed Aug 22, 2023
1 parent bc13330 commit d8cff58
Show file tree
Hide file tree
Showing 39 changed files with 1,640 additions and 109 deletions.
144 changes: 142 additions & 2 deletions .github/workflows/maven-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,73 @@ jobs:
path: ./native-io/target/release/
- name: Build with Maven
run: |
mvn -B test -pl lakesoul-spark -am -Pcross-build -Pparallel-test --file pom.xml -Dtest='!UpdateScalaSuite,!AlterTableByNameSuite,!ReadSuite,!UpdateSQLSuite,!ParquetNativeFilterSuite,!DeleteScalaSuite,!DeleteSQLSuite,!ParquetV2FilterSuite,!ParquetScanSuite,!UpsertSuiteBase' -Dsurefire.failIfNoSpecifiedTests=false
mvn -B test -pl lakesoul-spark -am -Pcross-build -Pparallel-test --file pom.xml -Dtest='!UpdateScalaSuite,!AlterTableByNameSuite,!ReadSuite,!UpdateSQLSuite,!ParquetNativeFilterSuite,!DeleteScalaSuite,!DeleteSQLSuite,!ParquetV2FilterSuite,!ParquetScanSuite,!UpsertSuiteBase, !RBACOperatinSuite' -Dsurefire.failIfNoSpecifiedTests=false
- name: Generate Report Site
if: always()
run: |
mvn surefire-report:report-only -pl lakesoul-spark -am
- name: Upload Test Report
if: always()
continue-on-error: true
uses: actions/upload-artifact@v3
with:
name: maven-test-report-artifact-spark-2
path: lakesoul-spark/target/site
retention-days: 5
if-no-files-found: error

spark-test-rbac:
runs-on: ubuntu-latest
needs: [ build-linux-x86_64 ]

services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres:14.5
# Provide the password for postgres
env:
POSTGRES_PASSWORD: lakesoul_test
POSTGRES_USER: lakesoul_test
POSTGRES_DB: lakesoul_test
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
--name lakesoul-test-pg
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432

steps:
- uses: actions/checkout@v3
- name: Set up JDK 8
uses: actions/setup-java@v3
with:
java-version: '8'
distribution: 'temurin'
cache: maven
- name: Install psql
run: sudo apt-get install -y postgresql-client-14
- name: Init PG
run: |
./script/meta_init_for_local_test.sh -j 1
- name: Init PG RBAC
run: |
./script/meta_rbac_init_for_local_test.sh -j 1
- name: Install Protoc
uses: arduino/setup-protoc@v2
with:
version: "23.x"
- uses: actions/download-artifact@v3
with:
name: lakesoul-nativeio-x86_64-unknown-linux-gnu-maven-test
path: ./native-io/target/release/
- name: Build with Maven
run: |
mvn -B test -pl lakesoul-spark -am -Pcross-build --file pom.xml -Dtest='RBACOperatinSuite' -Dsurefire.failIfNoSpecifiedTests=false
- name: Generate Report Site
if: always()
run: |
Expand Down Expand Up @@ -228,7 +294,7 @@ jobs:
path: ./native-io/target/release/
- name: Build with Maven
run: |
MAVEN_OPTS="-Xmx5g" mvn -B test -pl lakesoul-flink -am -Pcross-build --file pom.xml -Dsurefire.failIfNoSpecifiedTests=false
MAVEN_OPTS="-Xmx5g" mvn -B test -pl lakesoul-flink -am -Pcross-build --file pom.xml -Dtest='!LakeSoulRBACTest' -Dsurefire.failIfNoSpecifiedTests=false
- name: Generate Report Site
if: always()
run: |
Expand All @@ -242,3 +308,77 @@ jobs:
path: lakesoul-flink/target/site
retention-days: 5
if-no-files-found: error

flink-test-rbac:
runs-on: ubuntu-latest
needs: [ build-linux-x86_64 ]

services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres:14.5
# Provide the password for postgres
env:
POSTGRES_PASSWORD: lakesoul_test
POSTGRES_USER: lakesoul_test
POSTGRES_DB: lakesoul_test
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
--name lakesoul-test-pg
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432

steps:
- uses: actions/checkout@v3
- name: Set up JDK 8
uses: actions/setup-java@v3
with:
java-version: '8'
distribution: 'temurin'
cache: maven
- name: Install psql
run: sudo apt-get install -y postgresql-client-14
- name: Init PG
run: |
PGPASSWORD=lakesoul_test psql -h localhost -p 5432 -U lakesoul_test -f script/meta_init.sql lakesoul_test
- name: Init PG RBAC ROW POLICY
run: |
PGPASSWORD=lakesoul_test psql -h localhost -p 5432 -U lakesoul_test -f script/meta_rbac_init.sql lakesoul_test
- name: Init PG RBAC DOMAINS
run: |
PGPASSWORD=lakesoul_test psql -h localhost -p 5432 -U lakesoul_test -f script/meta_rbac_init_domains.sql lakesoul_test
- name: Init PG RBAC USERS
run: |
PGPASSWORD=lakesoul_test psql -h localhost -p 5432 -U lakesoul_test -f script/meta_rbac_init_users.sql lakesoul_test
- name: Install Protoc
uses: arduino/setup-protoc@v2
with:
version: "23.x"
- uses: actions/download-artifact@v3
with:
name: lakesoul-nativeio-x86_64-unknown-linux-gnu-maven-test
path: ./native-io/target/release/
- name: Build with Maven
run: |
MAVEN_OPTS="-Xmx5g" mvn -B test -pl lakesoul-flink -am -Pcross-build --file pom.xml -Dtest='LakeSoulRBACTest' -Dsurefire.failIfNoSpecifiedTests=false
- name: Generate Report Site
if: always()
run: |
mvn surefire-report:report-only -pl lakesoul-flink -am
- name: Upload Test Report
if: always()
continue-on-error: true
uses: actions/upload-artifact@v3
with:
name: maven-test-report-artifact-flink-1
path: lakesoul-flink/target/site
retention-days: 5
if-no-files-found: error
8 changes: 7 additions & 1 deletion lakesoul-common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ SPDX-License-Identifier: Apache-2.0
<dependency>
<groupId>org.casbin</groupId>
<artifactId>jdbc-adapter</artifactId>
<version>LATEST</version>
<version>2.4.1</version>
<exclusions>
<exclusion>
<groupId>org.postgresql</groupId>
Expand All @@ -127,6 +127,12 @@ SPDX-License-Identifier: Apache-2.0
</exclusions>
</dependency>
<!-- aspectJ -->
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>1.9.19</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ public static synchronized Connection getConn() throws SQLException {
public static synchronized void closeAllConnections() {
if (instance != null) {
instance.ds.close();
instance = null;
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@
import com.alibaba.fastjson.JSONObject;
import com.dmetasoul.lakesoul.meta.dao.*;
import com.dmetasoul.lakesoul.meta.entity.*;
import com.dmetasoul.lakesoul.meta.rbac.AuthZContext;
import com.dmetasoul.lakesoul.meta.rbac.AuthZEnforcer;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -106,13 +108,15 @@ public void createNewTable(String tableId, String namespace, String tableName, S
tableInfo.setPartitions(partitions);
tableInfo.setProperties(properties.toJSONString());

String domain = getNameSpaceDomain(namespace);

if (StringUtils.isNotBlank(tableName)) {
tableNameIdDao.insert(TableNameIdDao.newTableNameId(tableName, tableId, namespace));
tableNameIdDao.insert(TableNameIdDao.newTableNameId(tableName, tableId, namespace, domain));
}
if (StringUtils.isNotBlank(tablePath)) {
boolean ex = false;
try {
tablePathIdDao.insert(TablePathIdDao.newTablePathId(tablePath, tableId, namespace));
tablePathIdDao.insert(TablePathIdDao.newTablePathId(tablePath, tableId, namespace, domain));
} catch (Exception e) {
ex = true;
throw e;
Expand All @@ -124,6 +128,7 @@ public void createNewTable(String tableId, String namespace, String tableName, S
}
boolean ex = false;
try {
tableInfo.setDomain(domain);
tableInfoDao.insert(tableInfo.build());
} catch (Exception e) {
ex = true;
Expand Down Expand Up @@ -320,16 +325,6 @@ public void deleteShortTableName(String tableName, String tablePath, String tabl
tableNameIdDao.delete(tableName, tableNamespace);
}

public void addShortTableName(String tableName, String tablePath) {
TableInfo tableInfo = getTableInfoByPath(tablePath);

tableNameIdDao.insert(
TableNameId.newBuilder()
.setTableId(tableInfo.getTableId())
.setTableName(tableName)
.build());
}

public void updateTableProperties(String tableId, String properties) {
TableInfo tableInfo = tableInfoDao.selectByTableId(tableId);
JSONObject originProperties = JSON.parseObject(tableInfo.getProperties());
Expand All @@ -354,11 +349,16 @@ public void updateTableShortName(String tablePath, String tableId, String tableN
}
tableInfoDao.updateByTableId(tableId, tableName, tablePath, "");

tableNameIdDao.insert(TableNameIdDao.newTableNameId(tableName, tableId, tableNamespace));
tableNameIdDao.insert(TableNameIdDao.newTableNameId(tableName, tableId, tableNamespace, tableInfo.getDomain()));
}

public boolean batchCommitDataCommitInfo(List<DataCommitInfo> listData) {
return dataCommitInfoDao.batchInsert(listData);
List<DataCommitInfo> mappedListData = listData.stream().map(item -> {
return item.toBuilder()
.setDomain(getTableDomain(item.getTableId()))
.build();
}).collect(Collectors.toList());
return dataCommitInfoDao.batchInsert(mappedListData);
}

public boolean commitData(MetaInfo metaInfo, boolean changeSchema, CommitOp commitOp) {
Expand Down Expand Up @@ -700,6 +700,11 @@ private PartitionInfo getOrCreateCurPartitionInfo(Map<String, PartitionInfo> cur
.setTableId(tableId)
.setPartitionDesc(partitionDesc)
.setVersion(-1)
.setDomain(getTableDomain(tableId))
.build();
}else{
curPartitionInfo = curPartitionInfo.toBuilder()
.setDomain(getTableDomain(tableId))
.build();
}
return curPartitionInfo;
Expand Down Expand Up @@ -764,6 +769,28 @@ public void rollbackPartitionByVersion(String tableId, String partitionDesc, int
.build());
}

private String getTableDomain(String tableId){
if(!AuthZEnforcer.authZEnabled()){
return "public";
}
TableInfo tableInfo = this.getTableInfoByTableId(tableId);
if(tableInfo == null){
throw new IllegalStateException("target tableinfo does not exists");
}
return getNameSpaceDomain(tableInfo.getTableNamespace());
}

private String getNameSpaceDomain(String namespace){
if(!AuthZEnforcer.authZEnabled()){
return "public";
}
Namespace namespaceInfo = getNamespaceByNamespace(namespace);
if(namespaceInfo == null) {
throw new IllegalStateException("target namespace does not exists");
}
return namespaceInfo.getDomain();
}

public void commitDataCommitInfo(DataCommitInfo dataCommitInfo) {
String tableId = dataCommitInfo.getTableId();
String partitionDesc = dataCommitInfo.getPartitionDesc().replaceAll("/", LAKESOUL_RANGE_PARTITION_SPLITTER);
Expand All @@ -774,6 +801,9 @@ public void commitDataCommitInfo(DataCommitInfo dataCommitInfo) {
LOG.info("DataCommitInfo with tableId={}, commitId={} committed already", tableId, commitId.toString());
return;
} else if (metaCommitInfo == null) {
dataCommitInfo = dataCommitInfo.toBuilder()
.setDomain(getTableDomain(tableId))
.build();
dataCommitInfoDao.insert(dataCommitInfo);
}
MetaInfo.Builder metaInfo = MetaInfo.newBuilder();
Expand All @@ -787,6 +817,7 @@ public void commitDataCommitInfo(DataCommitInfo dataCommitInfo) {
.setTableId(tableId)
.setPartitionDesc(partitionDesc)
.setCommitOp(commitOp)
.setDomain(getTableDomain(tableId))
.addAllSnapshot(snapshot);
partitionInfoList.add(p.build());

Expand All @@ -809,7 +840,11 @@ public void createNewNamespace(String name, String properties, String comment) {
.setProperties(properties)
.setComment(comment == null ? "" : comment);

namespace.setDomain(AuthZEnforcer.authZEnabled()
? AuthZContext.getInstance().getDomain()
: "public");
namespaceDao.insert(namespace.build());

}

public Namespace getNamespaceByNamespace(String namespace) {
Expand All @@ -836,12 +871,14 @@ public void deleteNamespace(String namespace) {
public void cleanMeta() {

namespaceDao.clean();
namespaceDao.insert(NamespaceDao.DEFAULT_NAMESPACE);
if(!AuthZEnforcer.authZEnabled()){
namespaceDao.insert(NamespaceDao.DEFAULT_NAMESPACE);
}
dataCommitInfoDao.clean();
tableInfoDao.clean();
tablePathIdDao.clean();
tableNameIdDao.clean();
partitionInfoDao.clean();
}

}

Original file line number Diff line number Diff line change
Expand Up @@ -32,14 +32,15 @@ public class DBUtil {

private static final String driverNameKey = "lakesoul.pg.driver";
private static final String urlKey = "lakesoul.pg.url";
private static final String usernameKey = "lakesoul.pg.username";
private static final String passwordKey = "lakesoul.pg.password";
public static final String usernameKey = "lakesoul.pg.username";
public static final String passwordKey = "lakesoul.pg.password";

private static final String driverNameEnv = "LAKESOUL_PG_DRIVER";
private static final String urlEnv = "LAKESOUL_PG_URL";
private static final String usernameEnv = "LAKESOUL_PG_USERNAME";
private static final String passwordEnv = "LAKESOUL_PG_PASSWORD";
private static final String domainENV = "LAKESOUL_CURRENT_DOMAIN";
public static final String domainKey = "lakesoul.current.domain";

private static final String lakeSoulHomeEnv = "LAKESOUL_HOME";

Expand Down Expand Up @@ -98,8 +99,7 @@ public static DataBaseProperty getDBInfo() {
}

public static String getDomain() {
String domain = System.getenv(domainENV);
return domain == null ? "public" : domain;
return getConfigValue(domainENV, domainKey, "public");
}

public static void cleanAllTable() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,10 @@ public boolean isAuthZEnabled() {
return authZEnabled;
}

public void setAuthZEnabled(boolean enabled){
this.authZEnabled = enabled;
}

public String getAuthZCasbinModel() {
return authZCasbinModel;
}
Expand Down
Loading

0 comments on commit d8cff58

Please sign in to comment.