diff --git a/.github/actions/upload-artifacts/action.yml b/.github/actions/upload-artifacts/action.yml index 4322fe7c0db2..e5322151a806 100644 --- a/.github/actions/upload-artifacts/action.yml +++ b/.github/actions/upload-artifacts/action.yml @@ -6,7 +6,7 @@ inputs: required: true target-file: description: The path of the target artifact - required: true + required: false version: description: Version of the artifact required: true @@ -18,6 +18,7 @@ runs: using: composite steps: - name: Create artifacts directory + if: ${{ inputs.target-file != '' }} working-directory: ${{ inputs.working-dir }} shell: bash run: | @@ -51,13 +52,13 @@ runs: # Note: The artifacts will be double zip compressed(related issue: https://github.com/actions/upload-artifact/issues/39). # However, when we use 'actions/download-artifact@v3' to download the artifacts, it will be automatically unzipped. - name: Upload artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ inputs.artifacts-dir }} path: ${{ inputs.working-dir }}/${{ inputs.artifacts-dir }}.tar.gz - name: Upload checksum - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ inputs.artifacts-dir }}.sha256sum path: ${{ inputs.working-dir }}/${{ inputs.artifacts-dir }}.sha256sum diff --git a/.github/workflows/develop.yml b/.github/workflows/develop.yml index 62a537a23130..d5df2c0da31e 100644 --- a/.github/workflows/develop.yml +++ b/.github/workflows/develop.yml @@ -76,8 +76,8 @@ jobs: - name: Run taplo run: taplo format --check - sqlness: - name: Sqlness Test + build: + name: Build GreptimeDB binaries if: github.event.pull_request.draft == false runs-on: ${{ matrix.os }} strategy: @@ -87,15 +87,47 @@ jobs: steps: - uses: actions/checkout@v3 - uses: arduino/setup-protoc@v1 - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - uses: dtolnay/rust-toolchain@master with: toolchain: ${{ env.RUST_TOOLCHAIN }} - - name: Rust Cache - uses: Swatinem/rust-cache@v2 + - uses: 
Swatinem/rust-cache@v2 + - name: Build greptime binaries + shell: bash + run: cargo build + - name: Pack greptime binaries + shell: bash + run: | + mkdir bins && \ + mv ./target/debug/greptime bins && \ + mv ./target/debug/sqlness-runner bins + - name: Print greptime binaries info + run: ls -lh bins + - name: Upload artifacts + uses: ./.github/actions/upload-artifacts + with: + artifacts-dir: bins + version: current + + sqlness: + name: Sqlness Test + if: github.event.pull_request.draft == false + needs: build + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ ubuntu-20.04 ] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - name: Download pre-built binaries + uses: actions/download-artifact@v4 + with: + name: bins + path: . + - name: Unzip binaries + run: tar -xvf ./bins.tar.gz - name: Run sqlness - run: cargo sqlness + run: ./bins/sqlness-runner -c ./tests/cases --bins-dir ./bins - name: Upload sqlness logs if: always() uses: actions/upload-artifact@v3 @@ -107,6 +139,7 @@ jobs: sqlness-kafka-wal: name: Sqlness Test with Kafka Wal if: github.event.pull_request.draft == false + needs: build runs-on: ${{ matrix.os }} strategy: matrix: @@ -114,19 +147,18 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v3 - - uses: arduino/setup-protoc@v1 + - name: Download pre-built binaries + uses: actions/download-artifact@v4 with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - - uses: dtolnay/rust-toolchain@master - with: - toolchain: ${{ env.RUST_TOOLCHAIN }} - - name: Rust Cache - uses: Swatinem/rust-cache@v2 + name: bins + path: . 
+ - name: Unzip binaries + run: tar -xvf ./bins.tar.gz - name: Setup kafka server working-directory: tests-integration/fixtures/kafka run: docker compose -f docker-compose-standalone.yml up -d --wait - name: Run sqlness - run: cargo sqlness -w kafka -k 127.0.0.1:9092 + run: ./bins/sqlness-runner -w kafka -k 127.0.0.1:9092 -c ./tests/cases --bins-dir ./bins - name: Upload sqlness logs if: always() uses: actions/upload-artifact@v3 diff --git a/docker/dev-builder/ubuntu/Dockerfile b/docker/dev-builder/ubuntu/Dockerfile index d78603234e43..1e0a902eea47 100644 --- a/docker/dev-builder/ubuntu/Dockerfile +++ b/docker/dev-builder/ubuntu/Dockerfile @@ -40,7 +40,7 @@ RUN apt-get -y purge python3.8 && \ # wildcard here. However, that requires the git's config files and the submodules all owned by the very same user. # It's troublesome to do this since the dev build runs in Docker, which is under user "root"; while outside the Docker, # it can be a different user that have prepared the submodules. -RUN git config --global --add safe.directory ‘*’ +RUN git config --global --add safe.directory '*' # Install Python dependencies. COPY $DOCKER_BUILD_ROOT/docker/python/requirements.txt /etc/greptime/requirements.txt diff --git a/tests/runner/src/env.rs b/tests/runner/src/env.rs index 8d1f189b7266..0b9ebe705046 100644 --- a/tests/runner/src/env.rs +++ b/tests/runner/src/env.rs @@ -57,6 +57,11 @@ pub struct Env { data_home: PathBuf, server_addr: Option<String>, wal: WalConfig, + + /// The path to the directory that contains the pre-built GreptimeDB binary. + /// When running in CI, this is expected to be set. + /// If not set, this runner will build the GreptimeDB binary itself when needed, and set this field by then. 
+ bins_dir: Arc<Mutex<Option<PathBuf>>>, } #[async_trait] @@ -78,11 +83,17 @@ impl EnvController for Env { } impl Env { - pub fn new(data_home: PathBuf, server_addr: Option<String>, wal: WalConfig) -> Self { + pub fn new( + data_home: PathBuf, + server_addr: Option<String>, + wal: WalConfig, + bins_dir: Option<PathBuf>, + ) -> Self { Self { data_home, server_addr, wal, + bins_dir: Arc::new(Mutex::new(bins_dir)), } } @@ -90,7 +101,7 @@ impl Env { if let Some(server_addr) = self.server_addr.clone() { self.connect_db(&server_addr) } else { - Self::build_db().await; + self.build_db(); self.setup_wal(); let db_ctx = GreptimeDBContext::new(self.wal.clone()); @@ -116,7 +127,7 @@ impl Env { if let Some(server_addr) = self.server_addr.clone() { self.connect_db(&server_addr) } else { - Self::build_db().await; + self.build_db(); self.setup_wal(); let db_ctx = GreptimeDBContext::new(self.wal.clone()); @@ -249,8 +260,12 @@ impl Env { #[cfg(windows)] let program = "greptime.exe"; + let bins_dir = self.bins_dir.lock().unwrap().clone().expect( + "GreptimeDB binary is not available. Please pass in the path to the directory that contains the pre-built GreptimeDB binary. 
+ Or you may call `self.build_db()` beforehand.", + ); + let mut process = Command::new(program) - .current_dir(util::get_binary_dir("debug")) + .current_dir(bins_dir) .env("TZ", "UTC") .args(args) .stdout(log_file) @@ -374,7 +389,11 @@ impl Env { } /// Build the DB with `cargo build --bin greptime` - async fn build_db() { + fn build_db(&self) { + if self.bins_dir.lock().unwrap().is_some() { + return; + } + println!("Going to build the DB..."); let output = Command::new("cargo") .current_dir(util::get_workspace_root()) @@ -389,7 +408,12 @@ impl Env { io::stderr().write_all(&output.stderr).unwrap(); panic!(); } - println!("Build finished, starting..."); + + let _ = self + .bins_dir + .lock() + .unwrap() + .insert(util::get_binary_dir("debug")); } } diff --git a/tests/runner/src/main.rs b/tests/runner/src/main.rs index c1cd0110562d..415672a759e2 100644 --- a/tests/runner/src/main.rs +++ b/tests/runner/src/main.rs @@ -62,6 +62,11 @@ struct Args { /// from starting a kafka cluster, and use the given endpoint as kafka backend. #[clap(short, long)] kafka_wal_broker_endpoints: Option<String>, + + /// The path to the directory where GreptimeDB's binaries reside. + /// If not set, sqlness will build GreptimeDB on the fly. + #[clap(long)] + bins_dir: Option<PathBuf>, } #[tokio::main] @@ -94,6 +99,9 @@ async fn main() { }, }; - let runner = Runner::new(config, Env::new(data_home, args.server_addr, wal)); + let runner = Runner::new( + config, + Env::new(data_home, args.server_addr, wal, args.bins_dir), + ); runner.run().await.unwrap(); } diff --git a/tests/runner/src/util.rs b/tests/runner/src/util.rs index 71ae585a81ad..5ae63ede37c7 100644 --- a/tests/runner/src/util.rs +++ b/tests/runner/src/util.rs @@ -91,7 +91,7 @@ pub fn get_workspace_root() -> String { runner_crate_path.into_os_string().into_string().unwrap() } -pub fn get_binary_dir(mode: &str) -> String { +pub fn get_binary_dir(mode: &str) -> PathBuf { // first go to the workspace root. 
let mut workspace_root = PathBuf::from(get_workspace_root()); @@ -99,7 +99,7 @@ pub fn get_binary_dir(mode: &str) -> String { workspace_root.push("target"); workspace_root.push(mode); - workspace_root.into_os_string().into_string().unwrap() + workspace_root } /// Spin-waiting a socket address is available, or timeout.