diff --git a/digdag-core/src/main/java/io/digdag/core/database/DatabaseProjectStoreManager.java b/digdag-core/src/main/java/io/digdag/core/database/DatabaseProjectStoreManager.java
index 748f42da25..5e69763892 100644
--- a/digdag-core/src/main/java/io/digdag/core/database/DatabaseProjectStoreManager.java
+++ b/digdag-core/src/main/java/io/digdag/core/database/DatabaseProjectStoreManager.java
@@ -41,8 +41,6 @@
 import org.skife.jdbi.v2.sqlobject.stringtemplate.UseStringTemplate3StatementLocator;
 import org.skife.jdbi.v2.tweak.ResultSetMapper;
 
-import javax.activation.DataSource;
-
 import java.nio.ByteBuffer;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
@@ -55,8 +53,8 @@
 import java.util.Map;
 import java.util.stream.Collectors;
 
-import static java.util.Locale.ENGLISH;
 import static java.nio.charset.StandardCharsets.UTF_8;
+import static java.util.Locale.ENGLISH;
 
 public class DatabaseProjectStoreManager
         extends BasicDatabaseStoreManager<DatabaseProjectStoreManager.Dao>
@@ -127,23 +125,18 @@ public DatabaseProjectStore(int siteId)
             this.siteId = siteId;
         }
 
-        //public List<StoredProject> getAllProjects()
-        //{
-        //    return dao.getProjects(siteId, Integer.MAX_VALUE, 0);
-        //}
-
         @DigdagTimed(value = "dpst_", category = "db", appendMethodName = true)
         @Override
-        public List<StoredProjectWithRevision> getProjectsWithLatestRevision(int pageSize, Optional<Integer> lastId, AccessController.ListFilter acFilter)
+        public List<StoredProjectWithRevision> getProjectsWithLatestRevision(int pageSize, Optional<Integer> lastId, Optional<String> namePattern, AccessController.ListFilter acFilter)
         {
-            return autoCommit((handle, dao) -> dao.getProjectsWithLatestRevision(siteId, pageSize, lastId.or(0), acFilter.getSql()));
+            return autoCommit((handle, dao) -> dao.getProjectsWithLatestRevision(siteId, pageSize, lastId.or(0), generatePartialMatchPattern(namePattern), acFilter.getSql()));
         }
 
         @DigdagTimed(value = "dpst_", category = "db", appendMethodName = true)
         @Override
-        public List<StoredProject> getProjects(int pageSize, Optional<Integer> lastId, AccessController.ListFilter acFilter)
+        public List<StoredProject> getProjects(int pageSize, Optional<Integer> lastId, Optional<String> namePattern, AccessController.ListFilter acFilter)
         {
-            return autoCommit((handle, dao) -> dao.getProjects(siteId, pageSize, lastId.or(0), acFilter.getSql()));
+            return autoCommit((handle, dao) -> dao.getProjects(siteId, pageSize, lastId.or(0), generatePartialMatchPattern(namePattern), acFilter.getSql()));
         }
 
         @DigdagTimed(value = "dpst_", category = "db", appendMethodName = true)
@@ -301,10 +294,17 @@ public StoredWorkflowDefinitionWithProject getLatestWorkflowDefinitionByName(int
         public List<StoredWorkflowDefinitionWithProject> getLatestActiveWorkflowDefinitions(
                 int pageSize,
                 Optional<Long> lastId,
+                Optional<String> namePattern,
                 AccessController.ListFilter acFilter)
             throws ResourceNotFoundException
         {
-            return autoCommit((handle, dao) -> dao.getLatestActiveWorkflowDefinitions(siteId, pageSize, lastId.or(0L), acFilter.getSql()));
+            return autoCommit((handle, dao) -> dao.getLatestActiveWorkflowDefinitions(
+                    siteId,
+                    pageSize,
+                    lastId.or(0L),
+                    generatePartialMatchPattern(namePattern),
+                    acFilter.getSql())
+            );
         }
 
         @DigdagTimed(value = "dpst_", category = "db", appendMethodName = true)
@@ -364,6 +364,19 @@ public TimeZoneMap getWorkflowTimeZonesByIdList(List<Long> defIdList)
             Map<Long, ZoneId> map = IdTimeZone.listToMap(list);
             return new TimeZoneMap(map);
         }
+
+        private String generatePartialMatchPattern(Optional<String> pattern)
+        {
+            // If the given pattern is absent or an empty string, use a plain '%'
+            // so that the pattern does not restrict the where clause.
+            return !pattern.or("").isEmpty() ? "%" + escapeLikePattern(pattern.get()) + "%" : "%";
"%" + escapeLikePattern(pattern.get()) + "%" : "%"; + } + + private String escapeLikePattern(String pattern) + { + return pattern.replace("%", "\\%") + .replace("_", "\\_"); + } } private static class IdTimeZone @@ -567,11 +580,17 @@ public interface H2Dao ") a on a.id = rev.id" + " where proj.site_id = :siteId" + " and proj.name is not null" + + " and proj.name like :namePattern" + " and " + " and proj.id > :lastId" + " order by proj.id asc" + " limit :limit") - List getProjectsWithLatestRevision(@Bind("siteId") int siteId, @Bind("limit") int limit, @Bind("lastId") int lastId, @Define("acFilter") String acFilter); + List getProjectsWithLatestRevision( + @Bind("siteId") int siteId, + @Bind("limit") int limit, + @Bind("lastId") int lastId, + @Bind("namePattern") String namePattern, + @Define("acFilter") String acFilter); // h2's MERGE doesn't return generated id when conflicting row already exists @SqlUpdate("merge into projects" + @@ -599,6 +618,12 @@ public interface H2Dao " join projects proj on a.project_id = proj.id" + " join workflow_configs wc on wc.id = wd.config_id" + " where wd.id \\> :lastId" + + // `workflow_definitions` table has a composite index + // for `revision_id` and `name` (`workflow_definitions_on_revision_id_and_name`). + // And the index is used for filter by `revision_id` and `name`. + // Since this query always limits the records by `revision_id` (the latest revision's one), + // partial matching of `name` (e.g. '%test%') can be accepted. + " and wd.name like :namePattern" + " and " + " order by wd.id" + " limit :limit") @@ -606,6 +631,7 @@ List getLatestActiveWorkflowDefinitions( @Bind("siteId") int siteId, @Bind("limit") int limit, @Bind("lastId") long lastId, + @Bind("namePattern") String namePattern, @Define("acFilter") String acFilter); } @@ -621,13 +647,19 @@ public interface PgDao " join revisions rev on proj.id = rev.project_id" + " where proj.site_id = :siteId" + " and proj.name is not null" + + " and proj.name like :namePattern" + " and " + " and proj.id > :lastId" + ") as projects_with_revision" + " where projects_with_revision.revision_id = projects_with_revision.max_revision_id" + " order by id asc" + " limit :limit") - List getProjectsWithLatestRevision(@Bind("siteId") int siteId, @Bind("limit") int limit, @Bind("lastId") int lastId, @Define("acFilter") String acFilter); + List getProjectsWithLatestRevision( + @Bind("siteId") int siteId, + @Bind("limit") int limit, + @Bind("lastId") int lastId, + @Bind("namePattern") String namePattern, + @Define("acFilter") String acFilter); @SqlQuery("insert into projects" + " (site_id, name, created_at)" + @@ -657,6 +689,12 @@ public interface PgDao " group by r.project_id" + " )) " + " and wf.id \\> :lastId" + + // `workflow_definitions` table has a composite index + // for `revision_id` and `name` (`workflow_definitions_on_revision_id_and_name`). + // And the index is used for filter by `revision_id` and `name`. + // Since this query always limits the records by `revision_id` (the latest revision's one), + // partial matching of `name` (e.g. '%test%') can be accepted. 
+ " and wf.name like :namePattern" + " and " + " order by wf.id" + " limit :limit" + @@ -669,6 +707,7 @@ List getLatestActiveWorkflowDefinitions( @Bind("siteId") int siteId, @Bind("limit") int limit, @Bind("lastId") long lastId, + @Bind("namePattern") String namePattern, @Define("acFilter") String acFilter); } @@ -678,6 +717,7 @@ public interface Dao " where proj.site_id = :siteId" + " and proj.name is not null" + " and proj.id \\> :lastId" + + " and proj.name like :namePattern" + " and " + " order by proj.id asc" + " limit :limit") @@ -685,9 +725,15 @@ List getProjects( @Bind("siteId") int siteId, @Bind("limit") int limit, @Bind("lastId") int lastId, + @Bind("namePattern") String namePattern, @Define("acFilter") String acFilter); - List getProjectsWithLatestRevision(@Bind("siteId") int siteId, @Bind("limit") int limit, @Bind("lastId") int lastId, @Define("acFilter") String acFilter); + List getProjectsWithLatestRevision( + @Bind("siteId") int siteId, + @Bind("limit") int limit, + @Bind("lastId") int lastId, + @Bind("namePattern") String namePattern, + @Define("acFilter") String acFilter); @SqlUpdate("update projects" + " set deleted_name = name, deleted_at = now(), name = NULL" + @@ -774,7 +820,7 @@ List getProjects( " limit 1") StoredWorkflowDefinitionWithProject getLatestWorkflowDefinitionByName(@Bind("siteId") int siteId, @Bind("projId") int projId, @Bind("name") String name); - List getLatestActiveWorkflowDefinitions(int siteId, int limit, long lastId, String acFilter); + List getLatestActiveWorkflowDefinitions(int siteId, int limit, long lastId, String namePattern, String acFilter); // getWorkflowDetailsById is same with getWorkflowDetailsByIdInternal // excepting site_id check diff --git a/digdag-core/src/main/java/io/digdag/core/repository/ProjectStore.java b/digdag-core/src/main/java/io/digdag/core/repository/ProjectStore.java index a6093b57ff..ff32345c8c 100644 --- a/digdag-core/src/main/java/io/digdag/core/repository/ProjectStore.java +++ b/digdag-core/src/main/java/io/digdag/core/repository/ProjectStore.java @@ -1,17 +1,15 @@ package io.digdag.core.repository; -import java.util.List; -import java.util.Map; -import java.time.ZoneId; - import com.google.common.base.Optional; import io.digdag.spi.ac.AccessController; +import java.util.List; + public interface ProjectStore { - List getProjects(int pageSize, Optional lastId, AccessController.ListFilter acFilter); + List getProjects(int pageSize, Optional lastId, Optional namePattern, AccessController.ListFilter acFilter); - List getProjectsWithLatestRevision(int pageSize, Optional lastId, AccessController.ListFilter acFilter); + List getProjectsWithLatestRevision(int pageSize, Optional lastId, Optional namePattern, AccessController.ListFilter acFilter); ProjectMap getProjectsByIdList(List projIdList); @@ -58,16 +56,16 @@ byte[] getRevisionArchiveData(int revId) List getWorkflowDefinitions(int revId, int pageSize, Optional lastId, AccessController.ListFilter acFilter); StoredWorkflowDefinition getWorkflowDefinitionByName(int revId, String name) - throws ResourceNotFoundException; + throws ResourceNotFoundException; StoredWorkflowDefinitionWithProject getWorkflowDefinitionById(long wfId) - throws ResourceNotFoundException; + throws ResourceNotFoundException; StoredWorkflowDefinitionWithProject getLatestWorkflowDefinitionByName(int projId, String name) - throws ResourceNotFoundException; + throws ResourceNotFoundException; - List getLatestActiveWorkflowDefinitions(int pageSize, Optional lastId, AccessController.ListFilter acFilter) - 
-            throws ResourceNotFoundException;
+    List<StoredWorkflowDefinitionWithProject> getLatestActiveWorkflowDefinitions(int pageSize, Optional<Long> lastId, Optional<String> namePattern, AccessController.ListFilter acFilter)
+            throws ResourceNotFoundException;
 
     TimeZoneMap getWorkflowTimeZonesByIdList(List<Long> defIdList);
 }
diff --git a/digdag-core/src/test/java/io/digdag/core/database/DatabaseProjectStoreManagerTest.java b/digdag-core/src/test/java/io/digdag/core/database/DatabaseProjectStoreManagerTest.java
index c462398d01..e6c7e712c3 100644
--- a/digdag-core/src/test/java/io/digdag/core/database/DatabaseProjectStoreManagerTest.java
+++ b/digdag-core/src/test/java/io/digdag/core/database/DatabaseProjectStoreManagerTest.java
@@ -1,17 +1,48 @@
 package io.digdag.core.database;
 
-import java.util.*;
+import com.google.common.base.Optional;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import io.digdag.core.repository.ImmutableProject;
+import io.digdag.core.repository.ImmutableRevision;
+import io.digdag.core.repository.ImmutableWorkflowDefinition;
+import io.digdag.core.repository.Project;
+import io.digdag.core.repository.ProjectControl;
+import io.digdag.core.repository.ProjectMap;
+import io.digdag.core.repository.ProjectStore;
+import io.digdag.core.repository.ProjectStoreManager;
+import io.digdag.core.repository.ResourceConflictException;
+import io.digdag.core.repository.Revision;
+import io.digdag.core.repository.StoredProject;
+import io.digdag.core.repository.StoredProjectWithRevision;
+import io.digdag.core.repository.StoredRevision;
+import io.digdag.core.repository.StoredWorkflowDefinition;
+import io.digdag.core.repository.StoredWorkflowDefinitionWithProject;
+import io.digdag.core.repository.TimeZoneMap;
+import io.digdag.core.repository.WorkflowDefinition;
+import io.digdag.core.schedule.SchedulerManager;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
 import java.time.Instant;
+import java.util.List;
 import java.util.concurrent.atomic.AtomicReference;
-import org.skife.jdbi.v2.IDBI;
-import org.junit.*;
-import com.google.common.base.Optional;
-import com.google.common.collect.*;
-import io.digdag.core.repository.*;
-import io.digdag.core.schedule.*;
+import java.util.stream.Collectors;
+
+import static io.digdag.core.database.DatabaseTestingUtils.assertConflict;
+import static io.digdag.core.database.DatabaseTestingUtils.assertEmpty;
+import static io.digdag.core.database.DatabaseTestingUtils.assertNotConflict;
+import static io.digdag.core.database.DatabaseTestingUtils.assertNotFound;
+import static io.digdag.core.database.DatabaseTestingUtils.createRevision;
+import static io.digdag.core.database.DatabaseTestingUtils.createWorkflow;
+import static io.digdag.core.database.DatabaseTestingUtils.setupDatabase;
 import static java.nio.charset.StandardCharsets.UTF_8;
-import static io.digdag.core.database.DatabaseTestingUtils.*;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
 
 public class DatabaseProjectStoreManagerTest
 {
@@ -197,15 +228,15 @@ public void testGetAndNotFounds()
             ////
             // public simple listings
             //
-            assertEquals(ImmutableList.of(proj1, proj2), store.getProjects(100, Optional.absent(), () -> "true"));
-            assertEquals(ImmutableList.of(proj1), store.getProjects(1, Optional.absent(), () -> "true"));
-            assertEquals(ImmutableList.of(proj2), store.getProjects(100, Optional.of(proj1.getId()), () -> "true"));
-            assertEmpty(anotherSite.getProjects(100, Optional.absent(), () -> "true"));
+            assertEquals(ImmutableList.of(proj1, proj2), store.getProjects(100, Optional.absent(), Optional.absent(), () -> "true"));
+            assertEquals(ImmutableList.of(proj1), store.getProjects(1, Optional.absent(), Optional.absent(), () -> "true"));
+            assertEquals(ImmutableList.of(proj2), store.getProjects(100, Optional.of(proj1.getId()), Optional.absent(), () -> "true"));
+            assertEmpty(anotherSite.getProjects(100, Optional.absent(), Optional.absent(), () -> "true"));
 
-            assertEquals(ImmutableList.of(proj1Rev1, proj2Rev3), store.getProjectsWithLatestRevision(100, Optional.absent(), () -> "true"));
-            assertEquals(ImmutableList.of(proj1Rev1), store.getProjectsWithLatestRevision(1, Optional.absent(), () -> "true"));
-            assertEquals(ImmutableList.of(proj2Rev3), store.getProjectsWithLatestRevision(100, Optional.of(proj1.getId()), () -> "true"));
-            assertEmpty(anotherSite.getProjectsWithLatestRevision(100, Optional.absent(), () -> "true"));
+            assertEquals(ImmutableList.of(proj1Rev1, proj2Rev3), store.getProjectsWithLatestRevision(100, Optional.absent(), Optional.absent(), () -> "true"));
+            assertEquals(ImmutableList.of(proj1Rev1), store.getProjectsWithLatestRevision(1, Optional.absent(), Optional.absent(), () -> "true"));
+            assertEquals(ImmutableList.of(proj2Rev3), store.getProjectsWithLatestRevision(100, Optional.of(proj1.getId()), Optional.absent(), () -> "true"));
+            assertEmpty(anotherSite.getProjectsWithLatestRevision(100, Optional.absent(), Optional.absent(), () -> "true"));
 
             assertEquals(ImmutableList.of(rev3, rev2), store.getRevisions(proj2.getId(), 100, Optional.absent())); // revision is returned in reverse order
             assertEquals(ImmutableList.of(rev3), store.getRevisions(proj2.getId(), 1, Optional.absent()));
@@ -217,10 +248,10 @@ public void testGetAndNotFounds()
             assertEquals(ImmutableList.of(wf4), store.getWorkflowDefinitions(rev3.getId(), 100, Optional.of(wf3.getId()), () -> "true"));
             assertEmpty(anotherSite.getWorkflowDefinitions(rev3.getId(), 100, Optional.absent(), () -> "true"));
 
-            assertEquals(ImmutableList.of(wfDetails1, wfDetails3, wfDetails4), store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), () -> "true"));
-            assertEquals(ImmutableList.of(wfDetails1), store.getLatestActiveWorkflowDefinitions(1, Optional.absent(), () -> "true"));
-            assertEquals(ImmutableList.of(wfDetails4), store.getLatestActiveWorkflowDefinitions(100, Optional.of(wfDetails3.getId()), () -> "true"));
-            assertEmpty(anotherSite.getLatestActiveWorkflowDefinitions(100, Optional.absent(), () -> "true"));
+            assertEquals(ImmutableList.of(wfDetails1, wfDetails3, wfDetails4), store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), Optional.absent(), () -> "true"));
+            assertEquals(ImmutableList.of(wfDetails1), store.getLatestActiveWorkflowDefinitions(1, Optional.absent(), Optional.absent(), () -> "true"));
+            assertEquals(ImmutableList.of(wfDetails4), store.getLatestActiveWorkflowDefinitions(100, Optional.of(wfDetails3.getId()), Optional.absent(), () -> "true"));
+            assertEmpty(anotherSite.getLatestActiveWorkflowDefinitions(100, Optional.absent(), Optional.absent(), () -> "true"));
 
             ////
             // public simple getters
@@ -282,6 +313,71 @@ public void testGetAndNotFounds()
         });
     }
 
+    @Test
+    public void testGetActiveWorkflows()
+            throws Exception
+    {
+        factory.begin(() -> {
+            Project srcProj1 = Project.of("proj1");
+            Revision srcRev1 = createRevision("rev1");
createRevision("rev1"); + WorkflowDefinition srcWf1 = createWorkflow("wf1"); + WorkflowDefinition srcWf2 = createWorkflow("test_wf2"); + WorkflowDefinition srcWf3 = createWorkflow("test_wf3"); + WorkflowDefinition srcWf4 = createWorkflow("wf%4"); + WorkflowDefinition srcWf5 = createWorkflow("wf_5"); + WorkflowDefinition srcWf6 = createWorkflow("wf%_6"); + final AtomicReference revRef = new AtomicReference<>(); + final AtomicReference wfRef1 = new AtomicReference<>(); + final AtomicReference wfRef2 = new AtomicReference<>(); + final AtomicReference wfRef3 = new AtomicReference<>(); + final AtomicReference wfRef4 = new AtomicReference<>(); + final AtomicReference wfRef5 = new AtomicReference<>(); + final AtomicReference wfRef6 = new AtomicReference<>(); + StoredProject proj1 = store.putAndLockProject( + srcProj1, + (store, stored) -> { + ProjectControl lock = new ProjectControl(store, stored); + assertNotConflict(() -> { + revRef.set(lock.insertRevision(srcRev1)); + wfRef1.set(lock.insertWorkflowDefinitions(revRef.get(), ImmutableList.of(srcWf1), sm, Instant.now()).get(0)); + wfRef2.set(lock.insertWorkflowDefinitions(revRef.get(), ImmutableList.of(srcWf2), sm, Instant.now()).get(0)); + wfRef3.set(lock.insertWorkflowDefinitions(revRef.get(), ImmutableList.of(srcWf3), sm, Instant.now()).get(0)); + wfRef4.set(lock.insertWorkflowDefinitions(revRef.get(), ImmutableList.of(srcWf4), sm, Instant.now()).get(0)); + wfRef5.set(lock.insertWorkflowDefinitions(revRef.get(), ImmutableList.of(srcWf5), sm, Instant.now()).get(0)); + wfRef6.set(lock.insertWorkflowDefinitions(revRef.get(), ImmutableList.of(srcWf6), sm, Instant.now()).get(0)); + }); + return lock.get(); + }); + StoredWorkflowDefinition wf1 = wfRef1.get(); + StoredWorkflowDefinition wf2 = wfRef2.get(); + StoredWorkflowDefinition wf3 = wfRef3.get(); + StoredWorkflowDefinition wf4 = wfRef4.get(); + StoredWorkflowDefinition wf5 = wfRef5.get(); + StoredWorkflowDefinition wf6 = wfRef6.get(); + StoredWorkflowDefinitionWithProject wfDetails1 = StoredWorkflowDefinitionWithProject.of(wf1, proj1, srcRev1); + StoredWorkflowDefinitionWithProject wfDetails2 = StoredWorkflowDefinitionWithProject.of(wf2, proj1, srcRev1); + StoredWorkflowDefinitionWithProject wfDetails3 = StoredWorkflowDefinitionWithProject.of(wf3, proj1, srcRev1); + StoredWorkflowDefinitionWithProject wfDetails4 = StoredWorkflowDefinitionWithProject.of(wf4, proj1, srcRev1); + StoredWorkflowDefinitionWithProject wfDetails5 = StoredWorkflowDefinitionWithProject.of(wf5, proj1, srcRev1); + StoredWorkflowDefinitionWithProject wfDetails6 = StoredWorkflowDefinitionWithProject.of(wf6, proj1, srcRev1); + + assertEquals(ImmutableList.of(wfDetails1, wfDetails2, wfDetails3, wfDetails4, wfDetails5, wfDetails6), + store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), Optional.absent(), () -> "true")); + assertEquals(ImmutableList.of(wfDetails1, wfDetails2, wfDetails3, wfDetails4, wfDetails5, wfDetails6), + store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), Optional.fromNullable(""), () -> "true")); + assertEquals(ImmutableList.of(wfDetails2, wfDetails3), + store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), Optional.fromNullable("test"), () -> "true")); + assertEquals(ImmutableList.of(wfDetails4, wfDetails6), + store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), Optional.fromNullable("%"), () -> "true")); + assertEquals(ImmutableList.of(wfDetails2, wfDetails3, wfDetails5, wfDetails6), + store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), 
Optional.fromNullable("_"), () -> "true")); + assertEquals(ImmutableList.of(wfDetails6), + store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), Optional.fromNullable("%_"), () -> "true")); + assertEquals(ImmutableList.of(), + store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), Optional.fromNullable("*"), () -> "true")); + }); + } + @Test public void testRevisionArchiveData() throws Exception @@ -335,9 +431,9 @@ public void testDeleteProject() assertNotFound(() -> store.getProjectByName(deletingProject.getName())); // listing doesn't include deleted projects - assertEquals(ImmutableList.of(), store.getProjects(100, Optional.absent(), () -> "true")); - assertEquals(ImmutableList.of(), store.getProjectsWithLatestRevision(100, Optional.absent(), () -> "true")); - assertEquals(ImmutableList.of(), store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), () -> "true")); + assertEquals(ImmutableList.of(), store.getProjects(100, Optional.absent(), Optional.absent(), () -> "true")); + assertEquals(ImmutableList.of(), store.getProjectsWithLatestRevision(100, Optional.absent(), Optional.absent(), () -> "true")); + assertEquals(ImmutableList.of(), store.getLatestActiveWorkflowDefinitions(100, Optional.absent(), Optional.absent(), () -> "true")); // lookup by project/revision id succeeds and deletedAt is set StoredProject deletedProj = store.getProjectById(deletingProject.getId()); @@ -362,4 +458,58 @@ public void testDeleteProject() assertNotEquals(sameName.getId(), deletingProject.getId()); }); } + + @Test + public void testGetProjects() + throws Exception + { + factory.begin(() -> { + StoredProject proj1 = createTestProject("proj1abc", "proj1_rev1", "proj1_wf1"); + StoredProject proj2 = createTestProject("proj2def", "proj2_rev1", "proj2_wf1"); + StoredProject proj3 = createTestProject("proj3ghi", "proj3_rev1", "proj3_wf1"); + StoredProject proj4 = createTestProject("other4jkl", "other4_rev1", "other4_wf1"); + List projects; + + projects = store.getProjects(100, Optional.absent(), Optional.absent(), () -> "true"); + assertEquals(ImmutableList.of("proj1abc", "proj2def", "proj3ghi", "other4jkl"), projects.stream().map( (x) -> x.getName()).collect(Collectors.toList())); + + // Check pageSize + projects = store.getProjects(2, Optional.absent(), Optional.absent(), () -> "true"); + assertEquals(ImmutableList.of("proj1abc", "proj2def"), projects.stream().map( (x) -> x.getName()).collect(Collectors.toList())); + + // Check lastId + projects = store.getProjects(2, Optional.of(proj2.getId()), Optional.absent(), () -> "true"); + assertEquals(ImmutableList.of("proj3ghi", "other4jkl"), projects.stream().map( (x) -> x.getName()).collect(Collectors.toList())); + + // Check namePattern + projects = store.getProjects(100, Optional.absent(), Optional.of("proj"), () -> "true"); + assertEquals(ImmutableList.of("proj1abc", "proj2def", "proj3ghi"), projects.stream().map( (x) -> x.getName()).collect(Collectors.toList())); + + // Check namePattern + projects = store.getProjects(100, Optional.absent(), Optional.of("ghi"), () -> "true"); + assertEquals(ImmutableList.of("proj3ghi"), projects.stream().map( (x) -> x.getName()).collect(Collectors.toList())); + + // Check combination + projects = store.getProjects(100, Optional.of(proj1.getId()), Optional.of("proj"), () -> "true"); + assertEquals(ImmutableList.of("proj2def", "proj3ghi"), projects.stream().map( (x) -> x.getName()).collect(Collectors.toList())); + }); + } + + private StoredProject createTestProject(String projectName, String revision, 
+            throws ResourceConflictException
+    {
+        Project srcProj1 = Project.of(projectName);
+        Revision srcRev1 = createRevision(revision);
+        WorkflowDefinition srcWf1 = createWorkflow(workflowName);
+        return store.putAndLockProject(
+                srcProj1,
+                (store, stored) -> {
+                    ProjectControl lock = new ProjectControl(store, stored);
+                    assertNotConflict(() -> {
+                        StoredRevision rev1 = lock.insertRevision(srcRev1);
+                        lock.insertWorkflowDefinitions(rev1, ImmutableList.of(srcWf1), sm, Instant.now());
+                    });
+                    return lock.get();
+                });
+    }
 }
diff --git a/digdag-server/src/main/java/io/digdag/server/rs/ProjectResource.java b/digdag-server/src/main/java/io/digdag/server/rs/ProjectResource.java
index a760534c92..d716f243bb 100644
--- a/digdag-server/src/main/java/io/digdag/server/rs/ProjectResource.java
+++ b/digdag-server/src/main/java/io/digdag/server/rs/ProjectResource.java
@@ -243,7 +243,13 @@ public RestProject getProject(@QueryParam("name") String name)
     @ApiOperation("List projects with filters")
     public RestProjectCollection getProjects(
             @ApiParam(value="exact matching filter on project name", required=false)
-            @QueryParam("name") String name)
+            @QueryParam("name") String name,
+            @ApiParam(value="list projects whose id is greater than this id for pagination", required=false)
+            @QueryParam("last_id") Integer lastId,
+            @ApiParam(value="number of projects to return", required=false)
+            @QueryParam("count") Integer count,
+            @ApiParam(value="name pattern to be partially matched", required=false)
+            @QueryParam("name_pattern") String namePattern)
     {
         return tm.begin(() -> {
             ProjectStore ps = rm.getProjectStore(getSiteId());
@@ -274,7 +280,10 @@ public RestProjectCollection getProjects(
                         siteTarget,
                         getAuthenticatedUser());
 
-                collection = ps.getProjectsWithLatestRevision(100, Optional.absent(),
+                collection = ps.getProjectsWithLatestRevision(
+                        Optional.fromNullable(count).or(100),
+                        Optional.fromNullable(lastId),
+                        Optional.fromNullable(namePattern),
                         ac.getListProjectsFilterOfSite(siteTarget, getAuthenticatedUser()))
                         .stream()
                         .map(projWithRev -> {
diff --git a/digdag-server/src/main/java/io/digdag/server/rs/WorkflowResource.java b/digdag-server/src/main/java/io/digdag/server/rs/WorkflowResource.java
index 70aa73129b..ffe504c6fd 100644
--- a/digdag-server/src/main/java/io/digdag/server/rs/WorkflowResource.java
+++ b/digdag-server/src/main/java/io/digdag/server/rs/WorkflowResource.java
@@ -113,7 +113,10 @@ public RestWorkflowDefinitionCollection getWorkflowDefinitions(
             @ApiParam(value="list workflows whose id is grater than this id for pagination", required=false)
             @QueryParam("last_id") Long lastId,
             @ApiParam(value="number of workflows to return", required=false)
-            @QueryParam("count") Integer count)
+            @QueryParam("count") Integer count,
+            @ApiParam(value="name pattern to be partially matched", required=false)
+            @QueryParam("name_pattern") String namePattern
+            )
         throws ResourceNotFoundException, AccessControlException
     {
         final SiteTarget siteTarget = SiteTarget.of(getSiteId());
@@ -123,6 +126,7 @@ public RestWorkflowDefinitionCollection getWorkflowDefinitions(
             List<StoredWorkflowDefinitionWithProject> defs = rm.getProjectStore(getSiteId())
                     .getLatestActiveWorkflowDefinitions(Optional.fromNullable(count).or(100),
                             Optional.fromNullable(lastId), // check NotFound first
+                            Optional.fromNullable(namePattern),
                             ac.getListWorkflowsFilterOfSite(
                                     SiteTarget.of(getSiteId()),
                                     getAuthenticatedUser()));
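
For reference, the behavior of the new `name_pattern` filter follows from the `generatePartialMatchPattern` and `escapeLikePattern` helpers added to `DatabaseProjectStoreManager` above. The standalone sketch below mirrors those two helpers so the resulting SQL LIKE pattern can be inspected in isolation; the `NamePatternSketch` class and its `main` harness are illustrative only and are not part of this patch.

import com.google.common.base.Optional;

// Illustrative only: copies the two private helpers added in this diff so their
// LIKE-pattern behavior can be exercised outside of the DAO layer.
public class NamePatternSketch
{
    static String generatePartialMatchPattern(Optional<String> pattern)
    {
        // Absent or empty input degrades to '%', so the LIKE clause matches every name.
        return !pattern.or("").isEmpty() ? "%" + escapeLikePattern(pattern.get()) + "%" : "%";
    }

    static String escapeLikePattern(String pattern)
    {
        // '%' and '_' are LIKE wildcards; escape them so user input matches literally.
        return pattern.replace("%", "\\%")
                .replace("_", "\\_");
    }

    public static void main(String[] args)
    {
        System.out.println(generatePartialMatchPattern(Optional.absent()));    // %
        System.out.println(generatePartialMatchPattern(Optional.of("test")));  // %test%
        System.out.println(generatePartialMatchPattern(Optional.of("wf%_6"))); // %wf\%\_6%
    }
}

An absent or empty `name_pattern` value therefore applies no filtering, while `%` and `_` in the input are escaped and match only literally, which is what the `testGetActiveWorkflows` cases above assert for the new `name_pattern` query parameter on the project and workflow list endpoints.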