diff --git a/deployment/bin/hassu.ts b/deployment/bin/hassu.ts
index 0cc549085..531c6c5a5 100644
--- a/deployment/bin/hassu.ts
+++ b/deployment/bin/hassu.ts
@@ -34,6 +34,7 @@ async function main() {
   const hassuFrontendStack = new HassuFrontendStack(app, {
     internalBucket: hassuDatabaseStack.internalBucket,
     projektiTable: hassuDatabaseStack.projektiTable,
+    aineistoImportQueue: hassuBackendStack.aineistoImportQueue,
   });
   await hassuFrontendStack.process().catch((e) => {
     console.log("Deployment of HassuFrontendStack failed:", e);
diff --git a/deployment/lib/hassu-backend.ts b/deployment/lib/hassu-backend.ts
index adc5a98d5..553e63199 100644
--- a/deployment/lib/hassu-backend.ts
+++ b/deployment/lib/hassu-backend.ts
@@ -46,6 +46,7 @@ export const backendStackName = "hassu-backend-" + Config.env;
 export class HassuBackendStack extends Stack {
   private readonly props: HassuBackendStackProps;
   private baseLayer: lambda.LayerVersion;
+  public aineistoImportQueue: Queue;
 
   constructor(scope: App, props: HassuBackendStackProps) {
     const terminationProtection = Config.getEnvConfig().terminationProtection;
@@ -77,6 +78,7 @@ export class HassuBackendStack extends Stack {
     const personSearchUpdaterLambda = await this.createPersonSearchUpdaterLambda(commonEnvironmentVariables);
 
     const aineistoSQS = await this.createAineistoImporterQueue();
+    this.aineistoImportQueue = aineistoSQS;
     const emailSQS = await this.createEmailQueueSystem();
     const pdfGeneratorLambda = await this.createPdfGeneratorLambda(config);
     const yllapitoBackendLambda = await this.createBackendLambda(
diff --git a/deployment/lib/hassu-frontend.ts b/deployment/lib/hassu-frontend.ts
index 7aeacc4b1..ff6fb64ca 100644
--- a/deployment/lib/hassu-frontend.ts
+++ b/deployment/lib/hassu-frontend.ts
@@ -29,6 +29,7 @@ import { readAccountStackOutputs, readBackendStackOutputs, readDatabaseStackOutp
 import { IOriginAccessIdentity } from "aws-cdk-lib/aws-cloudfront/lib/origin-access-identity";
 import { getOpenSearchDomain } from "./common";
 import { Table } from "aws-cdk-lib/aws-dynamodb";
+import { Queue } from "aws-cdk-lib/aws-sqs";
 
 // These should correspond to CfnOutputs produced by this stack
 export type FrontendStackOutputs = {
@@ -40,6 +41,7 @@ export type FrontendStackOutputs = {
 interface HassuFrontendStackProps {
   internalBucket: Bucket;
   projektiTable: Table;
+  aineistoImportQueue: Queue;
 }
 
 const REGION = "us-east-1";
@@ -78,7 +80,6 @@ export class HassuFrontendStack extends Stack {
     this.cloudFrontOriginAccessIdentityReportBucket =
       (await readPipelineStackOutputs()).CloudfrontOriginAccessIdentityReportBucket || ""; // Empty default string for localstack deployment
     const accountStackOutputs = await readAccountStackOutputs();
-    const { AineistoImportSqsUrl } = await readBackendStackOutputs();
 
     await new Builder(".", "./build", {
       enableHTTPCompression: true,
@@ -92,7 +93,7 @@ export class HassuFrontendStack extends Stack {
         TABLE_PROJEKTI: Config.projektiTableName,
         SEARCH_DOMAIN: accountStackOutputs.SearchDomainEndpointOutput,
         INTERNAL_BUCKET_NAME: Config.internalBucketName,
-        AINEISTO_IMPORT_SQS_URL: AineistoImportSqsUrl,
+        AINEISTO_IMPORT_SQS_URL: this.props.aineistoImportQueue.queueUrl,
       },
     }).build();
 
@@ -200,9 +201,11 @@ export class HassuFrontendStack extends Stack {
     const searchDomain = await getOpenSearchDomain(this, accountStackOutputs);
     if (nextJSLambdaEdge.nextApiLambda) {
       searchDomain.grantIndexReadWrite("projekti-" + Config.env + "-*", nextJSLambdaEdge.nextApiLambda);
-      if (env !== "prod") {
-        const projektiTable = this.props.projektiTable;
-        projektiTable.grantReadWriteData(nextJSLambdaEdge.nextApiLambda);
+      const environmentsBlacklistedFromTimeShift = ["prod", "training"];
+      const isEnvironmentBlacklistedFromTimeShift = environmentsBlacklistedFromTimeShift.includes(env);
+      if (!isEnvironmentBlacklistedFromTimeShift) {
+        this.props.projektiTable.grantReadWriteData(nextJSLambdaEdge.nextApiLambda);
+        this.props.aineistoImportQueue.grantSendMessages(nextJSLambdaEdge.nextApiLambda);
       }
     }
 