Skip to content

Commit

Permalink
Merge branch 'master' into feature/add-cors-origin
Browse files Browse the repository at this point in the history
  • Loading branch information
halbekanne committed Jul 16, 2023
2 parents a7a1adb + ac10493 commit 0a91c86
Show file tree
Hide file tree
Showing 8 changed files with 184 additions and 30 deletions.
70 changes: 43 additions & 27 deletions Readme.md
Original file line number Diff line number Diff line change
@@ -1,24 +1,29 @@
# Dancer
This is the backend for dancier.
This is the backend (for frontend) for dancier.

## Working locally

### Setting up the database

Just start the database with the provided docker-compose.yml.
It will expose the port locally and will also setup a GUI via pg-admin.
You set up the database and close it to change admin rights. Then you open it again.
````sh
docker-compose up -d
docker-compose down
cd volumes/
ls -l
docker logs dancer_pg-admin_1
sudo chown 5050:5050 -Rv pg-admin-data/
cd ..
docker-compose up -d
We work locally with a docker-compose setup that launches every needed service.

Before you run it for the first time, you have to complete the following setup.

### Setting everything up

You need to have Maven, Docker and docker-compose installed.

Then in the project root folder create the following directories with the proper rights.

````bash
# for the pg-admin volume
mkdir -p volumes/pg-admin-data
sudo chown 5050:5050 volumes/pg-admin-data

# for the kafka volume

mkdir -p volumes/kafka
sudo chown 1001:1001 volumes/kafka
````

You can now access the database GUI with your browser:

[PG-Admin](http://localhost:5050)
Expand All @@ -33,24 +38,35 @@ Here you can configure the connection to the postgres instance:
|--------|--------|----|----|
|dancer-db|dancer|dancer|dancer|

### Run the dancer locally
````shell
./mvnw spring-boot:run
````
This will bootstrap the database. You can start using it.
See the api-documentation to see what you can do:

[OpenApi](https://editor.swagger.io/?url=https%3A%2F%2Fraw.luolix.top%2Fdancier%2Fdancer%2Fmaster%2Fopenapi.yml)
### Building and running the dancer

### Building
#### Without tests, updating the running docker environment
(assuming the docker-compose setup is up and running)
````bash
./mvnw clean install -DskipTests; docker-compose up --build -d dancer
````
#### Building with tests
````bash
./mvnw clean install
````
#### Running the dancer outside of docker-compose
(assuming the docker-compose setup is up and running)

`./mvnw clean install`

This will also run the test _and_ integration tests.
````shell
# stopping dancer in docker-compose
docker-compose stop dancer;
# running the boot app with overwriting the needed host
./mvnw spring-boot:run -Dspring-boot.run.arguments="--spring.datasource.url=jdbc:postgresql://localhost:5432/dancer --spring.kafka.bootstrap-servers=localhost:9092"
````

You can then inspect the test-coverage:
#### checking test-coverage
After running the tests, you can inspect the coverage report at
[target/site/jacoco/index.html](target/site/jacoco/index.html).
### Accessing the API-Definition
[OpenApi](https://editor.swagger.io/?url=https%3A%2F%2Fraw.luolix.top%2Fdancier%2Fdancer%2Fmaster%2Fopenapi.yml)

### Local Mailing
When working locally the mailing-system of the backend is configured to _not_ send the mails, but to dump them only to the log.
Expand Down
40 changes: 39 additions & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ services:
- ./volumes/dancer-data:/var/lib/postgresql/data

pg-admin:
image: dpage/pgadmin4:6.15
image: dpage/pgadmin4:7
restart: always
ports:
- "5050:80"
Expand All @@ -43,3 +43,41 @@ services:
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml

kafka:
container_name: kafka
image: bitnami/kafka:3.3.1
environment:
KAFKA_ENABLE_KRAFT: 'yes'
KAFKA_KRAFT_CLUSTER_ID: r7dMBY60T16TrNCGeXniLw
KAFKA_CFG_PROCESS_ROLES: broker,controller
KAFKA_CFG_CONTROLLER_LISTENER_NAMES: CONTROLLER
KAFKA_CFG_LISTENERS: BROKER://:9092 ,CONTROLLER://:9093, LOCALHOST://:9081
KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: BROKER:PLAINTEXT, CONTROLLER:PLAINTEXT, LOCALHOST:PLAINTEXT
KAFKA_CFG_ADVERTISED_LISTENERS: BROKER://kafka:9092, LOCALHOST://localhost:9081
KAFKA_BROKER_ID: 1
KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: 1@kafka:9093
ALLOW_PLAINTEXT_LISTENER: 'yes'
KAFKA_CFG_INTER_BROKER_LISTENER_NAME: BROKER
KAFKA_CFG_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: "false"
volumes:
- ./volumes/kafka:/bitnami/kafka
ports:
- 127.0.0.1:9092:9081
- 127.0.0.1:9081:9081
kafka-ui:
container_name: kafka-ui
image: tchiotludo/akhq:latest
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8080/ui"]
interval: 10s
timeout: 2s
ports:
- "8088:8080"
environment:
AKHQ_CONFIGURATION: |
akhq:
connections:
docker-kafka-server:
properties:
bootstrap.servers: "kafka:9092"
4 changes: 4 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,10 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-mail</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ private void handleRegistrationAttemptOfAlreadyExistingAccount(User user) {
}

private String passwordResetLink(String passwordResetCode) {
return frontendBaseName + "/authentication/change-password/" + passwordResetCode;
return frontendBaseName + "/registration/reset-password/" + passwordResetCode;
}

private String emailValidationLink(String validationCode) {
Expand Down Expand Up @@ -186,7 +186,7 @@ public User checkEmailValidationCode(String code) {
.findByCode(code).orElseThrow(() ->new ApplicationException("Unable to validate"));
if (emailValidationCode.getExpiresAt().isBefore(Instant.now())) {
throw new ApplicationException("Unable to Validate, code already expired");
};
}
User user = userRepository.findById(emailValidationCode
.getUserId())
.orElseThrow(() -> new ApplicationException("No user associated with this code."));
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package net.dancier.dancer.core.config;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

@Configuration
public class KafkaProducerConfiguration {

private static Logger log = LoggerFactory.getLogger(KafkaProducerConfiguration.class);
@Value(value = "${spring.kafka.bootstrap-servers}")
private String bootstrapAddress;

@Bean
ProducerFactory<String, String> producerFactory() {
Map<String, Object> configProps = new HashMap<>();
configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
configProps.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, "20971520");
return new DefaultKafkaProducerFactory<>(configProps);
}

@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
return new KafkaTemplate<>(producerFactory());
}

}
35 changes: 35 additions & 0 deletions src/main/java/net/dancier/dancer/core/config/KafkaTopicConfig.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
package net.dancier.dancer.core.config;

import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaAdmin;

import java.util.HashMap;
import java.util.Map;

@Configuration
public class KafkaTopicConfig {
private static Logger log = LoggerFactory.getLogger(KafkaTopicConfig.class);

@Value(value = "${spring.kafka.bootstrap-servers}")
private String bootstrapAddress;

@Bean
public KafkaAdmin kafkaAdmin() {
Map<String, Object> configs = new HashMap<>();
configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
return new KafkaAdmin(configs);
}

@Bean
public NewTopic profileUpdated() {
return new NewTopic("profile-updated", 1, (short) 1);
}

}
20 changes: 20 additions & 0 deletions src/main/java/net/dancier/dancer/output/OutboxJob.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
package net.dancier.dancer.output;

import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Profile;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Profile("!test")
@Component
@RequiredArgsConstructor
public class OutboxJob {
private final Logger log = LoggerFactory.getLogger(OutboxJob.class);

@Scheduled(fixedRate = 10000)
public void process() {
log.debug("Sending out events");
}
}
2 changes: 2 additions & 0 deletions src/main/resources/application.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
spring:
kafka:
bootstrap-servers: kafka:9092
jpa:
show-sql: false
hibernate:
Expand Down

0 comments on commit 0a91c86

Please sign in to comment.