
Commit e02abe3
Merge branch 'main' into show-results-user-access
Abhitator216 authored Jun 25, 2024
2 parents 1fbaedf + ea74f3e commit e02abe3
Showing 45 changed files with 1,163 additions and 107 deletions.
23 changes: 23 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,29 @@ All notable changes to HyperSwitch will be documented here.

- - -

## 2024.06.25.0

### Features

- **ci:** Add vector to handle logs pipeline ([#5021](https://github.com/juspay/hyperswitch/pull/5021)) ([`fed7b69`](https://github.com/juspay/hyperswitch/commit/fed7b697995b37bf3ef198121de571c6e338863c))
- **router:** Add support for googlepay step up flow ([#2744](https://github.com/juspay/hyperswitch/pull/2744)) ([`ff84d78`](https://github.com/juspay/hyperswitch/commit/ff84d78c6512f70d761148274e97286f5cf021dd))
- **users:** Decision manager flow changes for SSO ([#4995](https://github.com/juspay/hyperswitch/pull/4995)) ([`8ceaaa9`](https://github.com/juspay/hyperswitch/commit/8ceaaa9e3d95558a7252a9a986b39c8377426857))
- Added kafka events for authentication create and update ([#4991](https://github.com/juspay/hyperswitch/pull/4991)) ([`10e9121`](https://github.com/juspay/hyperswitch/commit/10e9121341fe25d195f4c9a25dcc383c2ffd0c95))

### Bug Fixes

- **access_token:** Use `merchant_connector_id` in access token ([#5106](https://github.com/juspay/hyperswitch/pull/5106)) ([`b7bf457`](https://github.com/juspay/hyperswitch/commit/b7bf457d0cdcc4d4947b5750a7982aca85d3a7e9))

### Refactors

- **core:** Introduce an interface to switch between old and new connector integration implementations on the connectors ([#5013](https://github.com/juspay/hyperswitch/pull/5013)) ([`e658899`](https://github.com/juspay/hyperswitch/commit/e658899c1406225bb905ce4fb76e13fa3609666e))
- **events:** Populate object identifiers in outgoing webhooks analytics events during retries ([#5067](https://github.com/juspay/hyperswitch/pull/5067)) ([`b878405`](https://github.com/juspay/hyperswitch/commit/b87840595d4bc325d37779512dc5504a8a613e5d))
- [Fiserv] Remove Default Case Handling ([#4767](https://github.com/juspay/hyperswitch/pull/4767)) ([`9caabef`](https://github.com/juspay/hyperswitch/commit/9caabeff86dfb93b29d6f734e6724f4d69bdda4e))

**Full Changelog:** [`2024.06.24.0...2024.06.25.0`](https://github.com/juspay/hyperswitch/compare/2024.06.24.0...2024.06.25.0)

- - -

## 2024.06.24.0

### Features
30 changes: 30 additions & 0 deletions api-reference/openapi_spec.json
@@ -9684,6 +9684,23 @@
"GoPayRedirection": {
"type": "object"
},
"GooglePayAssuranceDetails": {
"type": "object",
"required": [
"card_holder_authenticated",
"account_verified"
],
"properties": {
"card_holder_authenticated": {
"type": "boolean",
"description": "indicates that Cardholder possession validation has been performed"
},
"account_verified": {
"type": "boolean",
"description": "indicates that identification and verifications (ID&V) was performed"
}
}
},
"GooglePayPaymentMethodInfo": {
"type": "object",
"required": [
@@ -9698,6 +9715,14 @@
"card_details": {
"type": "string",
"description": "The details of the card"
},
"assurance_details": {
"allOf": [
{
"$ref": "#/components/schemas/GooglePayAssuranceDetails"
}
],
"nullable": true
}
}
},
@@ -9845,6 +9870,11 @@
}
],
"nullable": true
},
"assurance_details_required": {
"type": "boolean",
"description": "Whether assurance details are required",
"nullable": true
}
}
},
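For reference, a minimal sketch of how the new `assurance_details` field could appear inside a `GooglePayPaymentMethodInfo` object, based only on the schema fields shown above (the `card_details` value is illustrative):

```json
{
  "card_details": "1234",
  "assurance_details": {
    "card_holder_authenticated": true,
    "account_verified": true
  }
}
```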
23 changes: 12 additions & 11 deletions config/config.example.toml
@@ -587,17 +587,18 @@ enabled = true # Switch to enable or disable PayPal onboard
source = "logs" # The event sink to push events supports kafka or logs (stdout)

[events.kafka]
brokers = [] # Kafka broker urls for bootstrapping the client
intent_analytics_topic = "topic" # Kafka topic to be used for PaymentIntent events
attempt_analytics_topic = "topic" # Kafka topic to be used for PaymentAttempt events
refund_analytics_topic = "topic" # Kafka topic to be used for Refund events
api_logs_topic = "topic" # Kafka topic to be used for incoming api events
connector_logs_topic = "topic" # Kafka topic to be used for connector api events
outgoing_webhook_logs_topic = "topic" # Kafka topic to be used for outgoing webhook events
dispute_analytics_topic = "topic" # Kafka topic to be used for Dispute events
audit_events_topic = "topic" # Kafka topic to be used for Payment Audit events
payout_analytics_topic = "topic" # Kafka topic to be used for Payouts and PayoutAttempt events
consolidated_events_topic = "topic" # Kafka topic to be used for Consolidated events
brokers = [] # Kafka broker urls for bootstrapping the client
intent_analytics_topic = "topic" # Kafka topic to be used for PaymentIntent events
attempt_analytics_topic = "topic" # Kafka topic to be used for PaymentAttempt events
refund_analytics_topic = "topic" # Kafka topic to be used for Refund events
api_logs_topic = "topic" # Kafka topic to be used for incoming api events
connector_logs_topic = "topic" # Kafka topic to be used for connector api events
outgoing_webhook_logs_topic = "topic" # Kafka topic to be used for outgoing webhook events
dispute_analytics_topic = "topic" # Kafka topic to be used for Dispute events
audit_events_topic = "topic" # Kafka topic to be used for Payment Audit events
payout_analytics_topic = "topic" # Kafka topic to be used for Payouts and PayoutAttempt events
consolidated_events_topic = "topic" # Kafka topic to be used for Consolidated events
authentication_analytics_topic = "topic" # Kafka topic to be used for Authentication events

# File storage configuration
[file_storage]
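As a minimal sketch, a deployment enabling the new topic in the `[events.kafka]` section above might set only the values below; the broker address is taken from the Vector config further down, the topic name from `config/development.toml`, and the remaining topic keys are omitted for brevity:

```toml
[events.kafka]
brokers = ["kafka0:29092"]                                            # Kafka broker urls for bootstrapping the client
authentication_analytics_topic = "hyperswitch-authentication-events" # new Kafka topic for Authentication events
```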
2 changes: 1 addition & 1 deletion config/dashboard.toml
@@ -31,7 +31,7 @@ surcharge=false
dispute_evidence_upload=false
paypal_automatic_flow=false
threeds_authenticator=false
global_search=false
global_search=true
dispute_analytics=true
configure_pmts=false
branding=false
23 changes: 12 additions & 11 deletions config/deployments/env_specific.toml
@@ -71,17 +71,18 @@ sts_role_session_name = "" # An identifier for the assumed role session, used to
source = "logs" # The event sink to push events supports kafka or logs (stdout)

[events.kafka]
brokers = [] # Kafka broker urls for bootstrapping the client
intent_analytics_topic = "topic" # Kafka topic to be used for PaymentIntent events
attempt_analytics_topic = "topic" # Kafka topic to be used for PaymentAttempt events
refund_analytics_topic = "topic" # Kafka topic to be used for Refund events
api_logs_topic = "topic" # Kafka topic to be used for incoming api events
connector_logs_topic = "topic" # Kafka topic to be used for connector api events
outgoing_webhook_logs_topic = "topic" # Kafka topic to be used for outgoing webhook events
dispute_analytics_topic = "topic" # Kafka topic to be used for Dispute events
audit_events_topic = "topic" # Kafka topic to be used for Payment Audit events
payout_analytics_topic = "topic" # Kafka topic to be used for Payouts and PayoutAttempt events
consolidated_events_topic = "topic" # Kafka topic to be used for Consolidated events
brokers = [] # Kafka broker urls for bootstrapping the client
intent_analytics_topic = "topic" # Kafka topic to be used for PaymentIntent events
attempt_analytics_topic = "topic" # Kafka topic to be used for PaymentAttempt events
refund_analytics_topic = "topic" # Kafka topic to be used for Refund events
api_logs_topic = "topic" # Kafka topic to be used for incoming api events
connector_logs_topic = "topic" # Kafka topic to be used for connector api events
outgoing_webhook_logs_topic = "topic" # Kafka topic to be used for outgoing webhook events
dispute_analytics_topic = "topic" # Kafka topic to be used for Dispute events
audit_events_topic = "topic" # Kafka topic to be used for Payment Audit events
payout_analytics_topic = "topic" # Kafka topic to be used for Payouts and PayoutAttempt events
consolidated_events_topic = "topic" # Kafka topic to be used for Consolidated events
authentication_analytics_topic = "topic" # Kafka topic to be used for Authentication events

# File storage configuration
[file_storage]
1 change: 1 addition & 0 deletions config/development.toml
@@ -598,6 +598,7 @@ dispute_analytics_topic = "hyperswitch-dispute-events"
audit_events_topic = "hyperswitch-audit-events"
payout_analytics_topic = "hyperswitch-payout-events"
consolidated_events_topic = "hyperswitch-consolidated-events"
authentication_analytics_topic = "hyperswitch-authentication-events"

[analytics]
source = "sqlx"
1 change: 1 addition & 0 deletions config/docker_compose.toml
@@ -442,6 +442,7 @@ dispute_analytics_topic = "hyperswitch-dispute-events"
audit_events_topic = "hyperswitch-audit-events"
payout_analytics_topic = "hyperswitch-payout-events"
consolidated_events_topic = "hyperswitch-consolidated-events"
authentication_analytics_topic = "hyperswitch-authentication-events"

[analytics]
source = "sqlx"
8 changes: 8 additions & 0 deletions config/prometheus.yaml
@@ -35,3 +35,11 @@ scrape_configs:

static_configs:
- targets: ["otel-collector:8888"]

- job_name: "vector"

# metrics_path defaults to '/metrics'
# scheme defaults to 'http'.

static_configs:
- targets: ["vector:9598"]
134 changes: 134 additions & 0 deletions config/vector.yaml
@@ -0,0 +1,134 @@
acknowledgements:
enabled: true

api:
enabled: true
address: 0.0.0.0:8686

sources:
kafka_tx_events:
type: kafka
bootstrap_servers: kafka0:29092
group_id: sessionizer
topics:
- hyperswitch-payment-attempt-events
- hyperswitch-payment-intent-events
- hyperswitch-refund-events
- hyperswitch-dispute-events
decoding:
codec: json

app_logs:
type: docker_logs
include_labels:
- "logs=promtail"

vector_metrics:
type: internal_metrics

node_metrics:
type: host_metrics

transforms:
plus_1_events:
type: filter
inputs:
- kafka_tx_events
condition: ".sign_flag == 1"

hs_server_logs:
type: filter
inputs:
- app_logs
condition: '.labels."com.docker.compose.service" == "hyperswitch-server"'

parsed_hs_server_logs:
type: remap
inputs:
- app_logs
source: |-
.message = parse_json!(.message)
events:
type: remap
inputs:
- plus_1_events
source: |-
.timestamp = from_unix_timestamp!(.created_at, unit: "seconds")
sinks:
opensearch_events:
type: elasticsearch
inputs:
- events
endpoints:
- "https://opensearch:9200"
id_key: message_key
api_version: v7
tls:
verify_certificate: false
verify_hostname: false
auth:
strategy: basic
user: admin
password: 0penS3arc#
encoding:
except_fields:
- message_key
- offset
- partition
- topic
bulk:
# Add a date prefixed index for better grouping
# index: "vector-{{ .topic }}-%Y-%m-%d"
index: "{{ .topic }}"

opensearch_logs:
type: elasticsearch
inputs:
- parsed_hs_server_logs
endpoints:
- "https://opensearch:9200"
api_version: v7
tls:
verify_certificate: false
verify_hostname: false
auth:
strategy: basic
user: admin
password: 0penS3arc#
bulk:
# Add a date prefixed index for better grouping
# index: "vector-{{ .topic }}-%Y-%m-%d"
index: "logs-{{ .container_name }}-%Y-%m-%d"

log_events:
type: loki
inputs:
- kafka_tx_events
endpoint: http://loki:3100
labels:
source: vector
topic: "{{ .topic }}"
job: kafka
encoding:
codec: json

log_app_loki:
type: loki
inputs:
- parsed_hs_server_logs
endpoint: http://loki:3100
labels:
source: vector
job: app_logs
container: "{{ .container_name }}"
stream: "{{ .stream }}"
encoding:
codec: json

metrics:
type: prometheus_exporter
inputs:
- vector_metrics
- node_metrics
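
To illustrate the `events` remap transform above: it converts the integer `created_at` field of each `sign_flag == 1` event into a timestamp, which serializes as an RFC 3339 string in the JSON-encoded sinks. A sketch of a resulting event, assuming an illustrative `created_at` of 1719273600 (2024-06-25T00:00:00Z) and omitting all other event fields:

```json
{
  "sign_flag": 1,
  "created_at": 1719273600,
  "timestamp": "2024-06-25T00:00:00Z"
}
```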
