Support custom host name for SegmentIntegration #739

Merged: 2 commits, Dec 8, 2020
analytics/src/main/java/com/segment/analytics/Analytics.java (31 changes: 28 additions & 3 deletions)
@@ -237,7 +237,8 @@ public static void setSingletonInstance(Analytics analytics) {
@NonNull final ValueMap defaultProjectSettings,
@NonNull Lifecycle lifecycle,
boolean nanosecondTimestamps,
- boolean useNewLifecycleMethods) {
+ boolean useNewLifecycleMethods,
+ String defaultApiHost) {
this.application = application;
this.networkExecutor = networkExecutor;
this.stats = stats;
@@ -280,7 +281,8 @@ public void run() {
// ...defaultProjectSettings.integrations
// Segment.io: {
// ...defaultProjectSettings.integrations.Segment.io
- // apiKey: "{writeKey}"
+ // apiKey: "{writeKey}",
+ // apiHost: "{defaultApiHost}"
// }
// }
// }
@@ -309,6 +311,18 @@ public void run() {
edgeFunctionMiddleware.setEdgeFunctionData(
projectSettings.edgeFunctions());
}
+ boolean apiHostSet =
+ projectSettings
+ .getValueMap("integrations")
+ .getValueMap("Segment.io")
+ .containsKey("apiHost");
+ if (!apiHostSet) {
+ // Use default apiHost region
+ projectSettings
+ .getValueMap("integrations")
+ .getValueMap("Segment.io")
+ .putValue("apiHost", defaultApiHost);
+ }
HANDLER.post(
new Runnable() {
@Override
@@ -1068,6 +1082,7 @@ public static class Builder {
private Crypto crypto;
private ValueMap defaultProjectSettings = new ValueMap();
private boolean useNewLifecycleMethods = true; // opt-out feature
+ private String defaultApiHost = Utils.DEFAULT_API_HOST;

/** Start building a new {@link Analytics} instance. */
public Builder(Context context, String writeKey) {
@@ -1377,6 +1392,15 @@ public Builder defaultProjectSettings(ValueMap defaultProjectSettings) {
return this;
}

+ /**
+ * Set the apiHost name for the region to which Segment sends events. Defaults to
+ * "api.segment.io/v1"
+ */
+ public Builder defaultApiHost(String apiHost) {
+ this.defaultApiHost = apiHost;
+ return this;
+ }
+
/**
* The executor on which payloads are dispatched asynchronously. This is not exposed
* publicly.
@@ -1499,7 +1523,8 @@ public Analytics build() {
defaultProjectSettings,
lifecycle,
nanosecondTimestamps,
- useNewLifecycleMethods);
+ useNewLifecycleMethods,
+ defaultApiHost);
}
}

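Taken together, the Analytics.java changes make the remote project settings win: if the CDN-delivered settings already carry an apiHost under integrations.Segment.io, it is left untouched, and the builder's defaultApiHost is only written in as a fallback. A minimal usage sketch, assuming a made-up host name (this PR does not define any regional endpoints):

    // The host string is host plus version path, with no scheme: the client
    // itself prepends "https://" and appends "/import" when uploading.
    Analytics analytics =
        new Analytics.Builder(getApplicationContext(), "YOUR_WRITE_KEY")
            .defaultApiHost("events.custom.example/v1") // hypothetical host
            .build();

If defaultApiHost(...) is never called, the builder falls back to Utils.DEFAULT_API_HOST ("api.segment.io/v1"), so existing integrations keep their current behavior.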
analytics/src/main/java/com/segment/analytics/Client.java (4 changes: 2 additions & 2 deletions)
@@ -97,8 +97,8 @@ public void close() throws IOException {
this.connectionFactory = connectionFactory;
}

- Connection upload() throws IOException {
- HttpURLConnection connection = connectionFactory.upload(writeKey);
+ Connection upload(String apiHost) throws IOException {
+ HttpURLConnection connection = connectionFactory.upload(apiHost, writeKey);
return createPostConnection(connection);
}

analytics/src/main/java/com/segment/analytics/ConnectionFactory.java
@@ -54,8 +54,8 @@ public HttpURLConnection projectSettings(String writeKey) throws IOException {
* Return a {@link HttpURLConnection} that writes batched payloads to {@code
* https://api.segment.io/v1/import}.
*/
- public HttpURLConnection upload(String writeKey) throws IOException {
- HttpURLConnection connection = openConnection("https://api.segment.io/v1/import");
+ public HttpURLConnection upload(String apiHost, String writeKey) throws IOException {
+ HttpURLConnection connection = openConnection(String.format("https://%s/import", apiHost));
connection.setRequestProperty("Authorization", authorizationHeader(writeKey));
connection.setRequestProperty("Content-Encoding", "gzip");
connection.setDoOutput(true);
@@ -64,7 +64,7 @@ public HttpURLConnection upload(String writeKey) throws IOException {
}

/**
- * Configures defaults for connections opened with {@link #upload(String)}, and {@link
+ * Configures defaults for connections opened with {@link #upload(String, String)}, and {@link
* #projectSettings(String)}.
*/
protected HttpURLConnection openConnection(String url) throws IOException {
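Since the host string now carries the version path, the URL assembly in upload(String, String) reduces to one format call. A throwaway illustration (not part of the diff) of the endpoints it produces:

    // Default host -> "https://api.segment.io/v1/import"
    String defaultUrl = String.format("https://%s/import", "api.segment.io/v1");

    // Hypothetical custom host -> "https://events.custom.example/v1/import"
    String customUrl = String.format("https://%s/import", "events.custom.example/v1");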
analytics/src/main/java/com/segment/analytics/SegmentIntegration.java
@@ -82,7 +82,8 @@ public Integration<?> create(ValueMap settings, Analytics analytics) {
analytics.flushIntervalInMillis,
analytics.flushQueueSize,
analytics.getLogger(),
- analytics.crypto);
+ analytics.crypto,
+ settings);
}

@Override
@@ -121,6 +122,7 @@ public String key() {
private final Cartographer cartographer;
private final ExecutorService networkExecutor;
private final ScheduledExecutorService flushScheduler;
+ private final String apiHost;
/**
* We don't want to stop adding payloads to our disk queue when we're uploading payloads. So we
* upload payloads on a network executor instead.
@@ -178,7 +180,8 @@ static synchronized SegmentIntegration create(
long flushIntervalInMillis,
int flushQueueSize,
Logger logger,
- Crypto crypto) {
+ Crypto crypto,
+ ValueMap settings) {
PayloadQueue payloadQueue;
try {
File folder = context.getDir("segment-disk-queue", Context.MODE_PRIVATE);
@@ -188,6 +191,7 @@ static synchronized SegmentIntegration create(
logger.error(e, "Could not create disk queue. Falling back to memory queue.");
payloadQueue = new PayloadQueue.MemoryQueue();
}
+ String apiHost = settings.getString("apiHost");
return new SegmentIntegration(
context,
client,
@@ -199,7 +203,8 @@
flushIntervalInMillis,
flushQueueSize,
logger,
- crypto);
+ crypto,
+ apiHost);
}

SegmentIntegration(
@@ -213,7 +218,8 @@ static synchronized SegmentIntegration create(
long flushIntervalInMillis,
int flushQueueSize,
Logger logger,
- Crypto crypto) {
+ Crypto crypto,
+ String apiHost) {
this.context = context;
this.client = client;
this.networkExecutor = networkExecutor;
@@ -225,6 +231,7 @@ static synchronized SegmentIntegration create(
this.flushQueueSize = flushQueueSize;
this.flushScheduler = Executors.newScheduledThreadPool(1, new AnalyticsThreadFactory());
this.crypto = crypto;
+ this.apiHost = apiHost;

segmentThread = new HandlerThread(SEGMENT_THREAD_NAME, THREAD_PRIORITY_BACKGROUND);
segmentThread.start();
@@ -373,7 +380,7 @@ void performFlush() {
Client.Connection connection = null;
try {
// Open a connection.
- connection = client.upload();
+ connection = client.upload(apiHost);

// Write the payloads into the OutputStream.
BatchPayloadWriter writer =
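The SegmentIntegration changes are a straight pass-through: create(...) reads apiHost out of the Segment.io integration settings, the constructor stores it in a final field, and performFlush() hands it to client.upload(apiHost). A small sketch of that lookup, using a stand-in ValueMap in place of the CDN-delivered settings (by this point Analytics has already guaranteed an apiHost entry exists):

    ValueMap segmentSettings =
        new ValueMap().putValue("apiHost", "events.custom.example/v1"); // hypothetical value
    String apiHost = segmentSettings.getString("apiHost"); // non-null thanks to the fallback in Analytics
    // performFlush() ultimately calls client.upload(apiHost) with this value.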
analytics/src/main/java/com/segment/analytics/Utils.java
@@ -80,6 +80,7 @@ public final class Utils {
public static final int DEFAULT_FLUSH_INTERVAL = 30 * 1000; // 30s
public static final int DEFAULT_FLUSH_QUEUE_SIZE = 20;
public static final boolean DEFAULT_COLLECT_DEVICE_ID = true;
+ public static final String DEFAULT_API_HOST = "api.segment.io/v1";

/** Creates a mutable HashSet instance containing the given elements in unspecified order */
public static <T> Set<T> newSet(T... values) {