Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions deploy/mcp-oauth-proxy/templates/configmap.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -305,6 +305,12 @@ data:
{{- range $allowedRoles }}
- {{ . | quote }}
{{- end }}
{{- $dbxSql := .Values.application.tokenExchange.databricksSql | default (dict) }}
databricks-sql:
workspace-host-template: {{ $dbxSql.workspaceHostTemplate | default "https://%s.cloud.databricks.com" | quote }}
resource-path-prefix: {{ $dbxSql.resourcePathPrefix | default "/v1/databricks-sql/" | quote }}
workspace-segment-pattern: {{ $dbxSql.workspaceSegmentPattern | default "^dbc-[a-zA-Z0-9.-]+$" | quote }}
oauth-scope: {{ $dbxSql.oauthScope | default "sql" | quote }}
oktaServiceTypeApp:
auth-server-url: {{ .Values.application.oktaServiceTypeApp.endpoint | quote }}
client-id: {{ .Values.application.oktaServiceTypeApp.clientId | quote }}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
/*
* Copyright The Athenz Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.athenz.mop.config;

import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;
import io.smallrye.config.WithName;

/**
* Databricks SQL MCP: Okta ID token exchange at each workspace {@code POST /oidc/v1/token}.
*/
@ConfigMapping(prefix = "server.token-exchange.databricks-sql")
public interface DatabricksSqlTokenExchangeConfig {

/**
 * {@code String.format} template producing the workspace base URL from the validated
 * workspace path segment (exactly one {@code %s} placeholder).
 */
@WithName("workspace-host-template")
@WithDefault("https://%s.cloud.databricks.com")
String workspaceHostTemplate();

/** Path prefix before the workspace segment; segment is followed by {@code /mcp}. */
@WithName("resource-path-prefix")
@WithDefault("/v1/databricks-sql/")
String resourcePathPrefix();

/** Regex applied only to the workspace path segment (e.g. {@code dbc-…} deployment id). */
@WithName("workspace-segment-pattern")
@WithDefault("^dbc-[a-zA-Z0-9.-]+$")
String workspaceSegmentPattern();

/** OAuth {@code scope} value for the Databricks SQL token exchange (default {@code sql}). */
@WithName("oauth-scope")
@WithDefault("sql")
String oauthScope();
}
7 changes: 6 additions & 1 deletion src/main/java/io/athenz/mop/model/AuthorizationResultDO.java
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,9 @@
*/
package io.athenz.mop.model;

public record AuthorizationResultDO(AuthResult authResult, TokenWrapper token) {}
/**
 * Outcome of an authorization-server token exchange: the auth result, the issued token,
 * and an optional OAuth scope override ({@code null} means callers should fall back to the
 * resource's configured scopes when building the token response).
 */
public record AuthorizationResultDO(AuthResult authResult, TokenWrapper token, String oauthScope) {

/** Backward-compatible constructor for exchanges with no scope override. */
public AuthorizationResultDO(AuthResult authResult, TokenWrapper token) {
this(authResult, token, null);
}
}
16 changes: 15 additions & 1 deletion src/main/java/io/athenz/mop/service/AudienceConstants.java
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,19 @@ private AudienceConstants() {
public static final String PROVIDER_GOOGLE_LOGGING = "google-logging";
public static final String PROVIDER_SPLUNK = "splunk";

/** Databricks SQL MCP resource mapping {@code token.audience} / {@code token.as} routing id. */
public static final String PROVIDER_DATABRICKS_SQL = "databricks-sql";

/**
* DynamoDB / userinfo provider column for a Databricks SQL workspace (prefix + workspace hostname).
*/
/**
 * Storage provider id for a Databricks SQL workspace: base provider id plus
 * {@code -<hostname>} suffix; the bare provider id when no hostname is given.
 */
public static String databricksSqlStorageProvider(String hostname) {
    return StringUtils.isBlank(hostname)
            ? PROVIDER_DATABRICKS_SQL
            : PROVIDER_DATABRICKS_SQL + "-" + hostname.trim();
}

/** Exchanged access token stored by audience so {@code GET /userinfo} can resolve Okta profile from Splunk/Glean/GCP tokens. */
public static boolean storesExchangedTokenForUserinfo(String audience) {
if (StringUtils.isBlank(audience)) {
Expand All @@ -42,6 +55,7 @@ public static boolean storesExchangedTokenForUserinfo(String audience) {
return PROVIDER_GLEAN.equals(audience)
|| PROVIDER_GOOGLE_MONITORING.equals(audience)
|| PROVIDER_GOOGLE_LOGGING.equals(audience)
|| PROVIDER_SPLUNK.equals(audience);
|| PROVIDER_SPLUNK.equals(audience)
|| PROVIDER_DATABRICKS_SQL.equals(audience);
}
}
29 changes: 20 additions & 9 deletions src/main/java/io/athenz/mop/service/AuthorizerService.java
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,9 @@ public class AuthorizerService {
@Inject
ConfigService configService;

@Inject
ExchangedTokenUserinfoStoreProviderResolver exchangedTokenUserinfoStoreProviderResolver;

public void storeTokens(String lookupKey, JsonWebToken idToken, JsonWebToken accessToken, RefreshToken refreshToken, String provider) {
String user = userPrefix + accessToken.getName();
storeTokens(
Expand Down Expand Up @@ -157,7 +160,7 @@ public TokenResponse getTokenFromAuthorizationServer(String subject, String scop
atDO.token().accessToken(),
TOKEN_TYPE,
atDO.token().ttl(),
resourceMeta.scopes().toString()
tokenResponseScope(atDO, resourceMeta)
);

} else {
Expand All @@ -179,12 +182,19 @@ public TokenResponse getTokenFromAuthorizationServer(String subject, String scop
atDO.token().accessToken(),
TOKEN_TYPE,
atDO.token().ttl(),
resourceMeta.scopes().toString()
tokenResponseScope(atDO, resourceMeta)
);
}
return tokenResponse;
}

/**
 * Scope string for the token response: the exchange's scope override when present,
 * otherwise the resource's configured scopes, otherwise empty.
 */
private static String tokenResponseScope(AuthorizationResultDO atDO, ResourceMeta resourceMeta) {
    String overrideScope = atDO == null ? null : atDO.oauthScope();
    if (StringUtils.isNotBlank(overrideScope)) {
        return overrideScope;
    }
    if (resourceMeta == null) {
        return "";
    }
    return resourceMeta.scopes().toString();
}

/**
* Store exchanged token in DynamoDB when the resource has an audience that uses a dedicated provider
* (Glean, Gcp Monitoring, Gcp Logging, Splunk). Stores with provider = audience so /userinfo can resolve by token.
Expand All @@ -202,22 +212,23 @@ private void storeExchangedTokenByAudienceIfNeeded(String resource, TokenWrapper
if (!AudienceConstants.storesExchangedTokenForUserinfo(audience)) {
return;
}
log.info("Storing exchanged token for user: {} audience: {}", oktaToken.key(), audience);
String storeProvider = exchangedTokenUserinfoStoreProviderResolver.resolve(resource, audience);
log.info("Storing exchanged token for user: {} provider: {}", oktaToken.key(), storeProvider);

long nowSeconds = Instant.now().getEpochSecond();
long absoluteTtl = nowSeconds + (exchangedToken.ttl() != null ? exchangedToken.ttl() : 3600L) + TOKEN_STORE_TTL_GRACE_SECONDS;

TokenWrapper toStore = new TokenWrapper(
oktaToken.key(),
audience,
storeProvider,
null,
exchangedToken.accessToken(),
null,
absoluteTtl
);

tokenStore.storeUserToken(oktaToken.key(), audience, toStore);
log.info("Successfully stored token for user: {} provider: {} with ttl: {}", oktaToken.key(), audience, toStore.ttl());
tokenStore.storeUserToken(oktaToken.key(), storeProvider, toStore);
log.info("Successfully stored token for user: {} provider: {} with ttl: {}", oktaToken.key(), storeProvider, toStore.ttl());
}

/**
Expand All @@ -239,7 +250,7 @@ private void storeRefreshedAccessToken(String resource, String userId, String pr
String audience = resourceMeta != null ? resourceMeta.audience() : null;
boolean storeByAudience = AudienceConstants.storesExchangedTokenForUserinfo(audience);
if (resourceMeta != null && storeByAudience) {
storeProvider = audience;
storeProvider = exchangedTokenUserinfoStoreProviderResolver.resolve(resource, audience);
long absoluteTtl = Instant.now().getEpochSecond() + (returnedToken.ttl() != null ? returnedToken.ttl() : 3600L) + TOKEN_STORE_TTL_GRACE_SECONDS;
toStore = new TokenWrapper(
userId,
Expand Down Expand Up @@ -307,7 +318,6 @@ public RefreshAndTokenResult refreshUpstreamAndGetToken(String userId, String pr
tokenStore.storeUserToken(userId, provider, toStore);

ResourceMeta resourceMeta = configService.getResourceMeta(resource);
String scopeStr = resourceMeta != null ? resourceMeta.scopes().toString() : "";
// Run the new access token through the resource's authorization server (e.g. Okta/Glean exchange)
// so the client receives the same exchanged token as in the auth_code flow (TokenExchangeServiceOktaImpl 87-92)
TokenExchangeService accessTokenIssuer = tokenExchangeServiceProducer.getTokenExchangeServiceImplementation(
Expand All @@ -325,6 +335,7 @@ public RefreshAndTokenResult refreshUpstreamAndGetToken(String userId, String pr
return null;
}
storeRefreshedAccessToken(resource, userId, provider, toStore, atDO.token());
String scopeStr = tokenResponseScope(atDO, resourceMeta);
TokenResponse tokenResponse = new TokenResponse(
atDO.token().accessToken(),
TOKEN_TYPE,
Expand Down Expand Up @@ -357,7 +368,6 @@ public RefreshAndTokenResult completeRefreshWithOktaTokens(String userId, String
tokenStore.storeUserToken(userId, provider, toStore);

ResourceMeta resourceMeta = configService.getResourceMeta(resource);
String scopeStr = resourceMeta != null ? resourceMeta.scopes().toString() : "";
TokenExchangeService accessTokenIssuer = tokenExchangeServiceProducer.getTokenExchangeServiceImplementation(
resourceMeta != null ? resourceMeta.authorizationServer() : provider);
TokenExchangeDO accessTokenRequestDO = new TokenExchangeDO(
Expand All @@ -373,6 +383,7 @@ public RefreshAndTokenResult completeRefreshWithOktaTokens(String userId, String
return null;
}
storeRefreshedAccessToken(resource, userId, provider, toStore, atDO.token());
String scopeStr = tokenResponseScope(atDO, resourceMeta);
TokenResponse tokenResponse = new TokenResponse(
atDO.token().accessToken(),
TOKEN_TYPE,
Expand Down
47 changes: 47 additions & 0 deletions src/main/java/io/athenz/mop/service/DatabricksSqlTokenClient.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
/*
* Copyright The Athenz Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.athenz.mop.service;

import jakarta.enterprise.context.ApplicationScoped;
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Optional;

/**
* HTTP POST to Databricks workspace OIDC token endpoint (form body).
*/
@ApplicationScoped
public class DatabricksSqlTokenClient {

    /**
     * Raw HTTP outcome of the token call: status code, response body, and the
     * {@code x-request-id} header (when present) for correlating with Databricks-side logs.
     */
    public record DatabricksTokenHttpResponse(int statusCode, String body, Optional<String> requestId) {}

    // HttpClient is immutable and thread-safe; reuse one instance for the lifetime of this
    // bean instead of building a new client (and connection pool) on every token request.
    private final HttpClient httpClient = HttpClient.newHttpClient();

    /**
     * POST a pre-encoded form body to the workspace OIDC token endpoint.
     *
     * @param tokenEndpoint workspace {@code /oidc/v1/token} URI
     * @param formBody      already-encoded {@code application/x-www-form-urlencoded} payload
     * @return status code, UTF-8 body, and optional {@code x-request-id} header value
     * @throws IOException          on connect/transport failure
     * @throws InterruptedException if the calling thread is interrupted while waiting
     */
    public DatabricksTokenHttpResponse postForm(URI tokenEndpoint, String formBody) throws IOException, InterruptedException {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(tokenEndpoint)
                .header("Content-Type", "application/x-www-form-urlencoded")
                .header("Accept", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(formBody, StandardCharsets.UTF_8))
                .build();
        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString(StandardCharsets.UTF_8));
        Optional<String> requestId = response.headers().firstValue("x-request-id");
        return new DatabricksTokenHttpResponse(response.statusCode(), response.body(), requestId);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
/*
* Copyright The Athenz Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.athenz.mop.service;

import io.athenz.mop.config.DatabricksSqlTokenExchangeConfig;
import java.net.URI;
import java.util.Optional;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;

/**
* Parses MCP resource URLs for Databricks SQL ({@code .../v1/databricks-sql/<segment>/mcp}),
* validates the workspace segment, and builds the workspace API host.
*/
public final class DatabricksSqlWorkspaceResolver {

    private DatabricksSqlWorkspaceResolver() {
    }

    /** Resolved workspace: full token URL host and hostname used for storage keys. */
    public record DatabricksSqlWorkspace(String workspaceBaseUrl, String hostname) {}

    /**
     * Extract and validate the workspace segment from the {@code resource} URI path
     * ({@code <prefix><segment>/mcp}), then build the workspace base URL from the configured
     * host template (e.g. {@code https://<deployment>.cloud.databricks.com}).
     *
     * @param resource MCP resource URL as received from the client
     * @param config   Databricks SQL settings (host template, path prefix, segment pattern)
     * @return resolved workspace, or {@link Optional#empty()} on any parse/validation failure
     */
    public static Optional<DatabricksSqlWorkspace> resolve(String resource, DatabricksSqlTokenExchangeConfig config) {
        if (StringUtils.isBlank(resource) || config == null) {
            return Optional.empty();
        }
        String segment = extractWorkspaceSegment(resource.trim(), config);
        if (segment == null) {
            return Optional.empty();
        }
        return buildWorkspace(config.workspaceHostTemplate(), segment);
    }

    /** Parse the resource URI and return its validated workspace path segment, or {@code null}. */
    private static String extractWorkspaceSegment(String resource, DatabricksSqlTokenExchangeConfig config) {
        String path;
        try {
            path = URI.create(resource).getPath();
        } catch (Exception e) {
            return null;
        }
        if (StringUtils.isBlank(path)) {
            return null;
        }
        String prefix = normalizePrefix(config.resourcePathPrefix());
        if (!path.startsWith(prefix)) {
            return null;
        }
        String tail = path.substring(prefix.length());
        String suffix = "/mcp";
        if (!tail.endsWith(suffix) || tail.length() <= suffix.length()) {
            return null;
        }
        String segment = tail.substring(0, tail.length() - suffix.length());
        // The segment must be exactly one path component (no nested paths).
        if (segment.isEmpty() || segment.indexOf('/') >= 0) {
            return null;
        }
        return matchesSegmentPattern(segment, config.workspaceSegmentPattern()) ? segment : null;
    }

    /** True when the segment matches the configured regex; false on no-match or invalid pattern. */
    private static boolean matchesSegmentPattern(String segment, String pattern) {
        try {
            return Pattern.compile(pattern).matcher(segment).matches();
        } catch (Exception e) {
            // Misconfigured regex: fail closed rather than accept arbitrary segments.
            return false;
        }
    }

    /** Format the host template with the segment; require a parseable URL with a non-blank host. */
    private static Optional<DatabricksSqlWorkspace> buildWorkspace(String template, String segment) {
        if (StringUtils.isBlank(template) || !template.contains("%s")) {
            return Optional.empty();
        }
        String workspaceUrl;
        String host;
        try {
            workspaceUrl = String.format(template, segment);
            host = URI.create(workspaceUrl).getHost();
        } catch (Exception e) {
            return Optional.empty();
        }
        if (StringUtils.isBlank(host)) {
            return Optional.empty();
        }
        return Optional.of(new DatabricksSqlWorkspace(workspaceUrl, host));
    }

    /** Normalize the configured prefix to the form {@code /p/}: leading and trailing slash. */
    static String normalizePrefix(String prefix) {
        if (prefix == null || prefix.isEmpty()) {
            return "/";
        }
        String p = prefix.startsWith("/") ? prefix : "/" + prefix;
        return p.endsWith("/") ? p : p + "/";
    }
}
Loading