feat(core): Add attributes client #118

Merged
merged 19 commits on Aug 19, 2024
4 changes: 2 additions & 2 deletions .github/workflows/checks.yaml
@@ -139,7 +139,7 @@ jobs:
--client-secret=secret \
--platform-endpoint=localhost:8080 \
-i \
encrypt --kas-url=localhost:8080 --mime-type=text/plain -f data -m 'here is some metadata' > test.tdf
encrypt --kas-url=localhost:8080 --mime-type=text/plain --attr https://example.com/attr/attr1/value/value1 -f data -m 'here is some metadata' > test.tdf

java -jar target/cmdline.jar \
--client-id=opentdf-sdk \
@@ -175,7 +175,7 @@ jobs:
--client-secret=secret \
--platform-endpoint=localhost:8080 \
-i \
encryptnano --kas-url=http://localhost:8080 -f data -m 'here is some metadata' > nano.ntdf
encryptnano --kas-url=http://localhost:8080 --attr https://example.com/attr/attr1/value/value1 -f data -m 'here is some metadata' > nano.ntdf

java -jar target/cmdline.jar \
--client-id=opentdf-sdk \
12 changes: 11 additions & 1 deletion cmdline/src/main/java/io/opentdf/platform/Command.java
@@ -31,6 +31,7 @@
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.Stream;

@CommandLine.Command(name = "tdf")
class Command {
@@ -52,6 +53,8 @@ void encrypt(
@Option(names = {"-f", "--file"}, defaultValue = Option.NULL_VALUE) Optional<File> file,
@Option(names = {"-k", "--kas-url"}, required = true, split = ",") List<String> kas,
@Option(names = {"-m", "--metadata"}, defaultValue = Option.NULL_VALUE) Optional<String> metadata,
// picocli can't apply split to Optional parameters, so the value is split manually below
@Option(names = {"-a", "--attr"}, defaultValue = Option.NULL_VALUE) Optional<String> attributes,
@Option(names = {"--mime-type"}, defaultValue = Option.NULL_VALUE) Optional<String> mimeType) throws
IOException, JOSEException {

@@ -66,6 +69,9 @@ void encrypt(
configs.add(Config.withKasInformation(kasInfos));
metadata.map(Config::withMetaData).ifPresent(configs::add);
mimeType.map(Config::withMimeType).ifPresent(configs::add);
attributes.ifPresent(attr -> {
configs.add(Config.withDataAttributes(attr.split(",")));
});

var tdfConfig = Config.newTDFConfig(configs.toArray(Consumer[]::new));
try (var in = file.isEmpty() ? new BufferedInputStream(System.in) : new FileInputStream(file.get())) {
@@ -113,7 +119,8 @@ void readMetadata(@Option(names = {"-f", "--file"}, required = true) Path tdfPath
void createNanoTDF(
@Option(names = {"-f", "--file"}, defaultValue = Option.NULL_VALUE) Optional<File> file,
@Option(names = {"-k", "--kas-url"}, required = true) List<String> kas,
@Option(names = {"-m", "--metadata"}, defaultValue = Option.NULL_VALUE) Optional<String> metadata) throws Exception {
@Option(names = {"-m", "--metadata"}, defaultValue = Option.NULL_VALUE) Optional<String> metadata,
@Option(names = {"-a", "--attr"}, defaultValue = Option.NULL_VALUE) Optional<String> attributes) throws Exception {

var sdk = buildSDK();
var kasInfos = kas.stream().map(k -> {
@@ -124,6 +131,9 @@

List<Consumer<Config.NanoTDFConfig>> configs = new ArrayList<>();
configs.add(Config.withNanoKasInformation(kasInfos));
attributes.ifPresent(attr -> {
configs.add(Config.witDataAttributes(attr.split(",")));
});

var nanoTDFConfig = Config.newNanoTDFConfig(configs.toArray(Consumer[]::new));
try (var in = file.isEmpty() ? new BufferedInputStream(System.in) : new FileInputStream(file.get())) {
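For context, the new -a/--attr option takes a comma-separated list of attribute FQNs and hands them to the TDF configuration via Config.withDataAttributes. A minimal sketch of the equivalent programmatic call follows; everything around that one call (KAS setup, SDK construction, the input stream) is assumed rather than taken from this diff.

// Sketch only: attach data attributes to a TDF config the way the new
// --attr option does. The KAS information and SDK construction that would
// normally accompany this are assumed and omitted.
import io.opentdf.platform.sdk.Config;

public class AttrConfigSketch {
    public static void main(String[] args) {
        // Comma-separated FQNs, exactly as the CLI accepts them.
        String[] attrs = "https://example.com/attr/attr1/value/value1".split(",");

        // withDataAttributes adds the attribute FQNs to the TDF policy; in the
        // command above it sits alongside withKasInformation, withMetaData and
        // withMimeType in the same varargs list.
        var tdfConfig = Config.newTDFConfig(Config.withDataAttributes(attrs));
    }
}
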
40 changes: 40 additions & 0 deletions sdk/src/main/java/io/opentdf/platform/sdk/AttributesClient.java
@@ -0,0 +1,40 @@
package io.opentdf.platform.sdk;

import io.grpc.ManagedChannel;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsRequest;
import io.opentdf.platform.policy.attributes.AttributesServiceGrpc;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsResponse;


public class AttributesClient implements SDK.AttributesService {

private final ManagedChannel channel;

/**
 * A client that communicates with the platform's attributes service.
 * @param channel the channel to use to communicate with the attributes service
 */
public AttributesClient(ManagedChannel channel) {
this.channel = channel;
}


@Override
public synchronized void close() {
this.channel.shutdownNow();
}


// package-private so tests can get at the stub directly
synchronized AttributesServiceGrpc.AttributesServiceBlockingStub getStub() {
return AttributesServiceGrpc.newBlockingStub(channel);
}


@Override
public GetAttributeValuesByFqnsResponse getAttributeValuesByFqn(GetAttributeValuesByFqnsRequest request) {
return getStub().getAttributeValuesByFqns(request);
}

}
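As a usage note, the client wraps a blocking stub behind a single call. A rough sketch of invoking it directly follows; the endpoint and plaintext channel are illustrative only, and addFqns is assumed to be the generated accessor for the request's repeated fqns field (the PR's own test below exercises the same path with an empty request).

// Illustrative sketch: query attribute values by FQN through the new client.
// The endpoint is hypothetical; real channels are built by SDKBuilder.
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsRequest;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsResponse;
import io.opentdf.platform.sdk.AttributesClient;

public class AttributesClientSketch {
    public static void main(String[] args) throws Exception {
        ManagedChannel channel = ManagedChannelBuilder
                .forTarget("localhost:8080") // hypothetical platform endpoint
                .usePlaintext()              // example only
                .build();

        try (AttributesClient attributes = new AttributesClient(channel)) {
            GetAttributeValuesByFqnsRequest request = GetAttributeValuesByFqnsRequest.newBuilder()
                    .addFqns("https://example.com/attr/attr1/value/value1") // assumed accessor
                    .build();
            GetAttributeValuesByFqnsResponse response = attributes.getAttributeValuesByFqn(request);
            response.getFqnAttributeValuesMap().keySet().forEach(System.out::println);
        }
    }
}
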
16 changes: 12 additions & 4 deletions sdk/src/main/java/io/opentdf/platform/sdk/SDK.java
@@ -5,6 +5,7 @@
import io.opentdf.platform.authorization.AuthorizationServiceGrpc;
import io.opentdf.platform.authorization.AuthorizationServiceGrpc.AuthorizationServiceFutureStub;
import io.opentdf.platform.policy.attributes.AttributesServiceGrpc;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsRequest;
import io.opentdf.platform.policy.attributes.AttributesServiceGrpc.AttributesServiceFutureStub;
import io.opentdf.platform.policy.namespaces.NamespaceServiceGrpc;
import io.opentdf.platform.policy.namespaces.NamespaceServiceGrpc.NamespaceServiceFutureStub;
@@ -13,6 +14,9 @@
import io.opentdf.platform.policy.subjectmapping.SubjectMappingServiceGrpc;
import io.opentdf.platform.policy.subjectmapping.SubjectMappingServiceGrpc.SubjectMappingServiceFutureStub;
import io.opentdf.platform.sdk.nanotdf.NanoTDFType;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsResponse;

import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -45,17 +49,20 @@ public interface KAS extends AutoCloseable {
byte[] unwrapNanoTDF(NanoTDFType.ECCurve curve, String header, String kasURL);
}

public interface AttributesService extends AutoCloseable {
GetAttributeValuesByFqnsResponse getAttributeValuesByFqn(GetAttributeValuesByFqnsRequest request);
}

// TODO: add KAS
public interface Services extends AutoCloseable {
AuthorizationServiceFutureStub authorization();
AttributesServiceFutureStub attributes();
AttributesService attributes();
NamespaceServiceFutureStub namespaces();
SubjectMappingServiceFutureStub subjectMappings();
ResourceMappingServiceFutureStub resourceMappings();
KAS kas();

static Services newServices(ManagedChannel channel, KAS kas) {
var attributeService = AttributesServiceGrpc.newFutureStub(channel);
static Services newServices(ManagedChannel channel, KAS kas, AttributesService attributeService) {
var namespaceService = NamespaceServiceGrpc.newFutureStub(channel);
var subjectMappingService = SubjectMappingServiceGrpc.newFutureStub(channel);
var resourceMappingService = ResourceMappingServiceGrpc.newFutureStub(channel);
@@ -65,11 +72,12 @@ static Services newServices(ManagedChannel channel, KAS kas) {
@Override
public void close() throws Exception {
channel.shutdownNow();
attributeService.close();
kas.close();
}

@Override
public AttributesServiceFutureStub attributes() {
public AttributesService attributes() {
return attributeService;
}

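Since Services.attributes() now returns the blocking AttributesService wrapper instead of an AttributesServiceFutureStub, callers no longer deal with futures for this lookup. A short sketch, assuming a Services instance is already in hand (how it is built via SDKBuilder is not part of this diff) and that addFqns is the generated accessor for the request's repeated fqns field:

// Sketch: consume the new AttributesService through SDK.Services.
// Obtaining the Services instance (SDKBuilder, credentials, endpoint) is
// out of scope here.
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsRequest;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsResponse;
import io.opentdf.platform.sdk.SDK;

class AttributesLookup {
    static GetAttributeValuesByFqnsResponse lookup(SDK.Services services, String fqn) {
        var request = GetAttributeValuesByFqnsRequest.newBuilder()
                .addFqns(fqn) // assumed accessor for the repeated fqns field
                .build();
        // attributes() now returns the blocking wrapper added in this PR.
        return services.attributes().getAttributeValuesByFqn(request);
    }
}
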
8 changes: 6 additions & 2 deletions sdk/src/main/java/io/opentdf/platform/sdk/SDKBuilder.java
@@ -193,20 +193,24 @@ ServicesAndInternals buildServices() {

var authInterceptor = getGrpcAuthInterceptor(dpopKey);
ManagedChannel channel;
ManagedChannel attributesChannel;
Function<String, ManagedChannel> managedChannelFactory;
if (authInterceptor == null) {
channel = getManagedChannelBuilder(platformEndpoint).build();
attributesChannel = getManagedChannelBuilder(platformEndpoint).build();
managedChannelFactory = (String endpoint) -> getManagedChannelBuilder(endpoint).build();

} else {
channel = getManagedChannelBuilder(platformEndpoint).intercept(authInterceptor).build();
attributesChannel = getManagedChannelBuilder(platformEndpoint).intercept(authInterceptor).build();
managedChannelFactory = (String endpoint) -> getManagedChannelBuilder(endpoint).intercept(authInterceptor).build();
}
var client = new KASClient(managedChannelFactory, dpopKey);
var kasclient = new KASClient(managedChannelFactory, dpopKey);
var attrclient = new AttributesClient(attributesChannel);
return new ServicesAndInternals(
authInterceptor,
sslFactory == null ? null : sslFactory.getTrustManager().orElse(null),
SDK.Services.newServices(channel, client)
SDK.Services.newServices(channel, kasclient, attrclient)
);
}

@@ -0,0 +1,84 @@
package io.opentdf.platform.sdk;

import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.Server;
import io.grpc.ServerBuilder;
import io.opentdf.platform.policy.attributes.AttributesServiceGrpc;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsRequest;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsResponse;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsResponse.AttributeAndValue;
import io.opentdf.platform.policy.Attribute;
import io.opentdf.platform.policy.Namespace;
import io.opentdf.platform.policy.Value;
import io.opentdf.platform.policy.AttributeRuleTypeEnum;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.junit.jupiter.api.Test;

import static io.opentdf.platform.sdk.SDKBuilderTest.getRandomPort;
import static org.assertj.core.api.Assertions.assertThat;


public class AttributeClientTest {
@Test
void testGettingAttributeByFqn() throws IOException {
AttributesServiceGrpc.AttributesServiceImplBase attributesService = new AttributesServiceGrpc.AttributesServiceImplBase() {
@Override
public void getAttributeValuesByFqns(GetAttributeValuesByFqnsRequest request,
io.grpc.stub.StreamObserver<GetAttributeValuesByFqnsResponse> responseObserver) {
Attribute attribute1 = Attribute.newBuilder().setId("CLS").setNamespace(
Namespace.newBuilder().setId("v").setName("virtru.com").setFqn("https://virtru.com").build())
.setName("Classification").setRule(AttributeRuleTypeEnum.ATTRIBUTE_RULE_TYPE_ENUM_HIERARCHY).setFqn("https://virtru.com/attr/classification").build();

Value attributeValue1 = Value.newBuilder()
.setValue("value1")
.build();

// Create a sample AttributeValues object
AttributeAndValue attributeAndValues = AttributeAndValue.newBuilder().setAttribute(attribute1)
.setValue(attributeValue1)
.build();
GetAttributeValuesByFqnsResponse response = GetAttributeValuesByFqnsResponse.newBuilder()
.putFqnAttributeValues("https://virtru.com/attr/classification/value/value1",attributeAndValues)
.build();
responseObserver.onNext(response);
responseObserver.onCompleted();

}
};

Server attrServer = null;
try {
attrServer = startServer(attributesService);
String attrServerUrl = "localhost:" + attrServer.getPort();
ManagedChannel channel = ManagedChannelBuilder
.forTarget(attrServerUrl)
.usePlaintext()
.build();
try (var attr = new AttributesClient(channel)) {
GetAttributeValuesByFqnsResponse resp = attr.getAttributeValuesByFqn(GetAttributeValuesByFqnsRequest.newBuilder().build());
Set<String> fqnSet = new HashSet<>(Arrays.asList("https://virtru.com/attr/classification/value/value1"));
assertThat(resp.getFqnAttributeValuesMap().keySet()).isEqualTo(fqnSet);
assertThat(resp.getFqnAttributeValuesCount()).isEqualTo(1);
}
} finally {
if (attrServer != null) {
attrServer.shutdownNow();
}
}
}
private static Server startServer(AttributesServiceGrpc.AttributesServiceImplBase attrService) throws IOException {
return ServerBuilder
.forPort(getRandomPort())
.directExecutor()
.addService(attrService)
.build()
.start();
}

}