Use of org.projectnessie.model.Branch in project nessie by projectnessie.
The class AbstractRestGCTest, method testMultiRefSharedTable.
@Test
public void testMultiRefSharedTable() throws BaseNessieClientServerException {
// ------ Time ------|------ branch1 -----|------ branch2 -----|------ branch3 -----|
//         t0       |   create branch    |                    |                    |
//         t1       |   TABLE_ONE : 41   |  {TABLE_ONE : 41}  |  {TABLE_ONE : 41}  | --(expired)
//         t2       |   TABLE_ONE : 42   |  {TABLE_ONE : 42}  |  {TABLE_ONE : 42}  |
//         t3       |                    |   create branch    |                    |
//         t4       |                    |                    |   create branch    |
//         t5       |                    |   TABLE_ONE : 43   |                    |
//         t6       |-- cut off time ----|-- cut off time ----|-- cut off time ----|
//         t7       |   TABLE_ONE : 44   |                    |                    |
//         t8       |                    |   TABLE_ONE : 45   |                    |
//         t9       |                    |                    |   TABLE_ONE : 46   |
//         t10      |                    |                    |   delete branch    |
//         t11      |   DROP TABLE_ONE   |                    |                    |
String prefix = "multiRefSharedTable";
IdentifiedResult expectedResult = new IdentifiedResult();
Branch branch1 = createBranch(prefix + "_1");
// commit for TABLE_ONE on branch1
CommitOutput b1table1 = commitSingleOp(prefix, branch1, branch1.getHash(), 41, CID_ONE, TABLE_ONE, METADATA_ZERO, null, null);
// expired, as it is before the cutoff time and not the commit head
fillExpectedContents(Branch.of(branch1.getName(), b1table1.hash), 1, expectedResult);
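// remember the hash of this first, pre-cutoff commit; its content is also expected to expire on branch2 and branch3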
String firstCommitHash = b1table1.hash;
b1table1 = commitSingleOp(prefix, branch1, b1table1.hash, 42, CID_ONE, TABLE_ONE, METADATA_ONE, b1table1.content, null);
Branch branch2 = createBranch(prefix + "_2", Branch.of(branch1.getName(), b1table1.hash));
// expired, as it is before the cutoff time and not the commit head
fillExpectedContents(Branch.of(branch2.getName(), firstCommitHash), 1, expectedResult);
Branch branch3 = createBranch(prefix + "_3", Branch.of(branch1.getName(), b1table1.hash));
// expired, as it is before the cutoff time and not the commit head
fillExpectedContents(Branch.of(branch3.getName(), firstCommitHash), 1, expectedResult);
// commit for TABLE_ONE on branch2
CommitOutput b2table1 = commitSingleOp(prefix, branch2, branch2.getHash(), 43, CID_ONE, TABLE_ONE, METADATA_TWO, b1table1.content, null);
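// GC cutoff: contents committed before this instant that are no longer a branch HEAD are expected to expire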
final Instant cutoffTime = Instant.now();
// commit for TABLE_ONE on branch1
b1table1 = commitSingleOp(prefix, branch1, b1table1.hash, 44, CID_ONE, TABLE_ONE, METADATA_THREE, b2table1.content, null);
// commit for TABLE_ONE on branch2
b2table1 = commitSingleOp(prefix, branch2, b2table1.hash, 45, CID_ONE, TABLE_ONE, METADATA_FOUR, b1table1.content, null);
// commit for TABLE_ONE on branch3
CommitOutput b3table1 = commitSingleOp(prefix, branch3, branch3.getHash(), 46, CID_ONE, TABLE_ONE, METADATA_FIVE, b2table1.content, null);
// deleting branch3 should not affect the result, as it happens after the cutoff timestamp
deleteBranch(branch3.getName(), b3table1.hash);
// dropping TABLE_ONE on branch1 should not affect the result, as it happens after the cutoff timestamp
dropTableCommit(prefix, branch1, b1table1.hash, TABLE_ONE);
performGc(cutoffTime, null, expectedResult, Arrays.asList(branch1.getName(), branch2.getName(), branch3.getName()), true, null);
}
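Note: commitSingleOp, fillExpectedContents, performGc and the CID_*/TABLE_*/METADATA_* constants are helpers of the abstract test class and are not shown here. For orientation only, a single-table commit like the ones above boils down to the regular Nessie commit API. A minimal sketch, assuming a NessieApiV1 instance named api and an org.projectnessie.model.Content value named table (both names are hypothetical, not part of the test; CommitMeta, ContentKey and Operation come from org.projectnessie.model):
// fetch the branch HEAD so the commit can state the expected hash it builds on
Branch head = (Branch) api.getReference().refName(branch1.getName()).get();
// put the content under a key and commit it onto the branch; commit() returns the new HEAD
Branch newHead = api.commitMultipleOperations().branch(head).commitMeta(CommitMeta.fromMessage("add TABLE_ONE")).operation(Operation.Put.of(ContentKey.of("table_one"), table)).commit();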
Use of org.projectnessie.model.Branch in project nessie by projectnessie.
The class AbstractRestAssign, method testAssignRefToFreshMain.
/**
* Assigning a branch/tag to a fresh main without any commits didn't work in Nessie 0.9.2.
*/
@ParameterizedTest
@EnumSource(ReferenceMode.class)
public void testAssignRefToFreshMain(ReferenceMode refMode) throws BaseNessieClientServerException {
Reference main = getApi().getReference().refName("main").get();
// make sure main doesn't have any commits
LogResponse log = getApi().getCommitLog().refName(main.getName()).get();
assertThat(log.getLogEntries()).isEmpty();
Branch testBranch = createBranch("testBranch");
getApi().assignBranch().branch(testBranch).assignTo(main).assign();
Reference testBranchRef = getApi().getReference().refName(testBranch.getName()).get();
assertThat(testBranchRef.getHash()).isEqualTo(main.getHash());
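// same scenario for a tag: create it on main, then reassign it to main via the given reference mode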
String testTag = "testTag";
Reference testTagRef = getApi().createReference().sourceRefName(main.getName()).reference(Tag.of(testTag, main.getHash())).create();
assertThat(testTagRef.getHash()).isNotNull();
getApi().assignTag().hash(testTagRef.getHash()).tagName(testTag).assignTo(refMode.transform(main)).assign();
testTagRef = getApi().getReference().refName(testTag).get();
assertThat(testTagRef.getHash()).isEqualTo(main.getHash());
}
Use of org.projectnessie.model.Branch in project nessie by projectnessie.
The class AbstractDeltaTest, method removeBranches.
@AfterEach
void removeBranches() throws NessieConflictException, NessieNotFoundException {
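// delete every branch and tag so each test starts from a clean Nessie state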
for (Reference ref : api.getAllReferences().get().getReferences()) {
if (ref instanceof Branch) {
api.deleteBranch().branchName(ref.getName()).hash(ref.getHash()).delete();
}
if (ref instanceof Tag) {
api.deleteTag().tagName(ref.getName()).hash(ref.getHash()).delete();
}
}
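// recreate an empty main branch so the next test has a default branch to work against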
api.createReference().reference(Branch.of("main", null)).create();
api.close();
api = null;
}
Use of org.projectnessie.model.Branch in project nessie by projectnessie.
The class ITDeltaLogBranches, method testBranches.
@Test
void testBranches() throws BaseNessieClientServerException {
Dataset<Row> targetTable = createKVDataSet(Arrays.asList(tuple2(1, 10), tuple2(2, 20), tuple2(3, 30), tuple2(4, 40)), "key", "value");
// write some data to table
targetTable.write().format("delta").save(tempPath.getAbsolutePath());
// create the 'test' branch at the point where there is only one commit
Branch sourceRef = api.getDefaultBranch();
api.createReference().sourceRefName(sourceRef.getName()).reference(Branch.of("test", sourceRef.getHash())).create();
// add some more data to main
targetTable.write().format("delta").mode("append").save(tempPath.getAbsolutePath());
// read main and record number of rows
DeltaTable target = DeltaTable.forPath(spark, tempPath.getAbsolutePath());
int expectedSize = target.toDF().collectAsList().size();
/*
It is hard to change the ref in Delta for the following reasons:
* DeltaTables are cached
* the hadoop/spark config doesn't get updated in the cached tables
* there is currently no way to pass down a branch or hash via '@' or '#'
Below we manually invalidate the cache and update the ref before reading the table off 'test'.
As the table itself is cached, we can't read from main without invalidating, hence the read from main above.
*/
DeltaLog.invalidateCache(spark, new Path(tempPath.getAbsolutePath()));
spark.sparkContext().conf().set("spark.sql.catalog.spark_catalog.ref", "test");
Dataset<Row> targetBranch = spark.read().format("delta").load(tempPath.getAbsolutePath());
// we expect the table from test to be half the size of the table from main
Assertions.assertEquals(expectedSize * 0.5, targetBranch.collectAsList().size());
}
Use of org.projectnessie.model.Branch in project nessie by projectnessie.
The class TestAuthorizationRules, method testCannotReadTargetBranch.
@Test
@TestSecurity(user = "user1")
void testCannotReadTargetBranch() throws BaseNessieClientServerException {
String role = "user1";
String branchName = "allowedBranchForUser1";
createBranch(Branch.of(branchName, null), role, false);
String disallowedBranch = "disallowedBranchForUser1";
createBranch(Branch.of(disallowedBranch, null), role, false);
final Branch branch = retrieveBranch(branchName, role, false);
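// assigning, merging, and transplanting into the disallowed branch must all fail with the same VIEW_REFERENCE error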
String errorMessage = String.format("'VIEW_REFERENCE' is not allowed for role '%s' on reference '%s'", role, disallowedBranch);
assertThatThrownBy(() -> api().assignBranch().branch(branch).assignTo(Branch.of(disallowedBranch, branch.getHash())).assign()).isInstanceOf(NessieForbiddenException.class).hasMessageContaining(errorMessage);
assertThatThrownBy(() -> api().mergeRefIntoBranch().fromRef(branch).branch((Branch.of(disallowedBranch, branch.getHash()))).merge()).isInstanceOf(NessieForbiddenException.class).hasMessageContaining(errorMessage);
assertThatThrownBy(() -> api().transplantCommitsIntoBranch().hashesToTransplant(Arrays.asList(branch.getHash())).fromRefName(branch.getName()).branch((Branch.of(disallowedBranch, branch.getHash()))).transplant()).isInstanceOf(NessieForbiddenException.class).hasMessageContaining(errorMessage);
deleteBranch(branch, role, false);
}