Use of org.neo4j.configuration.GraphDatabaseSettings.DEFAULT_DATABASE_NAME in project neo4j by neo4j.
From the class ImportCommandTest, method assertTokenIndexesCreated.
private void assertTokenIndexesCreated() {
    DatabaseManagementService dbms = dbmsService();
    try (var tx = dbms.database(DEFAULT_DATABASE_NAME).beginTx()) {
        var indexes = stream(tx.schema().getIndexes().spliterator(), false).collect(toList());
        assertThat(indexes.stream().filter(index -> index.getIndexType() == LOOKUP).count()).isEqualTo(2);
        assertTrue(indexes.stream().anyMatch(IndexDefinition::isNodeIndex));
        assertTrue(indexes.stream().anyMatch(IndexDefinition::isRelationshipIndex));
    } finally {
        dbms.shutdown();
    }
}
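The helper dbmsService() is not shown in this excerpt. A minimal sketch of what such a helper might do, assuming the test points a org.neo4j.dbms.api.DatabaseManagementServiceBuilder at the directory the import command wrote to (the "neo4j-home" path below is a placeholder, not the test's actual location):

// Hypothetical helper: build a DBMS over the imported store and hand it back.
private DatabaseManagementService dbmsService() {
    return new DatabaseManagementServiceBuilder(Path.of("neo4j-home")) // placeholder home directory
            .build();
}
// The default database is then resolved by its well-known name:
// GraphDatabaseService db = dbms.database(DEFAULT_DATABASE_NAME);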
Use of org.neo4j.configuration.GraphDatabaseSettings.DEFAULT_DATABASE_NAME in project neo4j by neo4j.
From the class IndexStatisticsStoreTest, method shouldNotStartWithoutFileIfReadOnly.
@Test
void shouldNotStartWithoutFileIfReadOnly() {
    final IndexStatisticsStore indexStatisticsStore = new IndexStatisticsStore(
            pageCache, testDirectory.file("non-existing"), immediate(), readOnly(),
            DEFAULT_DATABASE_NAME, PageCacheTracer.NULL);
    final Exception e = assertThrows(Exception.class, indexStatisticsStore::init);
    assertTrue(Exceptions.contains(e, t -> t instanceof ReadOnlyDbException));
    assertTrue(Exceptions.contains(e, t -> t instanceof TreeFileNotFoundException));
    assertTrue(Exceptions.contains(e, t -> t instanceof IllegalStateException));
}
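For contrast, a hypothetical happy-path variant, assuming writable() (the database access mode used in the BatchingNeoStoresTest example below) allows the store to create the missing tree file during init:

// Sketch only: with a writable access mode the missing file should not be an error.
IndexStatisticsStore store = new IndexStatisticsStore(
        pageCache, testDirectory.file("non-existing"), immediate(), writable(),
        DEFAULT_DATABASE_NAME, PageCacheTracer.NULL);
store.init(); // assumption: the backing tree file can be created on demand, so no exception is thrown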
Use of org.neo4j.configuration.GraphDatabaseSettings.DEFAULT_DATABASE_NAME in project neo4j by neo4j.
From the class BatchingNeoStoresTest, method someDataInTheDatabase.
private void someDataInTheDatabase(Config config) throws Exception {
    NullLog nullLog = NullLog.getInstance();
    try (JobScheduler scheduler = JobSchedulerFactory.createInitialisedScheduler();
            PageCache pageCache = new ConfiguringPageCacheFactory(fileSystem, Config.defaults(), PageCacheTracer.NULL,
                    nullLog, scheduler, Clocks.nanoClock(), new MemoryPools()).getOrCreatePageCache();
            Lifespan life = new Lifespan()) {
        // TODO this little dance with TokenHolders is really annoying and must be solved with a better abstraction
        DeferredInitializedTokenCreator propertyKeyTokenCreator = new DeferredInitializedTokenCreator() {
            @Override
            void create(String name, boolean internal, int id) {
                txState.propertyKeyDoCreateForName(name, internal, id);
            }
        };
        DeferredInitializedTokenCreator labelTokenCreator = new DeferredInitializedTokenCreator() {
            @Override
            void create(String name, boolean internal, int id) {
                txState.labelDoCreateForName(name, internal, id);
            }
        };
        DeferredInitializedTokenCreator relationshipTypeTokenCreator = new DeferredInitializedTokenCreator() {
            @Override
            void create(String name, boolean internal, int id) {
                txState.relationshipTypeDoCreateForName(name, internal, id);
            }
        };
        TokenHolders tokenHolders = new TokenHolders(
                new DelegatingTokenHolder(propertyKeyTokenCreator, TokenHolder.TYPE_PROPERTY_KEY),
                new DelegatingTokenHolder(labelTokenCreator, TokenHolder.TYPE_LABEL),
                new DelegatingTokenHolder(relationshipTypeTokenCreator, TokenHolder.TYPE_RELATIONSHIP_TYPE));
        IndexConfigCompleter indexConfigCompleter = index -> index;
        RecoveryCleanupWorkCollector recoveryCleanupWorkCollector = immediate();
        RecordStorageEngine storageEngine = life.add(new RecordStorageEngine(databaseLayout, Config.defaults(),
                pageCache, fileSystem, NullLogProvider.getInstance(), tokenHolders,
                new DatabaseSchemaState(NullLogProvider.getInstance()), new StandardConstraintSemantics(),
                indexConfigCompleter, LockService.NO_LOCK_SERVICE,
                new DatabaseHealth(PanicEventGenerator.NO_OP, nullLog),
                new DefaultIdGeneratorFactory(fileSystem, immediate(), DEFAULT_DATABASE_NAME),
                new DefaultIdController(), recoveryCleanupWorkCollector, PageCacheTracer.NULL, true, INSTANCE,
                writable(), CommandLockVerification.Factory.IGNORE, LockVerificationMonitor.Factory.IGNORE));
        // Create the relationship type token
        TxState txState = new TxState();
        NeoStores neoStores = storageEngine.testAccessNeoStores();
        CommandCreationContext commandCreationContext = storageEngine.newCommandCreationContext(INSTANCE);
        commandCreationContext.initialize(NULL);
        propertyKeyTokenCreator.initialize(neoStores.getPropertyKeyTokenStore(), txState);
        labelTokenCreator.initialize(neoStores.getLabelTokenStore(), txState);
        relationshipTypeTokenCreator.initialize(neoStores.getRelationshipTypeTokenStore(), txState);
        int relTypeId = tokenHolders.relationshipTypeTokens().getOrCreateId(RELTYPE.name());
        apply(txState, commandCreationContext, storageEngine);
        // Finally, we're initialized and ready to create two nodes and a relationship
        txState = new TxState();
        long node1 = commandCreationContext.reserveNode();
        long node2 = commandCreationContext.reserveNode();
        txState.nodeDoCreate(node1);
        txState.nodeDoCreate(node2);
        txState.relationshipDoCreate(commandCreationContext.reserveRelationship(), relTypeId, node1, node2);
        apply(txState, commandCreationContext, storageEngine);
        neoStores.flush(NULL);
    }
}
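The DeferredInitializedTokenCreator helper that the TODO comment complains about is not shown in this excerpt. A rough sketch of its shape, inferred from how the test calls initialize(store, txState) and overrides create(name, internal, id); the field names and the nextId call are assumptions, not the project's actual implementation:

// Sketch only: an abstract TokenCreator whose store and txState are supplied later via initialize(..),
// which is why the anonymous subclasses above can reference txState inside create(..).
abstract static class DeferredInitializedTokenCreator implements TokenCreator {
    TokenStore<?> store;
    TxState txState;

    void initialize(TokenStore<?> store, TxState txState) {
        this.store = store;
        this.txState = txState;
    }

    @Override
    public int createToken(String name, boolean internal) {
        int id = (int) store.nextId(NULL); // assumption: ids are handed out by the backing token store
        create(name, internal, id);
        return id;
    }

    abstract void create(String name, boolean internal, int id);
}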
Use of org.neo4j.configuration.GraphDatabaseSettings.DEFAULT_DATABASE_NAME in project neo4j by neo4j.
From the class GBPTreeTest, method readHeaderMustWorkWithOpenIndex.
@Test
void readHeaderMustWorkWithOpenIndex() throws Exception {
    // GIVEN
    byte[] headerBytes = new byte[12];
    random.nextBytes(headerBytes);
    Consumer<PageCursor> headerWriter = pc -> pc.putBytes(headerBytes);
    // WHEN
    try (PageCache pageCache = createPageCache(defaultPageSize);
            GBPTree<MutableLong, MutableLong> ignore = index(pageCache).with(headerWriter).build()) {
        byte[] readHeader = new byte[headerBytes.length];
        AtomicInteger length = new AtomicInteger();
        Header.Reader headerReader = headerData -> {
            length.set(headerData.limit());
            headerData.get(readHeader);
        };
        GBPTree.readHeader(pageCache, indexFile, headerReader, DEFAULT_DATABASE_NAME, NULL);
        // THEN
        assertEquals(headerBytes.length, length.get());
        assertArrayEquals(headerBytes, readHeader);
    }
}
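The same readHeader call can also be made once the tree has been closed. A minimal sketch reusing only the pieces shown above (index(), indexFile, headerWriter), under the assumption that the header written at build time survives close():

// Sketch: write a header at creation time, close the tree, then read the header back from the file alone.
try (PageCache pageCache = createPageCache(defaultPageSize)) {
    index(pageCache).with(headerWriter).build().close();
    GBPTree.readHeader(pageCache, indexFile, headerData -> {
        // headerData is positioned at the stored header bytes; copy or inspect them here
    }, DEFAULT_DATABASE_NAME, NULL);
}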
Use of org.neo4j.configuration.GraphDatabaseSettings.DEFAULT_DATABASE_NAME in project neo4j by neo4j.
From the class NumberArrayTest, method arrays.
public static Collection<NumberArrayTestData> arrays() {
    PageCache pageCache = fixture.pageCache;
    Path dir = fixture.directory;
    NullLog log = NullLog.getInstance();
    Collection<NumberArrayTestData> list = new ArrayList<>();
    Map<String, NumberArrayFactory> factories = new HashMap<>();
    factories.put("HEAP", NumberArrayFactories.HEAP);
    factories.put("OFF_HEAP", NumberArrayFactories.OFF_HEAP);
    factories.put("AUTO_WITHOUT_PAGECACHE", NumberArrayFactories.AUTO_WITHOUT_PAGECACHE);
    factories.put("CHUNKED_FIXED_SIZE", NumberArrayFactories.CHUNKED_FIXED_SIZE);
    factories.put("autoWithPageCacheFallback",
            NumberArrayFactories.auto(pageCache, NULL, dir, true, NO_MONITOR, log, DEFAULT_DATABASE_NAME));
    factories.put("PageCachedNumberArrayFactory",
            new PageCachedNumberArrayFactory(pageCache, NULL, dir, log, DEFAULT_DATABASE_NAME));
    for (Map.Entry<String, NumberArrayFactory> entry : factories.entrySet()) {
        String name = entry.getKey() + " => ";
        NumberArrayFactory factory = entry.getValue();
        list.add(arrayData(name + "IntArray", factory.newIntArray(INDEXES, -1, INSTANCE),
                random -> random.nextInt(1_000_000_000),
                (array, index, value) -> array.set(index, (Integer) value), IntArray::get));
        list.add(arrayData(name + "DynamicIntArray", factory.newDynamicIntArray(CHUNK_SIZE, -1, INSTANCE),
                random -> random.nextInt(1_000_000_000),
                (array, index, value) -> array.set(index, (Integer) value), IntArray::get));
        list.add(arrayData(name + "LongArray", factory.newLongArray(INDEXES, -1, INSTANCE),
                random -> random.nextLong(1_000_000_000),
                (array, index, value) -> array.set(index, (Long) value), LongArray::get));
        list.add(arrayData(name + "DynamicLongArray", factory.newDynamicLongArray(CHUNK_SIZE, -1, INSTANCE),
                random -> random.nextLong(1_000_000_000),
                (array, index, value) -> array.set(index, (Long) value), LongArray::get));
        list.add(arrayData(name + "ByteArray5", factory.newByteArray(INDEXES, defaultByteArray(5), INSTANCE),
                random -> random.nextInt(1_000_000_000),
                (array, index, value) -> array.setInt(index, 1, (Integer) value),
                (array, index) -> array.getInt(index, 1)));
        list.add(arrayData(name + "DynamicByteArray5", factory.newDynamicByteArray(CHUNK_SIZE, defaultByteArray(5), INSTANCE),
                random -> random.nextInt(1_000_000_000),
                (array, index, value) -> array.setInt(index, 1, (Integer) value),
                (array, index) -> array.getInt(index, 1)));
        Function<RandomRule, Object> valueGenerator = random ->
                new long[] { random.nextLong(), random.nextInt(), (short) random.nextInt(), (byte) random.nextInt() };
        Writer<ByteArray> writer = (array, index, value) -> {
            long[] values = (long[]) value;
            array.setLong(index, 0, values[0]);
            array.setInt(index, 8, (int) values[1]);
            array.setShort(index, 12, (short) values[2]);
            array.setByte(index, 14, (byte) values[3]);
        };
        Reader<ByteArray> reader = (array, index) -> new long[] {
                array.getLong(index, 0), array.getInt(index, 8), array.getShort(index, 12), array.getByte(index, 14) };
        list.add(arrayData(name + "ByteArray15", factory.newByteArray(INDEXES, defaultByteArray(15), INSTANCE),
                valueGenerator, writer, reader));
        list.add(arrayData(name + "DynamicByteArray15", factory.newDynamicByteArray(CHUNK_SIZE, defaultByteArray(15), INSTANCE),
                valueGenerator, writer, reader));
    }
    return list;
}
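As orientation, a minimal stand-alone sketch of the factory API these test cases exercise, assuming the heap-backed factory and the same -1 default value convention; the size and indices below are arbitrary:

// Sketch only: allocate a small long array, write and read one slot, then release it.
LongArray ids = NumberArrayFactories.HEAP.newLongArray(1_000, -1, INSTANCE);
ids.set(42, 123_456L);
assert ids.get(42) == 123_456L;
assert ids.get(0) == -1;   // untouched slots carry the default value
ids.close();               // assumption: paged/off-heap variants need this to free their backing memory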