src_fm_fc_ms_ff (string, 43–86.8k chars) | target (string, 20–276k chars) |
---|---|
LBlockHashTableEight implements AutoCloseable { public Optional<BloomFilter> prepareBloomFilter(final boolean sizeDynamically) throws Exception { final long bloomFilterSize = sizeDynamically ? Math.min(BloomFilter.getOptimalSize(size()), BLOOMFILTER_MAX_SIZE) : BLOOMFILTER_MAX_SIZE; try (ArrowBuf keyHolder = allocator.buffer(9); RollbackCloseable closeOnErr = new RollbackCloseable()) { final BloomFilter bloomFilter = new BloomFilter(allocator, Thread.currentThread().getName(), bloomFilterSize); closeOnErr.add(bloomFilter); bloomFilter.setup(); for (int chunk = 0; chunk < tableFixedAddresses.length; chunk++) { final long chunkAddr = tableFixedAddresses[chunk]; final long chunkEnd = chunkAddr + (MAX_VALUES_PER_BATCH * BLOCK_WIDTH); for (long blockAddr = chunkAddr; blockAddr < chunkEnd; blockAddr += BLOCK_WIDTH) { final long key = PlatformDependent.getLong(blockAddr); if (key == this.freeValue) { continue; } keyHolder.writerIndex(0); final byte validityByte = (key == NULL_KEY_VALUE) ? (byte)0x00 : (byte)0x01; keyHolder.writeByte(validityByte); keyHolder.writeLong(key); bloomFilter.put(keyHolder, 9); } } closeOnErr.commit(); return Optional.of(bloomFilter); } } LBlockHashTableEight(HashConfig config, BufferAllocator allocator, int initialSize); int insert(long key, int keyHash); int getNull(); int insertNull(); int insertNull(int oldNullKeyOrdinal); int get(long key, int keyHash); @Override int hashCode(); @Override String toString(); @Override boolean equals(Object obj); int size(); int blocks(); int capacity(); @Override void close(); long getRehashTime(TimeUnit unit); int getRehashCount(); Optional<BloomFilter> prepareBloomFilter(final boolean sizeDynamically); static final int KEY_WIDTH; static final int ORDINAL_WIDTH; static final int BLOCK_WIDTH; static final int BITS_IN_CHUNK; static final int MAX_VALUES_PER_BATCH; static final int CHUNK_OFFSET_MASK; static final int POSITIVE_MASK; static final int NO_MATCH; } | @Test public void testPrepareBloomFilter() throws Exception { List<AutoCloseable> closeables = new ArrayList<>(); try (ArrowBuf keyBuf = testAllocator.buffer(9); LBlockHashTableEight table = new LBlockHashTableEight(HashConfig.getDefault(), testAllocator, 16)) { Set<Long> dataSet = generatedData(10); dataSet.stream().forEach(key -> table.insert(key, (int) HashComputation.computeHash(key))); final Optional<BloomFilter> bloomFilterOptional = table.prepareBloomFilter(false); assertTrue(bloomFilterOptional.isPresent()); closeables.add(bloomFilterOptional.get()); dataSet.stream().forEach(key -> assertTrue(bloomFilterOptional.get().mightContain(writeKey(keyBuf, key), 9))); Set<Long> differentData = generatedData(100); long fpCount = differentData.stream() .filter(key -> !dataSet.contains(key)) .filter(key -> bloomFilterOptional.get().mightContain(writeKey(keyBuf, key), 9)).count(); assertTrue("False positive count is high - " + fpCount, fpCount < 5); BloomFilter bloomFilter = bloomFilterOptional.get(); assertFalse(bloomFilter.mightContain(writeNull(keyBuf), 9)); table.insertNull(); BloomFilter bloomFilter2 = table.prepareBloomFilter(false).get(); closeables.add(bloomFilter2); assertTrue(bloomFilter2.mightContain(writeNull(keyBuf), 9)); } finally { AutoCloseables.close(closeables); } } |
AbstractDataCollector implements DataCollector { @Override public synchronized void close() throws Exception { if(!closed){ final List<AutoCloseable> closeables = new ArrayList<>(); closeables.addAll(Arrays.asList(buffers)); closeables.add(new AutoCloseable() { @Override public void close() throws Exception { for (int i = 0; i < completionMessages.length; i++) { completionMessages[i].informUpstreamIfNecessary(); } } }); closeables.add(new AutoCloseable() { @Override public void close() throws Exception { closed = true; } }); AutoCloseables.close(closeables); } } AbstractDataCollector(
SharedResourceGroup resourceGroup,
boolean isDiscrete,
Collector collector,
final int bufferCapacity,
BufferAllocator allocator,
SabotConfig config,
FragmentHandle handle,
FragmentWorkQueue workQueue,
TunnelProvider tunnelProvider,
SpillService spillService,
EndpointsIndex endpointsIndex); @Override int getOppositeMajorFragmentId(); @Override RawBatchBuffer[] getBuffers(); @Override void streamCompleted(int minorFragmentId); @Override void batchArrived(int minorFragmentId, RawFragmentBatch batch); @Override int getTotalIncomingFragments(); @Override synchronized void close(); } | @Test public void testReserveMemory() { SharedResourceGroup resourceGroup = mock(SharedResourceGroup.class); SabotConfig config = mock(SabotConfig.class); FragmentWorkQueue workQueue = mock(FragmentWorkQueue.class); TunnelProvider tunnelProvider = mock(TunnelProvider.class); EndpointsIndex endpointsIndex = new EndpointsIndex( Arrays.asList( NodeEndpoint.newBuilder().setAddress("localhost").setFabricPort(12345).build(), NodeEndpoint.newBuilder().setAddress("localhost").setFabricPort(12345).build() ) ); List<CoordExecRPC.MinorFragmentIndexEndpoint> list = Arrays.asList( MinorFragmentIndexEndpoint.newBuilder().setEndpointIndex(0).setMinorFragmentId(0).build(), MinorFragmentIndexEndpoint.newBuilder().setEndpointIndex(0).setMinorFragmentId(0).build() ); CoordExecRPC.Collector collector = CoordExecRPC.Collector.newBuilder() .setIsSpooling(true) .setOppositeMajorFragmentId(3) .setSupportsOutOfOrder(true) .addAllIncomingMinorFragmentIndex(list) .build(); ExecProtos.FragmentHandle handle = ExecProtos.FragmentHandle.newBuilder().setMajorFragmentId(2323).setMinorFragmentId(234234).build(); BufferAllocator allocator = allocatorRule.newAllocator("test-abstract-data-collector", 0, 2000000); boolean outOfMemory = false; final SchedulerService schedulerService = Mockito.mock(SchedulerService.class); final SpillService spillService = new SpillServiceImpl(DremioConfig.create(null, config), new DefaultSpillServiceOptions(), new Provider<SchedulerService>() { @Override public SchedulerService get() { return schedulerService; } }); try { AbstractDataCollector dataCollector = new AbstractDataCollector(resourceGroup, true, collector, 10240, allocator, config, handle, workQueue, tunnelProvider, spillService, endpointsIndex) { @Override protected RawBatchBuffer getBuffer(int minorFragmentId) { return null; } }; } catch (OutOfMemoryException e) { assertEquals(allocator.getPeakMemoryAllocation(), 1024*1024); outOfMemory = true; } assertTrue(outOfMemory); allocator.close(); } |
PivotDef { public PivotDef( int blockWidth, int variableCount, int bitCount, List<VectorPivotDef> fields) { super(); this.blockWidth = blockWidth; this.variableCount = variableCount; this.bitCount = bitCount; this.vectorPivots = ImmutableList.copyOf(fields); this.fixedPivots = FluentIterable.from(vectorPivots).filter(new Predicate<VectorPivotDef>(){ @Override public boolean apply(VectorPivotDef input) { return input.getType().mode != FieldMode.VARIABLE; }}).toList(); this.bitPivots = FluentIterable.from(vectorPivots).filter(new Predicate<VectorPivotDef>(){ @Override public boolean apply(VectorPivotDef input) { return input.getType().mode == FieldMode.BIT; }}).toList(); this.nonBitFixedPivots = FluentIterable.from(vectorPivots).filter(new Predicate<VectorPivotDef>(){ @Override public boolean apply(VectorPivotDef input) { return input.getType().mode == FieldMode.FIXED; }}).toList(); this.variablePivots = FluentIterable.from(vectorPivots).filter(new Predicate<VectorPivotDef>(){ @Override public boolean apply(VectorPivotDef input) { return input.getType().mode == FieldMode.VARIABLE; }}).toList(); this.outputVectors = FluentIterable.from(vectorPivots).transform(new Function<VectorPivotDef, FieldVector>(){ @Override public FieldVector apply(VectorPivotDef input) { return input.getOutgoingVector(); }}).toList(); } PivotDef(
int blockWidth,
int variableCount,
int bitCount,
List<VectorPivotDef> fields); ImmutableList<VectorPivotDef> getBitPivots(); ImmutableList<VectorPivotDef> getNonBitFixedPivots(); int getBlockWidth(); int getVariableCount(); int getBitCount(); List<VectorPivotDef> getVectorPivots(); List<VectorPivotDef> getVariablePivots(); List<VectorPivotDef> getFixedPivots(); List<FieldVector> getOutputVectors(); } | @Test public void pivotDef(){ try( IntVector col1 = new IntVector("col1", allocator); IntVector col2 = new IntVector("col2", allocator); BigIntVector col3 = new BigIntVector("col3", allocator); TimeMilliVector col4 = new TimeMilliVector("col4", allocator); VarCharVector col5 = new VarCharVector("col5", allocator); VarCharVector col6 = new VarCharVector("col6", allocator); BitVector col7 = new BitVector("col7", allocator); ){ PivotDef pivot = PivotBuilder.getBlockDefinition(FluentIterable .from(ImmutableList.of(col1, col2, col3, col4, col5, col6, col7)) .transform(new Function<FieldVector, FieldVectorPair>(){ @Override public FieldVectorPair apply(FieldVector input) { return new FieldVectorPair(input, input); }}) .toList()); assertEquals(2, pivot.getVariableCount()); assertEquals( 4 + 4 + 4 + 8 + 4 + 0 + 0 + 0 + 4 , pivot.getBlockWidth()); assertEquals(8, pivot.getBitCount()); assertEquals(ImmutableList.of( new VectorPivotDef(FieldType.FOUR_BYTE, 0, 0, 4, col1, col1), new VectorPivotDef(FieldType.FOUR_BYTE, 0, 1, 8, col2, col2), new VectorPivotDef(FieldType.EIGHT_BYTE, 0, 2, 12, col3, col3), new VectorPivotDef(FieldType.FOUR_BYTE, 0, 3, 20, col4, col4), new VectorPivotDef(FieldType.BIT, 0, 6, 7, col7, col7) ), pivot.getFixedPivots()); assertEquals(ImmutableList.of( new VectorPivotDef(FieldType.VARIABLE, 0, 4, 0, col5, col5), new VectorPivotDef(FieldType.VARIABLE, 0, 5, 1, col6, col6) ), pivot.getVariablePivots()); } } |
BoundedPivots { public static int pivot(PivotDef pivot, int start, int count, FixedBlockVector fixedBlock, VariableBlockVector variable) { if (pivot.getVariableCount() > 0) { int updatedCount = pivotVariableLengths(pivot.getVariablePivots(), fixedBlock, variable, start, count); Preconditions.checkState(updatedCount <= count); count = updatedCount; } for(VectorPivotDef def : pivot.getFixedPivots()){ switch(def.getType()){ case BIT: pivotBit(def, fixedBlock, start, count); break; case FOUR_BYTE: pivot4Bytes(def, fixedBlock, start, count); break; case EIGHT_BYTE: pivot8Bytes(def, fixedBlock, start, count); break; case SIXTEEN_BYTE: pivot16Bytes(def, fixedBlock, start, count); break; case VARIABLE: default: throw new UnsupportedOperationException("Pivot: unknown type: " + Describer.describe(def.getIncomingVector().getField())); } } return count; } static int pivot(PivotDef pivot, int start, int count, FixedBlockVector fixedBlock, VariableBlockVector variable); } | @Test public void boolNullEveryOther() throws Exception { final int count = 1024; try ( BitVector in = new BitVector("in", allocator); BitVector out = new BitVector("out", allocator); ) { in.allocateNew(count); ArrowBuf tempBuf = allocator.buffer(1024); for (int i = 0; i < count; i ++) { if (i % 2 == 0) { in.set(i, 1); } } in.setValueCount(count); final PivotDef pivot = PivotBuilder.getBlockDefinition(new FieldVectorPair(in, out)); try ( final FixedBlockVector fbv = new FixedBlockVector(allocator, pivot.getBlockWidth()); final VariableBlockVector vbv = new VariableBlockVector(allocator, pivot.getVariableCount()); ) { fbv.ensureAvailableBlocks(count); Pivots.pivot(pivot, count, fbv, vbv); Unpivots.unpivot(pivot, fbv, vbv, 0, count); for (int i = 0; i < count; i++) { assertEquals(in.getObject(i), out.getObject(i)); } } tempBuf.release(); } } |
ScanOperator implements ProducerOperator { @Override public void workOnOOB(OutOfBandMessage message) { final ArrowBuf msgBuf = message.getBuffer(); final String senderInfo = String.format("Frag %d:%d, OpId %d", message.getSendingMajorFragmentId(), message.getSendingMinorFragmentId(), message.getSendingOperatorId()); if (msgBuf==null || msgBuf.capacity()==0) { logger.warn("Empty runtime filter received from {}", senderInfo); return; } msgBuf.retain(); logger.info("Filter received from {}", senderInfo); try(RollbackCloseable closeOnErr = new RollbackCloseable()) { closeOnErr.add(msgBuf); final BloomFilter bloomFilter = BloomFilter.prepareFrom(msgBuf); final ExecProtos.RuntimeFilter protoFilter = message.getPayload(ExecProtos.RuntimeFilter.parser()); final RuntimeFilter filter = RuntimeFilter.getInstance(protoFilter, bloomFilter, senderInfo); boolean isAlreadyPresent = this.runtimeFilters.stream().anyMatch(r -> r.isOnSameColumns(filter)); if (protoFilter.getPartitionColumnFilter().getSizeBytes() != bloomFilter.getSizeInBytes()) { logger.error("Invalid incoming runtime filter size. Expected size {}, actual size {}, filter {}", protoFilter.getPartitionColumnFilter().getSizeBytes(), bloomFilter.getSizeInBytes(), bloomFilter.toString()); AutoCloseables.close(filter); } else if (isAlreadyPresent) { logger.debug("Skipping enforcement because filter is already present {}", filter); AutoCloseables.close(filter); } else { logger.debug("Adding filter to the record readers {}, current reader {}, FPP {}.", filter, this.currentReader.getClass().getName(), bloomFilter.getExpectedFPP()); this.runtimeFilters.add(filter); this.currentReader.addRuntimeFilter(filter); } closeOnErr.commit(); } catch (Exception e) { logger.warn("Error while merging runtime filter piece from " + message.getSendingMajorFragmentId() + ":" + message.getSendingMinorFragmentId(), e); } } ScanOperator(SubScan config, OperatorContext context, Iterator<RecordReader> readers); ScanOperator(SubScan config, OperatorContext context,
Iterator<RecordReader> readers, GlobalDictionaries globalDictionaries, CoordinationProtos.NodeEndpoint foremanEndpoint,
CoordExecRPC.QueryContextInformation queryContextInformation); @Override VectorAccessible setup(); @Override State getState(); @Override int outputData(); @Override void workOnOOB(OutOfBandMessage message); @Override OUT accept(OperatorVisitor<OUT, IN, EXCEP> visitor, IN value); @Override void close(); } | @Test public void testWorkOnOOBRuntimeFilterInvalidFilterSize() { int buildMinorFragment1 = 2; int buildMajorFragment1 = 1; try (ArrowBuf oobMessageBuf = testAllocator.buffer(128)) { RuntimeFilter filter1 = newRuntimeFilter(32, "col1", "col2"); OutOfBandMessage msg1 = newOOBMessage(filter1, oobMessageBuf, buildMajorFragment1, buildMinorFragment1); RecordReader mockReader = mock(RecordReader.class); ScanOperator scanOp = new ScanOperator(mock(SubScan.class), getMockContext(), Lists.newArrayList(mockReader).iterator(), null, null, null); scanOp.workOnOOB(msg1); msg1.getBuffer().release(); verify(mockReader, never()).addRuntimeFilter(any(com.dremio.exec.store.RuntimeFilter.class)); } } |
RpcCompatibilityEncoder extends MessageToMessageEncoder<OutboundRpcMessage> { @Override protected void encode(ChannelHandlerContext context, OutboundRpcMessage message, List<Object> out) throws Exception { if (message.mode != RpcMode.RESPONSE_FAILURE) { out.add(message); return; } final MessageLite pBody = message.pBody; if (!(pBody instanceof DremioPBError)) { out.add(message); return; } DremioPBError error = (DremioPBError) pBody; DremioPBError newError = ErrorCompatibility.convertIfNecessary(error); out.add(new OutboundRpcMessage(message.mode, message.rpcType, message.coordinationId, newError, message.dBodies)); } } | @Test public void testIgnoreOtherMessages() throws Exception { RpcCompatibilityEncoder encoder = new RpcCompatibilityEncoder(); ChannelHandlerContext context = mock(ChannelHandlerContext.class); OutboundRpcMessage message = new OutboundRpcMessage(RpcMode.PING, 0, 0, Acks.OK); List<Object> out = new ArrayList<>(); encoder.encode(context, message, out); assertEquals(1, out.size()); assertSame(message, out.get(0)); }
@Test public void testIgnoreNonDremioPBErrorMessage() throws Exception { RpcCompatibilityEncoder encoder = new RpcCompatibilityEncoder(); ChannelHandlerContext context = mock(ChannelHandlerContext.class); OutboundRpcMessage message = new OutboundRpcMessage(RpcMode.RESPONSE_FAILURE, 0, 0, Acks.OK); List<Object> out = new ArrayList<>(); encoder.encode(context, message, out); assertEquals(1, out.size()); assertSame(message, out.get(0)); }
@Test public void testUpdateErrorType() throws Exception { RpcCompatibilityEncoder encoder = new RpcCompatibilityEncoder(); ChannelHandlerContext context = mock(ChannelHandlerContext.class); DremioPBError error = DremioPBError.newBuilder() .setErrorType(ErrorType.IO_EXCEPTION) .setMessage("test message") .build(); OutboundRpcMessage message = new OutboundRpcMessage(RpcMode.RESPONSE_FAILURE, RpcType.RESP_QUERY_PROFILE, 12, error); List<Object> out = new ArrayList<>(); encoder.encode(context, message, out); assertEquals(1, out.size()); OutboundRpcMessage received = (OutboundRpcMessage) out.get(0); assertEquals(RpcMode.RESPONSE_FAILURE, received.mode); assertEquals(12, received.coordinationId); DremioPBError newError = (DremioPBError) received.pBody; assertEquals(ErrorType.RESOURCE, newError.getErrorType()); assertEquals("test message", newError.getMessage()); } |
FragmentTracker implements AutoCloseable { public void populate(List<PlanFragmentFull> fragments, ResourceSchedulingDecisionInfo decisionInfo) { for (PlanFragmentFull fragment : fragments) { final NodeEndpoint assignment = fragment.getMinor().getAssignment(); pendingNodes.add(assignment); } executorSet = executorSetService.getExecutorSet(decisionInfo.getEngineId(), decisionInfo.getSubEngineId()); executorSet.addNodeStatusListener(nodeStatusListener); validateEndpoints(); checkAndNotifyCompletionListener(); } FragmentTracker(
QueryId queryId,
CompletionListener completionListener,
Runnable queryCloser,
ExecutorServiceClientFactory executorServiceClientFactory,
ExecutorSetService executorSetService
); QueryId getQueryId(); void populate(List<PlanFragmentFull> fragments, ResourceSchedulingDecisionInfo decisionInfo); void nodeMarkFirstError(NodeQueryFirstError firstError); void nodeCompleted(NodeQueryCompletion completion); void screenCompleted(); @Override void close(); } | @Test public void testEmptyFragmentList() { InOrder inOrder = Mockito.inOrder(completionListener, queryCloser); FragmentTracker fragmentTracker = new FragmentTracker(queryId, completionListener, queryCloser, null, new LocalExecutorSetService(DirectProvider.wrap(coordinator), DirectProvider.wrap(optionManager))); fragmentTracker.populate(Collections.emptyList(), new ResourceSchedulingDecisionInfo()); inOrder.verify(completionListener).succeeded(); inOrder.verify(queryCloser).run(); } |
RexToExpr { public static LogicalExpression toExpr(ParseContext context, RelDataType rowType, RexBuilder rexBuilder, RexNode expr) { return toExpr(context, rowType, rexBuilder, expr, true); } static LogicalExpression toExpr(ParseContext context, RelDataType rowType, RexBuilder rexBuilder, RexNode expr); static LogicalExpression toExpr(ParseContext context, RelDataType rowType, RexBuilder rexBuilder, RexNode expr, boolean throwUserException); static LogicalExpression toExpr(ParseContext context, RelDataType rowType, RexBuilder rexBuilder, RexNode expr, boolean throwUserException, IntFunction<Optional<Integer>> inputFunction); static List<NamedExpression> projectToExpr(ParseContext context, List<Pair<RexNode, String>> projects, RelNode input); static List<NamedExpression> groupSetToExpr(RelNode input, ImmutableBitSet groupSet); static List<NamedExpression> aggsToExpr(
RelDataType rowType, RelNode input, ImmutableBitSet groupSet, List<AggregateCall> aggCalls); static boolean isLiteralNull(RexLiteral literal); static final String UNSUPPORTED_REX_NODE_ERROR; } | @Test public void testUnsupportedRexNode() { try { RelDataTypeFactory relFactory = SqlTypeFactoryImpl.INSTANCE; RexBuilder rex = new DremioRexBuilder(relFactory); RelDataType anyType = relFactory.createSqlType(SqlTypeName.ANY); List<RexNode> emptyList = new LinkedList<>(); ImmutableList<RexFieldCollation> e = ImmutableList.copyOf(new RexFieldCollation[0]); RexNode window = rex.makeOver(anyType, SqlStdOperatorTable.AVG, emptyList, emptyList, e, null, null, true, false, false, false); RexToExpr.toExpr(null, null, null, window); } catch (UserException e) { if (e.getMessage().contains(RexToExpr.UNSUPPORTED_REX_NODE_ERROR)) { return; } Assert.fail("Hit exception with unexpected error message"); } Assert.fail("Failed to raise the expected exception"); } |
SoftAffinityFragmentParallelizer implements FragmentParallelizer { @VisibleForTesting List<NodeEndpoint> findEndpoints(final Collection<NodeEndpoint> activeEndpoints, final Map<NodeEndpoint, EndpointAffinity> endpointAffinityMap, final int width, final ParallelizationParameters parameters) throws PhysicalOperatorSetupException { List<EndpointAffinity> sortedAffinityList; Set<NodeEndpoint> endpointsWithAffinity; if (endpointAffinityMap.isEmpty()) { endpointsWithAffinity = ImmutableSet.of(); sortedAffinityList = ImmutableList.of(); } else { final Set<NodeEndpoint> activeEndpointsSet = ImmutableSet.copyOf(activeEndpoints); sortedAffinityList = endpointAffinityMap.values() .stream() .filter((endpointAffinity) -> activeEndpointsSet.contains(endpointAffinity.getEndpoint())) .sorted(Comparator.comparing(EndpointAffinity::getAffinity).reversed()) .collect(ImmutableList.toImmutableList()); endpointsWithAffinity = sortedAffinityList.stream() .map(EndpointAffinity::getEndpoint) .collect(ImmutableSet.toImmutableSet()); } final List<NodeEndpoint> endpoints = Lists.newArrayList(); if (!sortedAffinityList.isEmpty()) { int numRequiredNodes = 0; for (EndpointAffinity ep : sortedAffinityList) { if (ep.isAssignmentRequired()) { numRequiredNodes++; } else { break; } } if (width < numRequiredNodes) { throw new PhysicalOperatorSetupException("Can not parallelize the fragment as the parallelization width (" + width + ") is " + "less than the number of mandatory nodes (" + numRequiredNodes + " nodes with +INFINITE affinity)."); } int affinedSlots = Math.max(1, (int) (parameters.getAffinityFactor() * width / activeEndpoints.size())) * sortedAffinityList.size(); affinedSlots = Math.max(affinedSlots, numRequiredNodes); affinedSlots = Math.min(affinedSlots, width); Iterator<EndpointAffinity> affinedEPItr = Iterators.cycle(sortedAffinityList); while (endpoints.size() < affinedSlots) { EndpointAffinity ea = affinedEPItr.next(); endpoints.add(ea.getEndpoint()); } } if (endpoints.size() < width) { List<NodeEndpoint> endpointsWithNoAffinity; if (endpointsWithAffinity.isEmpty()) { endpointsWithNoAffinity = Lists.newArrayList(activeEndpoints); } else { endpointsWithNoAffinity = Lists.newArrayList(Sets.difference(ImmutableSet.copyOf(activeEndpoints), endpointsWithAffinity)); } Collections.shuffle(endpointsWithNoAffinity, ThreadLocalRandom.current()); Iterator<NodeEndpoint> otherEPItr = Iterators.cycle(endpointsWithNoAffinity.size() > 0 ? endpointsWithNoAffinity : endpointsWithAffinity); while (endpoints.size() < width) { endpoints.add(otherEPItr.next()); } } return endpoints; } @Override void parallelizeFragment(final Wrapper fragmentWrapper, final ParallelizationParameters parameters,
final Collection<NodeEndpoint> activeEndpoints); @Override int getIdealFragmentWidth(final Wrapper fragment, final ParallelizationParameters parameters); static final SoftAffinityFragmentParallelizer INSTANCE; } | @Test public void testNodesSorting() throws Exception { List<CoordinationProtos.NodeEndpoint> activeEndpoints = ImmutableList.of(N2_EP1, N3_EP1, N4_EP2); EndpointAffinity N1_EP1A = new EndpointAffinity(N1_EP1, 0.5); EndpointAffinity N1_EP2A = new EndpointAffinity(N1_EP2, 0.5); EndpointAffinity N2_EP1A = new EndpointAffinity(N2_EP1, 0.5); Map<CoordinationProtos.NodeEndpoint, EndpointAffinity> endpointAffinityMap = ImmutableMap.of(N1_EP1, N1_EP1A, N2_EP1, N2_EP1A, N1_EP2, N1_EP2A); List<CoordinationProtos.NodeEndpoint> endpoints = SoftAffinityFragmentParallelizer.INSTANCE .findEndpoints( activeEndpoints, endpointAffinityMap, 1, newParameters(3, 5, 10, 0.3D, false)); assertNotNull(endpoints); assertEquals(1, endpoints.size()); assertEquals(N2_EP1, endpoints.get(0)); N2_EP1A = new EndpointAffinity(N2_EP1, 0.3); endpointAffinityMap = ImmutableMap.of(N1_EP1, N1_EP1A, N2_EP1, N2_EP1A, N1_EP2, N1_EP2A); endpoints = SoftAffinityFragmentParallelizer.INSTANCE .findEndpoints( activeEndpoints, endpointAffinityMap, 1, newParameters(3, 5, 10, 0.3D, false)); assertNotNull(endpoints); assertEquals(1, endpoints.size()); assertEquals(N2_EP1, endpoints.get(0)); } |
EndpointsIndex { public MinorFragmentEndpoint getFragmentEndpoint(MinorFragmentIndexEndpoint ep) { return fragmentsEndpointMap.computeIfAbsent(ep, k -> new MinorFragmentEndpoint(k.getMinorFragmentId(), endpoints.get(k.getEndpointIndex()))); } EndpointsIndex(List<NodeEndpoint> endpoints); EndpointsIndex(); NodeEndpoint getNodeEndpoint(int idx); MinorFragmentEndpoint getFragmentEndpoint(MinorFragmentIndexEndpoint ep); List<MinorFragmentEndpoint> getFragmentEndpoints(List<MinorFragmentIndexEndpoint> eps); } | @Test public void indexEndpointSingle() { EndpointsIndex.Builder indexBuilder = new EndpointsIndex.Builder(); NodeEndpoint ep = NodeEndpoint.newBuilder() .setAddress("localhost") .setFabricPort(1700) .build(); MinorFragmentEndpoint expected = new MinorFragmentEndpoint(16, ep); MinorFragmentIndexEndpoint indexEndpoint = indexBuilder.addFragmentEndpoint(16, ep); EndpointsIndex index = new EndpointsIndex(indexBuilder.getAllEndpoints()); MinorFragmentEndpoint out = index.getFragmentEndpoint(indexEndpoint); assertEquals(expected, out); } |
HardAffinityFragmentParallelizer implements FragmentParallelizer { @Override public void parallelizeFragment(final Wrapper fragmentWrapper, final ParallelizationParameters parameters, final Collection<NodeEndpoint> activeEndpoints) throws PhysicalOperatorSetupException { final Stats stats = fragmentWrapper.getStats(); final ParallelizationInfo pInfo = stats.getParallelizationInfo(); int totalMaxWidth = 0; final Map<NodeEndpoint, EndpointAffinity> endpointPool = Maps.newHashMap(); for(Entry<NodeEndpoint, EndpointAffinity> entry : pInfo.getEndpointAffinityMap().entrySet()) { if (entry.getValue().isAssignmentRequired()) { endpointPool.put(entry.getKey(), entry.getValue()); totalMaxWidth += Math.min(parameters.getMaxWidthPerNode(), entry.getValue().getMaxWidth()); if (totalMaxWidth < 0) { totalMaxWidth = Integer.MAX_VALUE; } } } int width = (int) Math.ceil(stats.getMaxCost() / parameters.getSliceTarget()); width = Math.max(endpointPool.size(), width); width = Math.max(1, Math.min(width, pInfo.getMaxWidth())); checkOrThrow(endpointPool.size() <= width, logger, "Number of mandatory endpoints ({}) that require an assignment is more than the allowed fragment max " + "width ({}).", endpointPool.size(), pInfo.getMaxWidth()); width = Math.max(1, Math.min(width, endpointPool.size()*parameters.getMaxWidthPerNode())); width = Math.min(totalMaxWidth, width); final Map<NodeEndpoint, Integer> endpoints = Maps.newHashMap(); for(Entry<NodeEndpoint, EndpointAffinity> entry : endpointPool.entrySet()) { endpoints.put(entry.getKey(), 1); } int totalAssigned = endpoints.size(); double totalAffinity = 1.0; for(EndpointAffinity epAff : endpointPool.values()) { totalAffinity += epAff.getAffinity(); } int remainingSlots = width - endpoints.size(); while (remainingSlots > 0) { for(EndpointAffinity epAf : endpointPool.values()) { final int moreAllocation = (int) Math.ceil( (epAf.getAffinity() / totalAffinity) * remainingSlots); int currentAssignments = endpoints.get(epAf.getEndpoint()); for(int i=0; i < moreAllocation && totalAssigned < width && currentAssignments < parameters.getMaxWidthPerNode() && currentAssignments < epAf.getMaxWidth(); i++) { totalAssigned++; currentAssignments++; } endpoints.put(epAf.getEndpoint(), currentAssignments); } final int previousRemainingSlots = remainingSlots; remainingSlots = width - totalAssigned; if (previousRemainingSlots == remainingSlots) { logger.error("Can't parallelize fragment: " + "Every mandatory node has exhausted the maximum width per node limit." + EOL + "Endpoint pool: {}" + EOL + "Assignment so far: {}" + EOL + "Width: {}", endpointPool, endpoints, width); throw new PhysicalOperatorSetupException("Can not parallelize fragment."); } } final List<NodeEndpoint> assignedEndpoints = Lists.newArrayList(); for(Entry<NodeEndpoint, Integer> entry : endpoints.entrySet()) { for(int i=0; i < entry.getValue(); i++) { assignedEndpoints.add(entry.getKey()); } } fragmentWrapper.setWidth(width); fragmentWrapper.assignEndpoints(parameters, assignedEndpoints); } private HardAffinityFragmentParallelizer(); @Override void parallelizeFragment(final Wrapper fragmentWrapper, final ParallelizationParameters parameters,
final Collection<NodeEndpoint> activeEndpoints); @Override int getIdealFragmentWidth(final Wrapper fragment, final ParallelizationParameters parameters); static final HardAffinityFragmentParallelizer INSTANCE; } | @Test public void simpleCase1() throws Exception { final Wrapper wrapper = newWrapper(200, 1, 20, Collections.singletonList(new EndpointAffinity(N1_EP1, 1.0, true, 50))); INSTANCE.parallelizeFragment(wrapper, newParameters(SLICE_TARGET_DEFAULT, 5, 20), null); assertEquals(1, wrapper.getWidth()); final List<NodeEndpoint> assignedEps = wrapper.getAssignedEndpoints(); assertEquals(1, assignedEps.size()); assertEquals(N1_EP1, assignedEps.get(0)); }
@Test public void matchHardAffinity() throws Exception { final Wrapper wrapper = newSplitWrapper(200, 1, 20, Collections.singletonList(new EndpointAffinity(N1_EP1, 1.0, true, 20)), new ExecutionNodeMap(ImmutableList.of(N1_EP1)) ); INSTANCE.parallelizeFragment(wrapper, newParameters(SLICE_TARGET_DEFAULT, 5, 20), null); assertEquals(1, wrapper.getWidth()); final List<NodeEndpoint> assignedEps = wrapper.getAssignedEndpoints(); assertEquals(1, assignedEps.size()); assertEquals(N1_EP1, assignedEps.get(0)); }
@Test public void noMatchHardAffinity() throws Exception { try { final Wrapper wrapper = newSplitWrapper(200, 1, 20, Collections.singletonList(new EndpointAffinity(N1_EP1, 1.0, true, 20)), new ExecutionNodeMap(ImmutableList.of(N2_EP1)) ); INSTANCE.parallelizeFragment(wrapper, newParameters(SLICE_TARGET_DEFAULT, 5, 20), ImmutableList.of(N2_EP1)); fail("Should throw exception as affinity endpoint does not match active endpoint"); } catch (UserException uex) { assertTrue(uex.getMessage().startsWith("No executors are available for data with hard affinity.")); } }
@Test public void simpleCase2() throws Exception { final Wrapper wrapper = newWrapper(200, 1, 20, Collections.singletonList(new EndpointAffinity(N1_EP1, 1.0, true, 50))); INSTANCE.parallelizeFragment(wrapper, newParameters(1, 5, 20), null); assertEquals(5, wrapper.getWidth()); final List<NodeEndpoint> assignedEps = wrapper.getAssignedEndpoints(); assertEquals(5, assignedEps.size()); for (NodeEndpoint ep : assignedEps) { assertEquals(N1_EP1, ep); } }
@Test public void multiNodeCluster1() throws Exception { final Wrapper wrapper = newWrapper(200, 1, 20, ImmutableList.of( new EndpointAffinity(N1_EP1, 0.15, true, 50), new EndpointAffinity(N1_EP2, 0.15, true, 50), new EndpointAffinity(N2_EP1, 0.10, true, 50), new EndpointAffinity(N3_EP2, 0.20, true, 50), new EndpointAffinity(N4_EP2, 0.20, true, 50) )); INSTANCE.parallelizeFragment(wrapper, newParameters(SLICE_TARGET_DEFAULT, 5, 20), null); assertEquals(5, wrapper.getWidth()); final List<NodeEndpoint> assignedEps = wrapper.getAssignedEndpoints(); assertEquals(5, assignedEps.size()); assertTrue(assignedEps.contains(N1_EP1)); assertTrue(assignedEps.contains(N1_EP2)); assertTrue(assignedEps.contains(N2_EP1)); assertTrue(assignedEps.contains(N3_EP2)); assertTrue(assignedEps.contains(N4_EP2)); }
@Test public void multiNodeCluster2() throws Exception { final Wrapper wrapper = newWrapper(200, 1, 20, ImmutableList.of( new EndpointAffinity(N1_EP2, 0.15, true, 50), new EndpointAffinity(N2_EP2, 0.15, true, 50), new EndpointAffinity(N3_EP1, 0.10, true, 50), new EndpointAffinity(N4_EP2, 0.20, true, 50), new EndpointAffinity(N1_EP1, 0.20, true, 50) )); INSTANCE.parallelizeFragment(wrapper, newParameters(1, 5, 20), null); assertEquals(20, wrapper.getWidth()); final List<NodeEndpoint> assignedEps = wrapper.getAssignedEndpoints(); assertEquals(20, assignedEps.size()); final HashMultiset<NodeEndpoint> counts = HashMultiset.create(); for(final NodeEndpoint ep : assignedEps) { counts.add(ep); } assertTrue(counts.count(N1_EP2) <= 5); assertTrue(counts.count(N2_EP2) <= 5); assertTrue(counts.count(N3_EP1) <= 5); assertTrue(counts.count(N4_EP2) <= 5); assertTrue(counts.count(N1_EP1) <= 5); }
@Test public void multiNodeClusterNonNormalizedAffinities() throws Exception { final Wrapper wrapper = newWrapper(2000, 1, 250, ImmutableList.of( new EndpointAffinity(N1_EP2, 15, true, 50), new EndpointAffinity(N2_EP2, 15, true, 50), new EndpointAffinity(N3_EP1, 10, true, 50), new EndpointAffinity(N4_EP2, 20, true, 50), new EndpointAffinity(N1_EP1, 20, true, 50) )); INSTANCE.parallelizeFragment(wrapper, newParameters(100, 20, 80), null); assertEquals(20, wrapper.getWidth()); final List<NodeEndpoint> assignedEps = wrapper.getAssignedEndpoints(); assertEquals(20, assignedEps.size()); final HashMultiset<NodeEndpoint> counts = HashMultiset.create(); for(final NodeEndpoint ep : assignedEps) { counts.add(ep); } assertThat(counts.count(N1_EP2), CoreMatchers.allOf(greaterThan(1), lessThanOrEqualTo(5))); assertThat(counts.count(N2_EP2), CoreMatchers.allOf(greaterThan(1), lessThanOrEqualTo(5))); assertThat(counts.count(N3_EP1), CoreMatchers.allOf(greaterThan(1), lessThanOrEqualTo(5))); assertThat(counts.count(N4_EP2), CoreMatchers.allOf(greaterThan(1), lessThanOrEqualTo(5))); assertThat(counts.count(N1_EP1), CoreMatchers.allOf(greaterThan(1), lessThanOrEqualTo(5))); }
@Test public void multiNodeClusterNegative2() throws Exception { final Wrapper wrapper = newWrapper(200, 1, 3, ImmutableList.of( new EndpointAffinity(N1_EP2, 0.15, true, 50), new EndpointAffinity(N2_EP2, 0.15, true, 50), new EndpointAffinity(N3_EP1, 0.10, true, 50), new EndpointAffinity(N4_EP2, 0.20, true, 50), new EndpointAffinity(N1_EP1, 0.20, true, 50) )); try { INSTANCE.parallelizeFragment(wrapper, newParameters(1, 2, 2), null); fail("Expected an exception, because max fragment width (3) is less than the number of mandatory nodes (5)"); } catch (Exception e) { } } |
MinorDataReader { public ByteString readProtoEntry(OpProps props, String key) throws Exception { return attrsMap.getAttrValue(props, key); } MinorDataReader(
FragmentHandle handle,
MinorDataSerDe serDe,
PlanFragmentsIndex index,
MinorAttrsMap attrsMap); FragmentHandle getHandle(); MinorFragmentIndexEndpoint readMinorFragmentIndexEndpoint(OpProps props, String key); List<MinorFragmentIndexEndpoint> readMinorFragmentIndexEndpoints(OpProps props, String key); ByteString readProtoEntry(OpProps props, String key); NormalizedPartitionInfo readSplitPartition(OpProps props, String key); T readJsonEntry(OpProps props, String key, Class<T> clazz); } | @Test public void multiAttrsInSamePOP() throws Exception { PlanFragmentsIndex.Builder indexBuilder = new PlanFragmentsIndex.Builder(); MinorDataSerDe serDe = new MinorDataSerDe(null, null); MinorDataWriter writer = new MinorDataWriter(null, dummyEndpoint, serDe, indexBuilder); MockStorePOP pop = new MockStorePOP(OpProps.prototype(1), null); List<HBaseSubScanSpec> specList = new ArrayList<>(); for (int i = 0; i < 4; ++i) { HBaseSubScanSpec spec = HBaseSubScanSpec .newBuilder() .setTableName("testTable" + i) .build(); specList.add(spec); writer.writeProtoEntry(pop.getProps(), "testKey" + i, spec); } MinorAttrsMap minorAttrsMap = MinorAttrsMap.create(writer.getAllAttrs()); MinorDataReader reader = new MinorDataReader(null, serDe, null, minorAttrsMap); for (int i = 0; i < 4; ++i) { HBaseSubScanSpec spec = HBaseSubScanSpec.parseFrom(reader.readProtoEntry(pop.getProps(), "testKey" + i)); assertEquals(spec, specList.get(i)); } }
@Test public void sameKeyMultiPOPs() throws Exception { PlanFragmentsIndex.Builder indexBuilder = new PlanFragmentsIndex.Builder(); MinorDataSerDe serDe = new MinorDataSerDe(null, null); MinorDataWriter writer = new MinorDataWriter(null, dummyEndpoint, serDe, indexBuilder); MockStorePOP pop1 = new MockStorePOP(OpProps.prototype(1), null); MockStorePOP pop2 = new MockStorePOP(OpProps.prototype(2), null); List<HBaseSubScanSpec> specList = new ArrayList<>(); for (int i = 0; i < 2; ++i) { HBaseSubScanSpec spec = HBaseSubScanSpec .newBuilder() .setTableName("testTable" + i) .build(); specList.add(spec); writer.writeProtoEntry(i == 0 ? pop1.getProps() : pop2.getProps(), "testKey", spec); } MinorAttrsMap minorAttrsMap = MinorAttrsMap.create(writer.getAllAttrs()); MinorDataReader reader = new MinorDataReader(null, serDe, null, minorAttrsMap); HBaseSubScanSpec spec1 = HBaseSubScanSpec.parseFrom(reader.readProtoEntry(pop1.getProps(), "testKey")); assertEquals(spec1, specList.get(0)); HBaseSubScanSpec spec2 = HBaseSubScanSpec.parseFrom(reader.readProtoEntry(pop2.getProps(), "testKey")); assertEquals(spec2, specList.get(1)); } |
MaterializationList implements MaterializationProvider { @VisibleForTesting protected List<DremioMaterialization> build(final MaterializationDescriptorProvider provider) { final Set<String> exclusions = Sets.newHashSet(session.getSubstitutionSettings().getExclusions()); final Set<String> inclusions = Sets.newHashSet(session.getSubstitutionSettings().getInclusions()); final boolean hasInclusions = !inclusions.isEmpty(); final List<DremioMaterialization> materializations = Lists.newArrayList(); for (final MaterializationDescriptor descriptor : provider.get()) { if( (hasInclusions && !inclusions.contains(descriptor.getLayoutId())) || exclusions.contains(descriptor.getLayoutId()) ) { continue; } try { final DremioMaterialization materialization = descriptor.getMaterializationFor(converter); if (materialization == null) { continue; } mapping.put(TablePath.of(descriptor.getPath()), descriptor); materializations.add(materialization); } catch (Throwable e) { logger.warn("failed to expand materialization {}", descriptor.getMaterializationId(), e); } } return materializations; } MaterializationList(final SqlConverter converter, final UserSession session,
final MaterializationDescriptorProvider provider); @Override List<DremioMaterialization> getMaterializations(); @Override java.util.Optional<DremioMaterialization> getDefaultRawMaterialization(NamespaceKey path, List<String> vdsFields); Optional<MaterializationDescriptor> getDescriptor(final List<String> path); Optional<MaterializationDescriptor> getDescriptor(final TablePath path); } | @Test public void testListDiscardsGivenExclusions() { when(excluded.getMaterializationFor(converter)).thenReturn(relOptMat1); when(excluded.getLayoutId()).thenReturn("rid-1"); when(included.getMaterializationFor(converter)).thenReturn(relOptMat2); when(included.getLayoutId()).thenReturn("rid-2"); SubstitutionSettings materializationSettings = new SubstitutionSettings(ImmutableList.of("rid-1")); when(session.getSubstitutionSettings()).thenReturn(materializationSettings); when(provider.get()).thenReturn(ImmutableList.of(excluded, included)); final MaterializationList materializations = new MaterializationList(converter, session, provider); materializations.build(provider); verify(excluded, never()).getMaterializationFor(any(SqlConverter.class)); verify(included, atLeastOnce()).getMaterializationFor(converter); } |
MaterializationExpander { @VisibleForTesting static boolean areRowTypesEqual(RelDataType rowType1, RelDataType rowType2) { if (rowType1 == rowType2) { return true; } if (rowType2.getFieldCount() != rowType1.getFieldCount()) { return false; } final List<RelDataTypeField> f1 = rowType1.getFieldList(); final List<RelDataTypeField> f2 = rowType2.getFieldList(); for (Pair<RelDataTypeField, RelDataTypeField> pair : Pair.zip(f1, f2)) { final RelDataType type1 = JavaTypeFactoryImpl.INSTANCE.createTypeWithNullability(pair.left.getType(), false); final RelDataType type2 = JavaTypeFactoryImpl.INSTANCE.createTypeWithNullability(pair.right.getType(), false); if (type1.equals(type2)) { continue; } if (type1.getSqlTypeName() == SqlTypeName.ANY || type2.getSqlTypeName() == SqlTypeName.ANY) { continue; } if (type1.getSqlTypeName().getFamily() == SqlTypeFamily.CHARACTER && type2.getSqlTypeName().getFamily() == SqlTypeFamily.CHARACTER) { continue; } if (type1.getSqlTypeName() == SqlTypeName.DOUBLE && type2.getSqlTypeName() == SqlTypeName .DECIMAL || isSumAggOutput(type1, type2)) { continue; } return false; } return true; } private MaterializationExpander(final SqlConverter parent); DremioMaterialization expand(MaterializationDescriptor descriptor); static RelNode deserializePlan(final byte[] planBytes, SqlConverter parent); static MaterializationExpander of(final SqlConverter parent); } | @Test public void testAreRowTypesEqualIgnoresNames() { final RelDataType type1 = typeFactory.createStructType( asList( typeFactory.createSqlType(SqlTypeName.INTEGER), typeFactory.createSqlType(SqlTypeName.BIGINT), typeFactory.createSqlType(SqlTypeName.FLOAT), typeFactory.createSqlType(SqlTypeName.DOUBLE), typeFactory.createSqlType(SqlTypeName.DATE), typeFactory.createSqlType(SqlTypeName.TIMESTAMP), typeFactory.createSqlType(SqlTypeName.VARCHAR), typeFactory.createSqlType(SqlTypeName.BOOLEAN) ), asList("intC", "bigIntC", "floatC", "doubleC", "dateC", "tsC", "varcharC", "boolC") ); final RelDataType type2 = typeFactory.createStructType( asList( typeFactory.createSqlType(SqlTypeName.INTEGER), typeFactory.createSqlType(SqlTypeName.BIGINT), typeFactory.createSqlType(SqlTypeName.FLOAT), typeFactory.createSqlType(SqlTypeName.DOUBLE), typeFactory.createSqlType(SqlTypeName.DATE), typeFactory.createSqlType(SqlTypeName.TIMESTAMP), typeFactory.createSqlType(SqlTypeName.VARCHAR), typeFactory.createSqlType(SqlTypeName.BOOLEAN) ), asList("intC2", "bigIntC2", "floatC2", "doubleC2", "dateC2", "tsC2", "varcharC2", "boolC2") ); Assert.assertTrue(MaterializationExpander.areRowTypesEqual(type1, type2)); }
@Test public void testAreRowTypesEqualIgnoresNullability() { final RelDataType type1 = typeFactory.createStructType( asList( typeFactory.createSqlType(SqlTypeName.INTEGER), typeFactory.createSqlType(SqlTypeName.DOUBLE), typeFactory.createSqlType(SqlTypeName.TIMESTAMP), typeFactory.createSqlType(SqlTypeName.BOOLEAN) ), asList("intC", "doubleC", "tsC", "boolC") ); final RelDataType type2 = typeFactory.createStructType( asList( typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.INTEGER), true), typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.DOUBLE), true), typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.TIMESTAMP), true), typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.BOOLEAN), true) ), asList("intC2", "doubleC2", "tsC2", "boolC2") ); Assert.assertTrue(MaterializationExpander.areRowTypesEqual(type1, type2)); }
@Test public void testAreRowTypesEqualIgnoresAny() { final RelDataType type1 = typeFactory.createStructType( asList( typeFactory.createSqlType(SqlTypeName.INTEGER), typeFactory.createSqlType(SqlTypeName.BIGINT) ), asList("intC", "bigIntC") ); final RelDataType type2 = typeFactory.createStructType( asList( typeFactory.createSqlType(SqlTypeName.INTEGER), typeFactory.createSqlType(SqlTypeName.ANY) ), asList("intC", "anyC") ); Assert.assertTrue(MaterializationExpander.areRowTypesEqual(type1, type2)); }
@Test public void testAreRowTypesEqualMatchesCharVarchar() { final RelDataType type1 = typeFactory.createStructType( asList( typeFactory.createSqlType(SqlTypeName.CHAR), typeFactory.createSqlType(SqlTypeName.VARCHAR) ), asList("charC", "varcharC") ); final RelDataType type2 = typeFactory.createStructType( asList( typeFactory.createSqlType(SqlTypeName.VARCHAR), typeFactory.createSqlType(SqlTypeName.CHAR) ), asList("varcharC", "charC") ); Assert.assertTrue(MaterializationExpander.areRowTypesEqual(type1, type2)); }
@Test public void testTimestampPrecisionMismatch() { final RelDataTypeFactory calciteFactory = new org.apache.calcite.jdbc.JavaTypeFactoryImpl(); final RelDataType type1 = calciteFactory.createStructType( Collections.singletonList(calciteFactory.createSqlType(SqlTypeName.TIMESTAMP, 0)), Collections.singletonList("ts0") ); final RelDataType type2 = typeFactory.createStructType( Collections.singletonList(typeFactory.createSqlType(SqlTypeName.TIMESTAMP, 3)), Collections.singletonList("ts3") ); Assert.assertFalse(MaterializationExpander.areRowTypesEqual(type1, type2)); } |
JSONElementLocator { public static JsonPath parsePath(String path) { if (path.startsWith(VALUE_PLACEHOLDER)) { return new JsonPath(path.substring(VALUE_PLACEHOLDER.length())); } throw new IllegalArgumentException(path + " must start with 'value'"); } JSONElementLocator(String text); static JsonPath parsePath(String path); Interval locatePath(JsonPath searchedPath); JsonSelection locate(int selectionStart, int selectionEnd); } | @Test public void testParseJsonPath() throws Exception { JsonPath p = JSONElementLocator.parsePath("value.a"); assertEquals(p.toString(), 1, p.size()); assertEquals(p.toString(), "a", p.last().asObject().getField()); }
@Test public void testParseJsonPath2() throws Exception { JsonPath p = JSONElementLocator.parsePath("value.a.b.c"); assertEquals(p.toString(), 3, p.size()); assertEquals(new JsonPath(new ObjectJsonPathElement("a"), new ObjectJsonPathElement("b"), new ObjectJsonPathElement("c")), p); }
@Test public void testParseJsonPath3() throws Exception { JsonPath p = JSONElementLocator.parsePath("value[0][1][2]"); assertEquals(p.toString(), 3, p.size()); assertEquals(new JsonPath(new ArrayJsonPathElement(0), new ArrayJsonPathElement(1), new ArrayJsonPathElement(2)), p); }
@Test public void testParseJsonPath4() throws Exception { JsonPath p = JSONElementLocator.parsePath("value[0].a[1]"); assertEquals(p.toString(), 3, p.size()); assertEquals(new JsonPath(new ArrayJsonPathElement(0), new ObjectJsonPathElement("a"), new ArrayJsonPathElement(1)), p); }
@Test public void testParseJsonPath5() throws Exception { JsonPath p = JSONElementLocator.parsePath("value.a[0].b"); assertEquals(p.toString(), 3, p.size()); assertEquals(new JsonPath(new ObjectJsonPathElement("a"), new ArrayJsonPathElement(0), new ObjectJsonPathElement("b")), p); } |
MetadataProviderConditions { public static Predicate<String> getTableTypePredicate(List<String> tableTypeFilter) { return tableTypeFilter.isEmpty() ? ALWAYS_TRUE : ImmutableSet.copyOf(tableTypeFilter)::contains; } private MetadataProviderConditions(); static Predicate<String> getTableTypePredicate(List<String> tableTypeFilter); static Predicate<String> getCatalogNamePredicate(LikeFilter filter); static Optional<SearchQuery> createConjunctiveQuery(
LikeFilter schemaNameFilter,
LikeFilter tableNameFilter
); } | @Test public void testTableTypesEmptyList() { assertSame(MetadataProviderConditions.ALWAYS_TRUE, MetadataProviderConditions.getTableTypePredicate(Collections.emptyList())); }
@Test public void testTableTypes() { Predicate<String> filter = MetadataProviderConditions.getTableTypePredicate(Arrays.asList("foo", "bar")); assertTrue(filter.test("foo")); assertTrue(filter.test("bar")); assertFalse(filter.test("baz")); assertFalse(filter.test("fooo")); assertFalse(filter.test("ofoo")); assertFalse(filter.test("FOO")); } |
MetadataProviderConditions { public static Predicate<String> getCatalogNamePredicate(LikeFilter filter) { if (filter == null || !filter.hasPattern() || SQL_LIKE_ANY_STRING_PATTERN.equals(filter.getPattern())) { return ALWAYS_TRUE; } final String patternString = RegexpUtil.sqlToRegexLike(filter.getPattern(), filter.hasEscape() ? filter.getEscape().charAt(0) : (char) 0); final Pattern pattern = Pattern.compile(patternString, Pattern.CASE_INSENSITIVE); return input -> pattern.matcher(input).matches(); } private MetadataProviderConditions(); static Predicate<String> getTableTypePredicate(List<String> tableTypeFilter); static Predicate<String> getCatalogNamePredicate(LikeFilter filter); static Optional<SearchQuery> createConjunctiveQuery(
LikeFilter schemaNameFilter,
LikeFilter tableNameFilter
); } | @Test public void testLikeFilterAlwaysTrue() { assertSame(MetadataProviderConditions.ALWAYS_TRUE, MetadataProviderConditions.getCatalogNamePredicate(null)); assertSame(MetadataProviderConditions.ALWAYS_TRUE, MetadataProviderConditions.getCatalogNamePredicate(newLikeFilter(null, "\\"))); assertSame(MetadataProviderConditions.ALWAYS_TRUE, MetadataProviderConditions.getCatalogNamePredicate(newLikeFilter("%", "\\"))); }
@Test public void testLikeFilter() { Predicate<String> filter = MetadataProviderConditions.getCatalogNamePredicate(newLikeFilter("abc", "\\")); assertTrue(filter.test("abc")); assertFalse(filter.test("abcd")); assertTrue(filter.test("ABC")); }
@Test public void testLikeFilterMixedCase() { Predicate<String> filter = MetadataProviderConditions.getCatalogNamePredicate(newLikeFilter("AbC", "\\")); assertTrue(filter.test("abc")); assertFalse(filter.test("abcd")); assertFalse(filter.test("aabc")); assertTrue(filter.test("ABC")); } |
MetadataProviderConditions { public static Optional<SearchQuery> createConjunctiveQuery( LikeFilter schemaNameFilter, LikeFilter tableNameFilter ) { final Optional<SearchQuery> schemaNameQuery = createLikeQuery(DatasetIndexKeys.UNQUOTED_SCHEMA.getIndexFieldName(), schemaNameFilter); final Optional<SearchQuery> tableNameQuery = createLikeQuery(DatasetIndexKeys.UNQUOTED_NAME.getIndexFieldName(), tableNameFilter); if (!schemaNameQuery.isPresent()) { return tableNameQuery; } if (!tableNameQuery.isPresent()) { return schemaNameQuery; } return Optional.of(SearchQuery.newBuilder() .setAnd(SearchQuery.And.newBuilder() .addClauses(schemaNameQuery.get()) .addClauses(tableNameQuery.get())) .build()); } private MetadataProviderConditions(); static Predicate<String> getTableTypePredicate(List<String> tableTypeFilter); static Predicate<String> getCatalogNamePredicate(LikeFilter filter); static Optional<SearchQuery> createConjunctiveQuery(
LikeFilter schemaNameFilter,
LikeFilter tableNameFilter
); } | @Test public void testCreateFilterAlwaysTrue() { assertFalse(MetadataProviderConditions.createConjunctiveQuery(null, null).isPresent()); assertFalse(MetadataProviderConditions.createConjunctiveQuery(null, newLikeFilter("%", null)).isPresent()); assertFalse(MetadataProviderConditions.createConjunctiveQuery(newLikeFilter("%", null), null).isPresent()); assertFalse(MetadataProviderConditions.createConjunctiveQuery(newLikeFilter("%", null), newLikeFilter("%", null)) .isPresent()); } |
SQLAnalyzerFactory { public static SQLAnalyzer createSQLAnalyzer(final String username, final SabotContext sabotContext, final List<String> context, final boolean createForSqlSuggestions, ProjectOptionManager projectOptionManager) { final ViewExpansionContext viewExpansionContext = new ViewExpansionContext(username); final OptionManager optionManager = OptionManagerWrapper.Builder.newBuilder() .withOptionManager(new DefaultOptionManager(sabotContext.getOptionValidatorListing())) .withOptionManager(new EagerCachingOptionManager(projectOptionManager)) .withOptionManager(new QueryOptionManager(sabotContext.getOptionValidatorListing())) .build(); final NamespaceKey defaultSchemaPath = context == null ? null : new NamespaceKey(context); final SchemaConfig newSchemaConfig = SchemaConfig.newBuilder(username) .defaultSchema(defaultSchemaPath) .optionManager(optionManager) .setViewExpansionContext(viewExpansionContext) .build(); Catalog catalog = sabotContext.getCatalogService() .getCatalog(MetadataRequestOptions.of(newSchemaConfig)); JavaTypeFactory typeFactory = JavaTypeFactoryImpl.INSTANCE; DremioCatalogReader catalogReader = new DremioCatalogReader(catalog, typeFactory); FunctionImplementationRegistry functionImplementationRegistry = optionManager.getOption (PlannerSettings.ENABLE_DECIMAL_V2_KEY).getBoolVal() ? sabotContext.getDecimalFunctionImplementationRegistry() : sabotContext.getFunctionImplementationRegistry(); OperatorTable opTable = new OperatorTable(functionImplementationRegistry); SqlOperatorTable chainedOpTable = new ChainedSqlOperatorTable(ImmutableList.<SqlOperatorTable>of(opTable, catalogReader)); SqlValidatorWithHints validator = createForSqlSuggestions ? new SqlAdvisorValidator(chainedOpTable, catalogReader, typeFactory, DremioSqlConformance.INSTANCE) : SqlValidatorUtil.newValidator(chainedOpTable, catalogReader, typeFactory, DremioSqlConformance.INSTANCE); return new SQLAnalyzer(validator); } static SQLAnalyzer createSQLAnalyzer(final String username,
final SabotContext sabotContext,
final List<String> context,
final boolean createForSqlSuggestions,
ProjectOptionManager projectOptionManager); } | @Test public void testCreationOfValidator() { SabotContext sabotContext = mock(SabotContext.class); FunctionImplementationRegistry functionImplementationRegistry = mock(FunctionImplementationRegistry.class); CatalogService catalogService = mock(CatalogService.class); Catalog catalog = mock(Catalog.class); ProjectOptionManager mockOptions = mock(ProjectOptionManager.class); when(mockOptions.getOptionValidatorListing()).thenReturn(mock(OptionValidatorListing.class)); when(sabotContext.getFunctionImplementationRegistry()).thenReturn(functionImplementationRegistry); when(sabotContext.getCatalogService()).thenReturn(catalogService); when(sabotContext.getCatalogService().getCatalog(any(MetadataRequestOptions.class))).thenReturn(catalog); OptionValue value1 = OptionValue.createBoolean(OptionValue.OptionType.SYSTEM, PlannerSettings.ENABLE_DECIMAL_V2_KEY, false); OptionValue value2 = OptionValue.createLong(OptionValue.OptionType.SYSTEM, UserSession.MAX_METADATA_COUNT.getOptionName(), 0); OptionList optionList = new OptionList(); optionList.add(value1); optionList.add(value2); when(mockOptions.getOption(PlannerSettings.ENABLE_DECIMAL_V2_KEY)).thenReturn(value1); when(mockOptions.getOption(UserSession.MAX_METADATA_COUNT.getOptionName())).thenReturn(value2); when(mockOptions.getNonDefaultOptions()).thenReturn(optionList); SQLAnalyzer sqlAnalyzer = SQLAnalyzerFactory.createSQLAnalyzer(SystemUser.SYSTEM_USERNAME, sabotContext, null, true, mockOptions); SqlValidatorWithHints validator = sqlAnalyzer.validator; assertTrue(validator instanceof SqlAdvisorValidator); sqlAnalyzer = SQLAnalyzerFactory.createSQLAnalyzer(SystemUser.SYSTEM_USERNAME, sabotContext, null, false, mockOptions); validator = sqlAnalyzer.validator; assertTrue(validator instanceof SqlValidatorImpl); } |
SqlConverter { @VisibleForTesting static SqlNode parseSingleStatementImpl(String sql, ParserConfig parserConfig, boolean isInnerQuery) { SqlNodeList list = parseMultipleStatementsImpl(sql, parserConfig, isInnerQuery); if (list.size() > 1) { SqlParserPos pos = list.get(1).getParserPosition(); int col = pos.getColumnNum(); String first = sql.substring(0, col); String second = sql.substring(col-1); UserException.Builder builder = UserException.parseError(); builder.message("Unable to parse multiple queries. First query is %s. Rest of submission is %s", first, second); throw builder.buildSilently(); } SqlNode newNode = list.get(0).accept(STRING_LITERAL_CONVERTER); return newNode; } SqlConverter(
final PlannerSettings settings,
final SqlOperatorTable operatorTable,
final FunctionContext functionContext,
final MaterializationDescriptorProvider materializationProvider,
final FunctionImplementationRegistry functions,
final UserSession session,
final AttemptObserver observer,
final Catalog catalog,
final SubstitutionProviderFactory factory,
final SabotConfig config,
final ScanResult scanResult
); SqlConverter(SqlConverter parent, DremioCatalogReader catalog); SqlNodeList parseMultipleStatements(String sql); SqlNode parse(String sql); ViewExpansionContext getViewExpansionContext(); UserSession getSession(); SqlNode validate(final SqlNode parsedNode); RelDataType getValidatedRowType(String sql); FunctionImplementationRegistry getFunctionImplementationRegistry(); PlannerSettings getSettings(); RelDataType getOutputType(SqlNode validatedNode); JavaTypeFactory getTypeFactory(); SqlOperatorTable getOpTab(); RelOptCostFactory getCostFactory(); FunctionContext getFunctionContext(); DremioCatalogReader getCatalogReader(); SqlParser.Config getParserConfig(); AttemptObserver getObserver(); MaterializationList getMaterializations(); int getNestingLevel(); RelOptCluster getCluster(); AccelerationAwareSubstitutionProvider getSubstitutionProvider(); RelSerializerFactory getSerializerFactory(); SabotConfig getConfig(); RelRootPlus toConvertibleRelRoot(final SqlNode validatedNode, boolean expand, boolean flatten); static final SqlShuttle STRING_LITERAL_CONVERTER; } | @Test(expected = UserException.class) public void testFailMultipleQueries() { ParserConfig config = new ParserConfig(ParserConfig.QUOTING, 100); SqlConverter.parseSingleStatementImpl("select * from t1; select * from t2", config, false); }
@Test public void testPassSemicolon() { ParserConfig config = new ParserConfig(ParserConfig.QUOTING, 100); SqlNode node = SqlConverter.parseSingleStatementImpl("select * from t1;", config, false); assertEquals("SELECT *\n" + "FROM \"t1\"", node.toSqlString(CalciteSqlDialect.DEFAULT).getSql()); } |
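A caller-side sketch of the single-statement contract the two tests above pin down (the ParserConfig construction is copied from the tests; the table names are arbitrary): a trailing semicolon still yields a one-element SqlNodeList and parses fine, while a genuine second statement trips the parseError path.

ParserConfig config = new ParserConfig(ParserConfig.QUOTING, 100);
SqlNode ok = SqlConverter.parseSingleStatementImpl("select * from t1;", config, false); // parses
try {
  SqlConverter.parseSingleStatementImpl("select * from t1; select * from t2", config, false);
} catch (UserException expected) {
  // thrown via buildSilently(); the message reports the first query and the rest
  // of the submission, split at the second statement's parser position
}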
SQLAnalyzer { public List<SqlMoniker> suggest(String sql, int cursorPosition) { SqlAdvisor sqlAdvisor = new SqlAdvisor(validator); String[] replaced = {null}; return sqlAdvisor.getCompletionHints(sql, cursorPosition , replaced); } protected SQLAnalyzer(final SqlValidatorWithHints validator); List<SqlMoniker> suggest(String sql, int cursorPosition); List<SqlAdvisor.ValidateErrorInfo> validate(String sql); } | @Test public void testSuggestion() { final SqlParserUtil.StringAndPos stringAndPos = SqlParserUtil.findPos(sql); List<SqlMoniker> suggestions = sqlAnalyzer.suggest(stringAndPos.sql, stringAndPos.cursor); assertEquals(expectedSuggestionCount, suggestions.size()); if (checkSuggestions) { assertSuggestions(suggestions); } } |
SQLAnalyzer { public List<SqlAdvisor.ValidateErrorInfo> validate(String sql) { SqlAdvisor sqlAdvisor = new SqlAdvisor(validator); return sqlAdvisor.validate(sql); } protected SQLAnalyzer(final SqlValidatorWithHints validator); List<SqlMoniker> suggest(String sql, int cursorPosition); List<SqlAdvisor.ValidateErrorInfo> validate(String sql); } | @Test public void testValidation() { List<SqlAdvisor.ValidateErrorInfo> validationErrors = sqlAnalyzer.validate("select * from"); assertEquals(1, validationErrors.size()); assertEquals(10, validationErrors.get(0).getStartColumnNum()); assertEquals(13, validationErrors.get(0).getEndColumnNum()); } |
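Both suggest(...) and validate(...) are thin wrappers over Calcite's SqlAdvisor, so the same flow can be sketched against Calcite directly (validator here is any SqlValidatorWithHints; the caret convention in the query string follows SqlParserUtil.findPos, as used by the suggestion test above):

SqlParserUtil.StringAndPos sap = SqlParserUtil.findPos("select * from ^");
SqlAdvisor advisor = new SqlAdvisor(validator);
String[] replaced = {null};
List<SqlMoniker> hints = advisor.getCompletionHints(sap.sql, sap.cursor, replaced);
List<SqlAdvisor.ValidateErrorInfo> errors = advisor.validate("select * from");
// each ValidateErrorInfo carries 1-based start/end column numbers, which is what
// the validation test asserts against (columns 10..13 for the dangling FROM)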
SimpleLimitExchangeRemover { public static Prel apply(PlannerSettings settings, Prel input){ if(!settings.isTrivialSingularOptimized() || settings.isLeafLimitsEnabled()) { return input; } if(input.accept(new Identifier(), false)){ return input.accept(new AllExchangeRemover(), null); } return input; } static Prel apply(PlannerSettings settings, Prel input); } | @Test public void simpleSelectNoLimit() { Prel input = newScreen( newProject(exprs(), rowType(), newUnionExchange( newProject(exprs(), rowType(), newSoftScan(rowType()) ) ) ) ); Prel output = SimpleLimitExchangeRemover.apply(plannerSettings, input); verifyOutput(output, "Screen", "Project", "UnionExchange", "Project", "SystemScan"); }
@Test public void simpleSelectWithLimitWithSoftScan() { Prel input = newScreen( newLimit(0, 10, newProject(exprs(), rowType(), newUnionExchange( newLimit(0, 10, newProject(exprs(), rowType(), newSoftScan(rowType()) ) ) ) ) ) ); Prel output = SimpleLimitExchangeRemover.apply(plannerSettings, input); verifyOutput(output, "Screen", "Limit", "Project", "Limit", "Project", "SystemScan"); }
@Test public void simpleSelectWithLimitWithSoftScanWithLeafLimitsEnabled() { OptionValue optionEnabled = OptionValue.createBoolean(OptionValue.OptionType.QUERY, PlannerSettings.ENABLE_LEAF_LIMITS.getOptionName(), true); when(optionManager.getOption(PlannerSettings.ENABLE_LEAF_LIMITS.getOptionName())).thenReturn(optionEnabled); optionList.remove(PlannerSettings.ENABLE_LEAF_LIMITS.getDefault()); optionList.add(optionEnabled); Prel input = newScreen( newLimit(0, 10, newProject(exprs(), rowType(), newUnionExchange( newLimit(0, 10, newProject(exprs(), rowType(), newSoftScan(rowType()) ) ) ) ) ) ); Prel output = SimpleLimitExchangeRemover.apply(plannerSettings, input); verifyOutput(output, "Screen", "Limit", "Project", "UnionExchange", "Limit", "Project", "SystemScan"); }
@Test public void simpleSelectWithLargeLimitWithSoftScan() { Prel input = newScreen( newLimit(0, 200000, newProject(exprs(), rowType(), newUnionExchange( newLimit(0, 200000, newProject(exprs(), rowType(), newSoftScan(rowType()) ) ) ) ) ) ); Prel output = SimpleLimitExchangeRemover.apply(plannerSettings, input); verifyOutput(output, "Screen", "Limit", "Project", "UnionExchange", "Limit", "Project", "SystemScan"); }
@Test public void simpleSelectWithLimitWithHardScan() { Prel input = newScreen( newLimit(0, 10, newProject(exprs(), rowType(), newUnionExchange( newLimit(0, 10, newProject(exprs(), rowType(), newHardScan(rowType()) ) ) ) ) ) ); Prel output = SimpleLimitExchangeRemover.apply(plannerSettings, input); verifyOutput(output, "Screen", "Limit", "Project", "UnionExchange", "Limit", "Project", "SystemScan"); }
@Test public void joinWithLimitWithSoftScan() { Prel input = newScreen( newLimit(0, 10, newProject(exprs(), rowType(), newUnionExchange( newJoin( newProject(exprs(), rowType(), newSoftScan(rowType()) ), newProject(exprs(), rowType(), newSoftScan(rowType()) ) ) ) ) ) ); Prel output = SimpleLimitExchangeRemover.apply(plannerSettings, input); verifyOutput(output, "Screen", "Limit", "Project", "UnionExchange", "HashJoin", "Project", "SystemScan", "Project", "SystemScan"); } |
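Distilling the five tests above into the rule's observable behavior (the cutoff that makes 200000 a "large" limit, and the join/hard-scan checks, live inside the Identifier visitor, which is not shown here):

// plan below the Screen                                  exchange removed?
// soft scan, no limit                                    no
// soft scan, small limit (10)                            yes
// soft scan, small limit, leaf limits enabled            no (apply() short-circuits)
// soft scan, large limit (200000)                        no
// hard scan, or a join anywhere under the exchange       no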
PushLimitToPruneableScan extends Prule { @Override public boolean matches(RelOptRuleCall call) { return !((ScanPrelBase) call.rel(1)).hasFilter() && call.rel(1) instanceof PruneableScan; } private PushLimitToPruneableScan(); @Override boolean matches(RelOptRuleCall call); @Override void onMatch(RelOptRuleCall call); static final RelOptRule INSTANCE; } | @Test public void testRuleNoMatch() throws Exception { final TestScanPrel scan = new TestScanPrel(cluster, TRAITS, table, pluginId, metadata, PROJECTED_COLUMNS, 0, true); final LimitPrel limitNode = new LimitPrel(cluster, TRAITS, scan, REX_BUILDER.makeExactLiteral(BigDecimal.valueOf(offset)), REX_BUILDER.makeExactLiteral(BigDecimal.valueOf(fetch))); final RelOptRuleCall call = newCall(rel -> fail("Unexpected call to transformTo"), limitNode, scan); assertFalse(PushLimitToPruneableScan.INSTANCE.matches(call)); } |
KeyFairSliceCalculator { public int getTotalSize() { return totalSize; } KeyFairSliceCalculator(Map<String, Integer> originalKeySizes, int maxTotalSize); Integer getKeySlice(String key); int getTotalSize(); boolean keysTrimmed(); int numValidityBytes(); } | @Test public void testUnderflowSize() { KeyFairSliceCalculator keyFairSliceCalculator = new KeyFairSliceCalculator(newHashMap("k1", 4, "k2", 4, "k3", 4), 16); assertEquals("Invalid combined key size", 13, keyFairSliceCalculator.getTotalSize()); } |
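A worked reading of testUnderflowSize (the validity accounting is inferred from the numValidityBytes() accessor, whose body is not shown in this snippet): three 4-byte keys against a 16-byte budget reserve one validity byte (one bit per key, rounded up), leaving 15 bytes to slice fairly, i.e. 5 per key; each key only needs 4, so the combined size settles below the budget at 3 * 4 + 1 = 13.

KeyFairSliceCalculator calc = new KeyFairSliceCalculator(newHashMap("k1", 4, "k2", 4, "k3", 4), 16);
// calc.getTotalSize() == 13 and calc.numValidityBytes() == 1; since no key was cut
// short, keysTrimmed() should report false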
GlobalDictionaryBuilder { public static String dictionaryFileName(String columnFullPath) { return format("_%s.%s", columnFullPath, DICTIONARY_FILES_EXTENSION); } static String dictionaryFileName(String columnFullPath); static String dictionaryFileName(ColumnDescriptor columnDescriptor); static Path dictionaryFilePath(Path dictionaryRootDir, String columnFullPath); static Path dictionaryFilePath(Path dictionaryRootDir, ColumnDescriptor columnDescriptor); static String getColumnFullPath(String dictionaryFileName); static long getDictionaryVersion(FileSystem fs, Path tableDir); static String dictionaryRootDirName(long version); static Path getDictionaryVersionedRootPath(FileSystem fs, Path tableDir, long version); static Path createDictionaryVersionedRootPath(FileSystem fs, Path tableDir, long nextVersion, Path tmpDictionaryRootPath); static Path getDictionaryFile(FileSystem fs, Path dictRootDir, String columnFullPath); static Path getDictionaryFile(FileSystem fs, Path dictRootDir, ColumnDescriptor columnDescriptor); static Map<String, Path> listDictionaryFiles(FileSystem fs, Path dictRootDir); static VectorContainer readDictionary(FileSystem fs,
Path dictionaryRootDir,
String columnFullPath,
BufferAllocator bufferAllocator); static VectorContainer readDictionary(FileSystem fs,
Path dictionaryRootDir,
ColumnDescriptor columnDescriptor,
BufferAllocator bufferAllocator); static VectorContainer readDictionary(FileSystem fs,
Path dictionaryFile,
BufferAllocator bufferAllocator); static GlobalDictionariesInfo updateGlobalDictionaries(CompressionCodecFactory codecFactory, FileSystem fs, Path tableDir, Path partitionDir, BufferAllocator bufferAllocator); static GlobalDictionariesInfo createGlobalDictionaries(CompressionCodecFactory codecFactory,
FileSystem fs, Path tableDir, BufferAllocator bufferAllocator); static void writeDictionary(OutputStream out,
VectorAccessible input, int recordCount,
BufferAllocator bufferAllocator); static void main(String []args); static final String DICTIONARY_TEMP_ROOT_PREFIX; static final String DICTIONARY_ROOT_PREFIX; static final Pattern DICTIONARY_VERSION_PATTERN; static final Predicate<Path> DICTIONARY_ROOT_FILTER; static final String DICTIONARY_FILES_EXTENSION; static final Predicate<Path> DICTIONARY_FILES_FILTER; static final Pattern DICTIONARY_FILES_PATTERN; } | @Test public void testDictionaryFileName() throws Exception { assertEquals("_foo.dict", GlobalDictionaryBuilder.dictionaryFileName("foo")); assertEquals("_a.b.c.dict", GlobalDictionaryBuilder.dictionaryFileName("a.b.c")); assertEquals("_foo.dict", GlobalDictionaryBuilder.dictionaryFileName(new ColumnDescriptor(new String[]{"foo"}, INT64, 0, 1))); assertEquals("_a.b.c.dict", GlobalDictionaryBuilder.dictionaryFileName(new ColumnDescriptor(new String[]{"a", "b", "c"}, INT64, 0, 1))); } |
GlobalDictionaryBuilder { public static String getColumnFullPath(String dictionaryFileName) { final Matcher matcher = DICTIONARY_FILES_PATTERN.matcher(dictionaryFileName); if (matcher.find()) { return matcher.group(1); } return null; } static String dictionaryFileName(String columnFullPath); static String dictionaryFileName(ColumnDescriptor columnDescriptor); static Path dictionaryFilePath(Path dictionaryRootDir, String columnFullPath); static Path dictionaryFilePath(Path dictionaryRootDir, ColumnDescriptor columnDescriptor); static String getColumnFullPath(String dictionaryFileName); static long getDictionaryVersion(FileSystem fs, Path tableDir); static String dictionaryRootDirName(long version); static Path getDictionaryVersionedRootPath(FileSystem fs, Path tableDir, long version); static Path createDictionaryVersionedRootPath(FileSystem fs, Path tableDir, long nextVersion, Path tmpDictionaryRootPath); static Path getDictionaryFile(FileSystem fs, Path dictRootDir, String columnFullPath); static Path getDictionaryFile(FileSystem fs, Path dictRootDir, ColumnDescriptor columnDescriptor); static Map<String, Path> listDictionaryFiles(FileSystem fs, Path dictRootDir); static VectorContainer readDictionary(FileSystem fs,
Path dictionaryRootDir,
String columnFullPath,
BufferAllocator bufferAllocator); static VectorContainer readDictionary(FileSystem fs,
Path dictionaryRootDir,
ColumnDescriptor columnDescriptor,
BufferAllocator bufferAllocator); static VectorContainer readDictionary(FileSystem fs,
Path dictionaryFile,
BufferAllocator bufferAllocator); static GlobalDictionariesInfo updateGlobalDictionaries(CompressionCodecFactory codecFactory, FileSystem fs, Path tableDir, Path partitionDir, BufferAllocator bufferAllocator); static GlobalDictionariesInfo createGlobalDictionaries(CompressionCodecFactory codecFactory,
FileSystem fs, Path tableDir, BufferAllocator bufferAllocator); static void writeDictionary(OutputStream out,
VectorAccessible input, int recordCount,
BufferAllocator bufferAllocator); static void main(String []args); static final String DICTIONARY_TEMP_ROOT_PREFIX; static final String DICTIONARY_ROOT_PREFIX; static final Pattern DICTIONARY_VERSION_PATTERN; static final Predicate<Path> DICTIONARY_ROOT_FILTER; static final String DICTIONARY_FILES_EXTENSION; static final Predicate<Path> DICTIONARY_FILES_FILTER; static final Pattern DICTIONARY_FILES_PATTERN; } | @Test public void testExtractColumnName() throws Exception { assertEquals("foo", GlobalDictionaryBuilder.getColumnFullPath("_foo.dict")); assertEquals("a.b.c", GlobalDictionaryBuilder.getColumnFullPath("_a.b.c.dict")); } |
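dictionaryFileName and getColumnFullPath are inverses; a regex of the shape below reproduces the round trip the two tests exercise (the real DICTIONARY_FILES_PATTERN lives in GlobalDictionaryBuilder and may differ in detail, so treat this pattern as an assumption; uses java.util.regex):

Pattern filesPattern = Pattern.compile("^_(.*)\\.dict$"); // hypothetical reconstruction
Matcher m = filesPattern.matcher(GlobalDictionaryBuilder.dictionaryFileName("a.b.c")); // "_a.b.c.dict"
if (m.find()) {
  System.out.println(m.group(1)); // a.b.c — the same value getColumnFullPath returns
}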
MemoryAllocationUtilities { @VisibleForTesting static void setMemory(final OptionManager optionManager, Map<Fragment, Wrapper> fragments, long maxMemoryPerNodePerQuery) { final ArrayListMultimap<NodeEndpoint, PhysicalOperator> consideredOps = ArrayListMultimap.create(); final ArrayListMultimap<NodeEndpoint, PhysicalOperator> nonConsideredOps = ArrayListMultimap.create(); long queryMaxAllocation = Long.MAX_VALUE; for(Entry<Fragment, Wrapper> entry: fragments.entrySet()) { FindConsideredOperators fco = new FindConsideredOperators(); entry.getKey().getRoot().accept(fco, null); for(NodeEndpoint e : entry.getValue().getAssignedEndpoints()) { consideredOps.putAll(e, fco.getConsideredOperators()); } for(NodeEndpoint e : entry.getValue().getAssignedEndpoints()) { nonConsideredOps.putAll(e, fco.getNonConsideredOperators()); } } for(NodeEndpoint ep : consideredOps.keySet()) { long outsideReserve = nonConsideredOps.get(ep).stream().mapToLong(t -> t.getProps().getMemReserve()).sum(); List<PhysicalOperator> ops = consideredOps.get(ep); long consideredOpsReserve = ops.stream().mapToLong(t -> t.getProps().getMemReserve()).sum(); if (outsideReserve + consideredOpsReserve > queryMaxAllocation) { throw UserException.resourceError() .message("Query was cancelled because the initial memory requirement (%s) is greater than the job memory limit set by the administrator (%s).", PrettyPrintUtils.bytePrint(outsideReserve + consideredOpsReserve, true), PrettyPrintUtils.bytePrint(queryMaxAllocation, true)) .build(logger); } final double totalWeights = ops.stream().mapToDouble(t -> t.getProps().getMemoryFactor()).sum(); final long memoryForHeavyOperations = maxMemoryPerNodePerQuery - outsideReserve; if(memoryForHeavyOperations < 1) { throw UserException.memoryError() .message("Query was cancelled because it exceeded the memory limits set by the administrator. Expected at least %s bytes, but only had %s available.", PrettyPrintUtils.bytePrint(outsideReserve, true), PrettyPrintUtils.bytePrint(maxMemoryPerNodePerQuery, true)) .build(logger); } final double baseWeight = memoryForHeavyOperations/totalWeights; ops.stream() .filter(op -> op.getProps().isMemoryBound()) .forEach(op -> { long targetValue = (long) (baseWeight * op.getProps().getMemoryFactor()); targetValue = Math.max(Math.min(targetValue, op.getProps().getMemLimit()), op.getProps().getMemReserve()); long lowLimit = op.getProps().getMemLowLimit(); long highLimit = op.getProps().getMemLimit(); op.getProps().setMemLimit(targetValue); if (targetValue < lowLimit) { op.getProps().setMemLimit(lowLimit); } if (targetValue > highLimit) { op.getProps().setMemLimit(highLimit); } }); } } private MemoryAllocationUtilities(); static void setupBoundedMemoryAllocations(
final PhysicalPlan plan,
final OptionManager optionManager,
final GroupResourceInformation clusterInfo,
final PlanningSet planningSet,
final long allocatedMemoryPerQuery
); } | @Test public void syntheticSimple() { ConfigurableOperator cnb = new ConfigurableOperator(OpProps.prototype(1, Long.MAX_VALUE).cloneWithMemoryExpensive(true).cloneWithBound(false).cloneWithMemoryFactor(2.0d), ARBTRIARY_LEAF); ConfigurableOperator cb = new ConfigurableOperator(OpProps.prototype(1, Long.MAX_VALUE).cloneWithMemoryExpensive(true).cloneWithBound(true).cloneWithMemoryFactor(1.0d), cnb); Fragment f1 = new Fragment(); f1.addOperator(cb); Wrapper w1 = new Wrapper(f1, 0); w1.overrideEndpoints(Collections.singletonList(N1)); MemoryAllocationUtilities.setMemory(options, ImmutableMap.of(f1, w1), 10); assertEquals(Long.MAX_VALUE, cnb.getProps().getMemLimit()); assertEquals(3, cb.getProps().getMemLimit()); }
@Test public void doubleSort() { ExternalSort es1 = new ExternalSort(OpProps.prototype().cloneWithNewReserve(0).cloneWithMemoryExpensive(true).cloneWithMemoryFactor(options.getOption(SORT_FACTOR)).cloneWithBound(options.getOption(SORT_BOUNDED)), ARBTRIARY_LEAF, Collections.emptyList(), false); ExternalSort es2 = new ExternalSort(OpProps.prototype().cloneWithNewReserve(0).cloneWithMemoryExpensive(true).cloneWithMemoryFactor(options.getOption(SORT_FACTOR)).cloneWithBound(options.getOption(SORT_BOUNDED)), es1, Collections.emptyList(), false); Fragment f1 = new Fragment(); f1.addOperator(es2); Wrapper wrapper = new Wrapper(f1, 0); wrapper.overrideEndpoints(Collections.singletonList(N1)); MemoryAllocationUtilities.setMemory(options, ImmutableMap.of(f1, wrapper), 10); assertEquals(4l, es1.getProps().getMemLimit()); assertEquals(4l, es2.getProps().getMemLimit()); }
@Test public void doubleSortWithExchange() { ExternalSort es1 = new ExternalSort(OpProps.prototype(0, Long.MAX_VALUE).cloneWithMemoryExpensive(true).cloneWithMemoryFactor(options.getOption(SORT_FACTOR)).cloneWithBound(options.getOption(SORT_BOUNDED)), ARBTRIARY_LEAF, Collections.emptyList(), false); SingleSender ss = new SingleSender(OpProps.prototype(1, Long.MAX_VALUE).cloneWithMemoryFactor(options.getOption(SORT_FACTOR)).cloneWithBound(options.getOption(SORT_BOUNDED)), Mockito.mock(BatchSchema.class), es1, 0, MinorFragmentIndexEndpoint.newBuilder().setMinorFragmentId(0).build()); Fragment f1 = new Fragment(); f1.addOperator(ss); Wrapper w1 = new Wrapper(f1, 0); w1.overrideEndpoints(Collections.singletonList(N1)); UnorderedReceiver or = new UnorderedReceiver(OpProps.prototype(1, Long.MAX_VALUE), Mockito.mock(BatchSchema.class), 0, Collections.emptyList(), false); ExternalSort es2 = new ExternalSort(OpProps.prototype(0, Long.MAX_VALUE).cloneWithMemoryExpensive(true).cloneWithMemoryFactor(options.getOption(SORT_FACTOR)).cloneWithBound(options.getOption(SORT_BOUNDED)), or, Collections.emptyList(), false); Fragment f2 = new Fragment(); f2.addOperator(es2); Wrapper w2 = new Wrapper(f2, 0); w2.overrideEndpoints(Collections.singletonList(N1)); MemoryAllocationUtilities.setMemory(options, ImmutableMap.of(f1, w1, f2, w2), 10); assertEquals(3l, es1.getProps().getMemLimit()); assertEquals(3l, es2.getProps().getMemLimit()); }
@Test public void doubleSortWithExchangeUnbalancedNodes() { ExternalSort es1 = new ExternalSort(OpProps.prototype(0, Long.MAX_VALUE).cloneWithMemoryExpensive(true).cloneWithMemoryFactor(options.getOption(SORT_FACTOR)).cloneWithBound(options.getOption(SORT_BOUNDED)), ARBTRIARY_LEAF, Collections.emptyList(), false); SingleSender ss = new SingleSender(OpProps.prototype(1, Long.MAX_VALUE).cloneWithMemoryFactor(options.getOption(SORT_FACTOR)).cloneWithBound(options.getOption(SORT_BOUNDED)), Mockito.mock(BatchSchema.class), es1, 0, MinorFragmentIndexEndpoint.newBuilder().setMinorFragmentId(0).build()); Fragment f1 = new Fragment(); f1.addOperator(ss); Wrapper w1 = new Wrapper(f1, 0); w1.overrideEndpoints(Arrays.asList(N1, N2)); UnorderedReceiver or = new UnorderedReceiver(OpProps.prototype(1, Long.MAX_VALUE), Mockito.mock(BatchSchema.class), 0, Collections.emptyList(), false); ExternalSort es2 = new ExternalSort(OpProps.prototype(0, Long.MAX_VALUE).cloneWithMemoryExpensive(true).cloneWithMemoryFactor(options.getOption(SORT_FACTOR)).cloneWithBound(options.getOption(SORT_BOUNDED)), or, Collections.emptyList(), false); Fragment f2 = new Fragment(); f2.addOperator(es2); Wrapper w2 = new Wrapper(f2, 0); w2.overrideEndpoints(Collections.singletonList(N1)); MemoryAllocationUtilities.setMemory(options, ImmutableMap.of(f1, w1, f2, w2), 10); assertEquals(3l, es1.getProps().getMemLimit()); assertEquals(3l, es2.getProps().getMemLimit()); } |
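The per-operator assignment inside setMemory reduces to a weighted split of the leftover budget followed by a clamp; isolating that inner rule (names follow the source above; props stands for one memory-bound operator's OpProps):

double baseWeight = (maxMemoryPerNodePerQuery - outsideReserve) / totalWeights;
long target = (long) (baseWeight * props.getMemoryFactor());
target = Math.max(Math.min(target, props.getMemLimit()), props.getMemReserve());
// the result is then re-clamped into [getMemLowLimit(), getMemLimit()] before it is
// written back with setMemLimit(target)

In doubleSort above, the two bounded sorts carry equal weights and zero reserve, so they split the leftover of the 10-byte per-node budget evenly, which is how both land on a limit of 4.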
RuntimeFilterProbeTarget { public static List<RuntimeFilterProbeTarget> getProbeTargets(RuntimeFilterInfo runtimeFilterInfo) { final List<RuntimeFilterProbeTarget> targets = new ArrayList<>(); try { if (runtimeFilterInfo==null) { return targets; } for (RuntimeFilterEntry entry : runtimeFilterInfo.getPartitionJoinColumns()) { RuntimeFilterProbeTarget probeTarget = findOrCreateNew(targets, entry); probeTarget.addPartitionKey(entry.getBuildFieldName(), entry.getProbeFieldName()); } for (RuntimeFilterEntry entry : runtimeFilterInfo.getNonPartitionJoinColumns()) { RuntimeFilterProbeTarget probeTarget = findOrCreateNew(targets, entry); probeTarget.addNonPartitionKey(entry.getBuildFieldName(), entry.getProbeFieldName()); } } catch (RuntimeException e) { logger.error("Error while establishing probe scan targets from RuntimeFilterInfo", e); } return targets; } RuntimeFilterProbeTarget(int probeScanMajorFragmentId, int probeScanOperatorId); boolean isSameProbeCoordinate(int majorFragmentId, int operatorId); List<String> getPartitionBuildTableKeys(); List<String> getPartitionProbeTableKeys(); List<String> getNonPartitionBuildTableKeys(); List<String> getNonPartitionProbeTableKeys(); int getProbeScanMajorFragmentId(); int getProbeScanOperatorId(); @Override String toString(); String toTargetIdString(); static List<RuntimeFilterProbeTarget> getProbeTargets(RuntimeFilterInfo runtimeFilterInfo); } | @Test public void testNullRuntimeFilterInfoObj() { List<RuntimeFilterProbeTarget> probeTargets = RuntimeFilterProbeTarget.getProbeTargets(null); assertTrue(probeTargets.isEmpty()); } |
BloomFilter implements AutoCloseable { public void setup() { checkNotNull(this.allocator, "Setup not required for deserialized objects."); this.dataBuffer = this.allocator.buffer(this.sizeInBytes + META_BYTES_CNT); setup(dataBuffer); dataBuffer.writerIndex(0); for (int i = 0; i < sizeInBytes; i += 8) { dataBuffer.writeLong(0L); } byte[] metaBytes = new byte[24]; byte[] nameBytesAll = name.getBytes(StandardCharsets.UTF_8); System.arraycopy(name.getBytes(StandardCharsets.UTF_8), Math.max(0, nameBytesAll.length - 24), metaBytes, 0, Math.min(24, nameBytesAll.length)); this.name = new String(metaBytes, StandardCharsets.UTF_8); this.dataBuffer.writeBytes(metaBytes); this.dataBuffer.writeLong(0L); this.dataBuffer.readerIndex(0); this.numBitsSetLoc = dataBuffer.memoryAddress() + sizeInBytes + META_BYTES_CNT - 8; logger.debug("Bloomfilter {} set up completed.", this.name); } BloomFilter(BufferAllocator bufferAllocator, String name, long minSizeBytes); private BloomFilter(ArrowBuf dataBuffer); void setup(); String getName(); long getSizeInBytes(); static BloomFilter prepareFrom(ArrowBuf dataBuffer); ArrowBuf getDataBuffer(); boolean mightContain(ArrowBuf bloomFilterKey, int length); boolean put(ArrowBuf bloomFilterKey, int length); double getExpectedFPP(); boolean isCrossingMaxFPP(); long getOptimalInsertions(); static long getOptimalSize(long expectedInsertions); void merge(BloomFilter that); @VisibleForTesting long getNumBitsSet(); @Override String toString(); @Override void close(); } | @Test public void testSetup() { try (final BloomFilter bloomFilter = new BloomFilter(bfTestAllocator, TEST_NAME, 40)) { bloomFilter.setup(); assertEquals(bloomFilter.getDataBuffer().capacity(), bloomFilter.getSizeInBytes()); String expectedName = TEST_NAME.substring(TEST_NAME.length() - 24); assertEquals("BloomFilter.getName() is incorrect", expectedName, bloomFilter.getName()); byte[] nameBytes = new byte[24]; bloomFilter.getDataBuffer().getBytes(bloomFilter.getSizeInBytes() - 32, nameBytes); assertEquals("Name in meta bytes not set correctly.", expectedName, new String(nameBytes, StandardCharsets.UTF_8)); assertEquals("Reader index not set correctly", 0, bloomFilter.getDataBuffer().readerIndex()); assertEquals("Writer index not set correctly", bloomFilter.getSizeInBytes(), bloomFilter.getDataBuffer().writerIndex()); for (long i = 0; i < bloomFilter.getSizeInBytes() - 32; i += 8) { long block = bloomFilter.getDataBuffer().getLong(i); assertEquals("Found unclean buffer state", 0L, block); } } }
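The assertions above trace out the buffer layout that setup() produces; collected in one place (META_BYTES_CNT = 32 is inferred from the 24-byte name plus the trailing 8-byte counter, and getSizeInBytes() evidently spans the whole buffer, since the test equates it with capacity):

// [0 .. size-33]       filter bit blocks, zeroed one long at a time
// [size-32 .. size-9]  24-byte name, taken from the tail of the supplied name
// [size-8 .. size-1]   64-bit count of set bits (numBitsSetLoc points here)
// readerIndex is reset to 0; writerIndex ends at size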
BloomFilter implements AutoCloseable { public boolean isCrossingMaxFPP() { return getExpectedFPP() > (5 * FPP); } BloomFilter(BufferAllocator bufferAllocator, String name, long minSizeBytes); private BloomFilter(ArrowBuf dataBuffer); void setup(); String getName(); long getSizeInBytes(); static BloomFilter prepareFrom(ArrowBuf dataBuffer); ArrowBuf getDataBuffer(); boolean mightContain(ArrowBuf bloomFilterKey, int length); boolean put(ArrowBuf bloomFilterKey, int length); double getExpectedFPP(); boolean isCrossingMaxFPP(); long getOptimalInsertions(); static long getOptimalSize(long expectedInsertions); void merge(BloomFilter that); @VisibleForTesting long getNumBitsSet(); @Override String toString(); @Override void close(); } | @Test public void testIsCrossingMaxFpp() { try (final BloomFilter bloomFilter = new BloomFilter(bfTestAllocator, TEST_NAME, 64); final ArrowBuf keyBuf = bfTestAllocator.buffer(36)) { bloomFilter.setup(); for (int i = 0; i < 1_000_000; i++) { bloomFilter.put(writeKey(keyBuf, UUID.randomUUID().toString()), 36); if (bloomFilter.getExpectedFPP() > 0.05) { break; } } assertTrue(bloomFilter.isCrossingMaxFPP()); } } |
BloomFilter implements AutoCloseable { public static long getOptimalSize(long expectedInsertions) { checkArgument(expectedInsertions > 0); long optimalSize = (long) (-expectedInsertions * Math.log(FPP) / (Math.log(2) * Math.log(2))) / 8; optimalSize = ((optimalSize + 8) / 8) * 8; return optimalSize + META_BYTES_CNT; } BloomFilter(BufferAllocator bufferAllocator, String name, long minSizeBytes); private BloomFilter(ArrowBuf dataBuffer); void setup(); String getName(); long getSizeInBytes(); static BloomFilter prepareFrom(ArrowBuf dataBuffer); ArrowBuf getDataBuffer(); boolean mightContain(ArrowBuf bloomFilterKey, int length); boolean put(ArrowBuf bloomFilterKey, int length); double getExpectedFPP(); boolean isCrossingMaxFPP(); long getOptimalInsertions(); static long getOptimalSize(long expectedInsertions); void merge(BloomFilter that); @VisibleForTesting long getNumBitsSet(); @Override String toString(); @Override void close(); } | @Test public void testGetOptimalSize() { assertEquals(40, BloomFilter.getOptimalSize(1)); assertEquals(40, BloomFilter.getOptimalSize(4)); assertEquals(152, BloomFilter.getOptimalSize(100)); assertEquals(1_232, BloomFilter.getOptimalSize(1_000)); assertEquals(1_198_168, BloomFilter.getOptimalSize(1_000_000)); assertEquals(1_198_132_336, BloomFilter.getOptimalSize(1_000_000_000)); } |
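The expected sizes in the test back out the constants the snippet does not show: they are consistent with FPP = 0.01 and 32 metadata bytes (both inferred, not confirmed). An illustrative recomputation of getOptimalSize(1_000), mirroring the expression above step by step:

long n = 1_000;
double fpp = 0.01; // assumed value of FPP
long bytes = (long) (-n * Math.log(fpp) / (Math.log(2) * Math.log(2))) / 8; // ~9585 bits -> 1198 bytes
bytes = ((bytes + 8) / 8) * 8;                                              // round past a block -> 1200
System.out.println(bytes + 32);                                             // + META_BYTES_CNT -> 1232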
BloomFilter implements AutoCloseable { @Override public void close() { logger.debug("Closing bloomfilter {}'s data buffer. RefCount {}", this.name, dataBuffer.refCnt()); try { dataBuffer.close(); } catch (Exception e) { logger.error("Error while closing bloomfilter " + this.name, e); } } BloomFilter(BufferAllocator bufferAllocator, String name, long minSizeBytes); private BloomFilter(ArrowBuf dataBuffer); void setup(); String getName(); long getSizeInBytes(); static BloomFilter prepareFrom(ArrowBuf dataBuffer); ArrowBuf getDataBuffer(); boolean mightContain(ArrowBuf bloomFilterKey, int length); boolean put(ArrowBuf bloomFilterKey, int length); double getExpectedFPP(); boolean isCrossingMaxFPP(); long getOptimalInsertions(); static long getOptimalSize(long expectedInsertions); void merge(BloomFilter that); @VisibleForTesting long getNumBitsSet(); @Override String toString(); @Override void close(); } | @Test public void testClose() { try (final BloomFilter f1 = new BloomFilter(bfTestAllocator, TEST_NAME, 64)) { f1.setup(); f1.getDataBuffer().retain(); assertEquals(2, f1.getDataBuffer().refCnt()); f1.close(); assertEquals(1, f1.getDataBuffer().refCnt()); } } |
StringFunctionUtil { public static int copyUtf8(ByteBuf in, final int start, final int end, ArrowBuf out) { int i = 0; int errBytes = 0; while (start + i < end) { byte b = in.getByte(start + i); if (b >= 0) { out.setByte(i - errBytes, b); i++; continue; } int seqLen = utf8CharLenNoThrow(in, start + i); if (seqLen == 0 || (start + i + seqLen) > end || !GuavaUtf8.isUtf8(in, start + i, start + i + seqLen)) { errBytes++; i++; } else { for (int j = i; j < i + seqLen; j++) { out.setByte(j - errBytes, in.getByte(start + j)); } i += seqLen; } } return end - start - errBytes; } static int getUTF8CharLength(ByteBuf buffer, int start, int end, final FunctionErrorContext
errCtx); static int getUTF8CharPosition(ByteBuf buffer, int start, int end, int charLength, final
FunctionErrorContext errCtx); static Pattern compilePattern(String regex, FunctionErrorContext errCtx); static Pattern compilePattern(String regex, int flags, FunctionErrorContext errCtx); static int stringLeftMatchUTF8(ByteBuf str, int strStart, int strEnd,
ByteBuf substr, int subStart, int subEnd); static int stringLeftMatchUTF8(ByteBuf str, int strStart, int strEnd,
ByteBuf substr, int subStart, int subEnd, int offset); static int parseBinaryStringNoFormat(ByteBuf str, int strStart, int strEnd, ByteBuf out,
FunctionErrorContext errCtx); static int utf8CharLen(ByteBuf buffer, int idx, final FunctionErrorContext errCtx); static int copyUtf8(ByteBuf in, final int start, final int end, ArrowBuf out); static int copyReplaceUtf8(ByteBuf in, final int start, final int end, ByteBuf out, byte
replacement); } | @Test public void testCopyUtf8() throws Exception { testCopyUtf8Helper(new byte[] {'g', 'o', 'o', 'd', 'v', 'a', 'l'}, "goodval"); testCopyUtf8Helper(new byte[] {'b', 'a', 'd', (byte)0xff, 'v', 'a', 'l'}, "badval"); testCopyUtf8Helper(new byte[] {(byte)0xf9, 'g', 'o', 'o', 'd', ' ', 'p', 'a', 'r', 't'}, "good part"); testCopyUtf8Helper(new byte[] {'t', 'h', 'i', 's', ' ', 'i', 's', ' ', 'o', 'k', (byte)0xfe}, "this is ok"); testCopyUtf8Helper(new byte[] {'f', 'a', 'k', 'e', ' ', (byte) 0xC0, '2', 'B', ' ', 's', 'e', 'q', }, "fake 2B seq"); } |
StringFunctionUtil { public static int copyReplaceUtf8(ByteBuf in, final int start, final int end, ByteBuf out, byte replacement) { int i = 0; while (start + i < end) { byte b = in.getByte(start + i); if (b >= 0) { out.setByte(i, b); i++; continue; } int seqLen = utf8CharLenNoThrow(in, start + i); if (seqLen == 0 || (start + i + seqLen) > end || !GuavaUtf8.isUtf8(in, start + i, start + i + seqLen)) { out.setByte(i, replacement); i++; } else { for (int j = i; j < i + seqLen; j++) { out.setByte(j, in.getByte(start + j)); } i += seqLen; } } return end - start; } static int getUTF8CharLength(ByteBuf buffer, int start, int end, final FunctionErrorContext
errCtx); static int getUTF8CharPosition(ByteBuf buffer, int start, int end, int charLength, final
FunctionErrorContext errCtx); static Pattern compilePattern(String regex, FunctionErrorContext errCtx); static Pattern compilePattern(String regex, int flags, FunctionErrorContext errCtx); static int stringLeftMatchUTF8(ByteBuf str, int strStart, int strEnd,
ByteBuf substr, int subStart, int subEnd); static int stringLeftMatchUTF8(ByteBuf str, int strStart, int strEnd,
ByteBuf substr, int subStart, int subEnd, int offset); static int parseBinaryStringNoFormat(ByteBuf str, int strStart, int strEnd, ByteBuf out,
FunctionErrorContext errCtx); static int utf8CharLen(ByteBuf buffer, int idx, final FunctionErrorContext errCtx); static int copyUtf8(ByteBuf in, final int start, final int end, ArrowBuf out); static int copyReplaceUtf8(ByteBuf in, final int start, final int end, ByteBuf out, byte
replacement); } | @Test public void testCopyReplaceUtf8() throws Exception { testReplaceUtf8Helper(new byte[] {'g', 'o', 'o', 'd', 'v', 'a', 'l'}, (byte)'?', "goodval"); testReplaceUtf8Helper(new byte[] {'b', 'a', 'd', (byte)0xff, 'v', 'a', 'l'}, (byte)'?', "bad?val"); testReplaceUtf8Helper(new byte[] {(byte)0xf9, 'g', 'o', 'o', 'd', ' ', 'p', 'a', 'r', 't'}, (byte)'X', "Xgood part"); testReplaceUtf8Helper(new byte[] {'t', 'h', 'i', 's', ' ', 'i', 's', ' ', 'o', 'k', (byte)0xfe}, (byte)'|', "this is ok|"); testReplaceUtf8Helper(new byte[] {'f', 'a', 'k', 'e', ' ', (byte) 0xC0, '2', 'B', ' ', 's', 'e', 'q', }, (byte)'?', "fake ?2B seq"); } |
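The two copy routines differ only in how an invalid byte is handled: copyUtf8 drops it and returns the shrunken length, while copyReplaceUtf8 substitutes the replacement byte and keeps the length. A minimal sketch of the replace variant, with Netty heap buffers standing in for the operator-managed buffers (that buffer choice is an assumption; the call itself is the one shown above):

byte[] raw = {'b', 'a', 'd', (byte) 0xff, 'v', 'a', 'l'};
ByteBuf in = Unpooled.wrappedBuffer(raw);   // io.netty.buffer.Unpooled
ByteBuf out = Unpooled.buffer(raw.length);
int written = StringFunctionUtil.copyReplaceUtf8(in, 0, raw.length, out, (byte) '?');
// written == 7 and out now holds "bad?val"; copyUtf8 over the same input would
// return 6, having dropped the 0xff byte entirely ("badval")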
IcebergPartitionData implements StructLike, Serializable { public void setInteger(int position, Integer value) { set(position, value); } IcebergPartitionData(Types.StructType partitionType); private IcebergPartitionData(IcebergPartitionData toCopy); Type getType(int pos); void clear(); @Override int size(); @Override @SuppressWarnings("unchecked") T get(int pos, Class<T> javaClass); Object get(int pos); @Override void set(int pos, T value); @Override String toString(); IcebergPartitionData copy(); @Override boolean equals(Object o); @Override int hashCode(); static Object[] copyData(Types.StructType type, Object[] data); void setInteger(int position, Integer value); void setLong(int position, Long value); void setFloat(int position, Float value); void setDouble(int position, Double value); void setBoolean(int position, Boolean value); void setString(int position, String value); void setBytes(int position, byte[] value); void setBigDecimal(int position, BigDecimal value); void set(int position, CompleteType type, ValueVector vector, int offset); } | @Test public void testIntSpec() throws Exception{ String columnName = "i"; Integer expectedValue = 12322; PartitionSpec partitionSpec = PartitionSpec .builderFor(schema) .identity(columnName) .build(); IcebergPartitionData icebergPartitionData = new IcebergPartitionData(partitionSpec.partitionType()); icebergPartitionData.setInteger(0, expectedValue); verifyPartitionValue(partitionSpec, icebergPartitionData, columnName, Integer.class, expectedValue); } |
IcebergPartitionData implements StructLike, Serializable { public void setString(int position, String value) { set(position, value); } IcebergPartitionData(Types.StructType partitionType); private IcebergPartitionData(IcebergPartitionData toCopy); Type getType(int pos); void clear(); @Override int size(); @Override @SuppressWarnings("unchecked") T get(int pos, Class<T> javaClass); Object get(int pos); @Override void set(int pos, T value); @Override String toString(); IcebergPartitionData copy(); @Override boolean equals(Object o); @Override int hashCode(); static Object[] copyData(Types.StructType type, Object[] data); void setInteger(int position, Integer value); void setLong(int position, Long value); void setFloat(int position, Float value); void setDouble(int position, Double value); void setBoolean(int position, Boolean value); void setString(int position, String value); void setBytes(int position, byte[] value); void setBigDecimal(int position, BigDecimal value); void set(int position, CompleteType type, ValueVector vector, int offset); } | @Test public void testStringSpec() throws Exception{ String columnName = "data"; String expectedValue = "abc"; PartitionSpec partitionSpec = PartitionSpec .builderFor(schema) .identity(columnName) .build(); IcebergPartitionData icebergPartitionData = new IcebergPartitionData(partitionSpec.partitionType()); icebergPartitionData.setString(0, expectedValue); verifyPartitionValue(partitionSpec, icebergPartitionData, columnName, String.class, expectedValue); } |
IcebergPartitionData implements StructLike, Serializable { public void setLong(int position, Long value) { set(position, value); } IcebergPartitionData(Types.StructType partitionType); private IcebergPartitionData(IcebergPartitionData toCopy); Type getType(int pos); void clear(); @Override int size(); @Override @SuppressWarnings("unchecked") T get(int pos, Class<T> javaClass); Object get(int pos); @Override void set(int pos, T value); @Override String toString(); IcebergPartitionData copy(); @Override boolean equals(Object o); @Override int hashCode(); static Object[] copyData(Types.StructType type, Object[] data); void setInteger(int position, Integer value); void setLong(int position, Long value); void setFloat(int position, Float value); void setDouble(int position, Double value); void setBoolean(int position, Boolean value); void setString(int position, String value); void setBytes(int position, byte[] value); void setBigDecimal(int position, BigDecimal value); void set(int position, CompleteType type, ValueVector vector, int offset); } | @Test public void testLongSpec() throws Exception{ String columnName = "id"; Long expectedValue = 123L; PartitionSpec partitionSpec = PartitionSpec .builderFor(schema) .identity(columnName) .build(); IcebergPartitionData icebergPartitionData = new IcebergPartitionData(partitionSpec.partitionType()); icebergPartitionData.setLong(0, expectedValue); verifyPartitionValue(partitionSpec, icebergPartitionData, columnName, Long.class, expectedValue); } |
IcebergPartitionData implements StructLike, Serializable { public void setBigDecimal(int position, BigDecimal value) { set(position, value); } IcebergPartitionData(Types.StructType partitionType); private IcebergPartitionData(IcebergPartitionData toCopy); Type getType(int pos); void clear(); @Override int size(); @Override @SuppressWarnings("unchecked") T get(int pos, Class<T> javaClass); Object get(int pos); @Override void set(int pos, T value); @Override String toString(); IcebergPartitionData copy(); @Override boolean equals(Object o); @Override int hashCode(); static Object[] copyData(Types.StructType type, Object[] data); void setInteger(int position, Integer value); void setLong(int position, Long value); void setFloat(int position, Float value); void setDouble(int position, Double value); void setBoolean(int position, Boolean value); void setString(int position, String value); void setBytes(int position, byte[] value); void setBigDecimal(int position, BigDecimal value); void set(int position, CompleteType type, ValueVector vector, int offset); } | @Test public void testBigDecimalSpec() throws Exception{ String columnName = "dec_9_0"; BigDecimal expectedValue = new BigDecimal(234); PartitionSpec partitionSpec = PartitionSpec .builderFor(schema) .identity(columnName) .build(); IcebergPartitionData icebergPartitionData = new IcebergPartitionData(partitionSpec.partitionType()); icebergPartitionData.setBigDecimal(0, expectedValue); verifyPartitionValue(partitionSpec, icebergPartitionData, columnName, BigDecimal.class, expectedValue); }
IcebergPartitionData implements StructLike, Serializable { public void setFloat(int position, Float value) { set(position, value); } IcebergPartitionData(Types.StructType partitionType); private IcebergPartitionData(IcebergPartitionData toCopy); Type getType(int pos); void clear(); @Override int size(); @Override @SuppressWarnings("unchecked") T get(int pos, Class<T> javaClass); Object get(int pos); @Override void set(int pos, T value); @Override String toString(); IcebergPartitionData copy(); @Override boolean equals(Object o); @Override int hashCode(); static Object[] copyData(Types.StructType type, Object[] data); void setInteger(int position, Integer value); void setLong(int position, Long value); void setFloat(int position, Float value); void setDouble(int position, Double value); void setBoolean(int position, Boolean value); void setString(int position, String value); void setBytes(int position, byte[] value); void setBigDecimal(int position, BigDecimal value); void set(int position, CompleteType type, ValueVector vector, int offset); } | @Test public void testFloatSpec() throws Exception{ String columnName = "f"; Float expectedValue = 1.23f; PartitionSpec partitionSpec = PartitionSpec .builderFor(schema) .identity(columnName) .build(); IcebergPartitionData icebergPartitionData = new IcebergPartitionData(partitionSpec.partitionType()); icebergPartitionData.setFloat(0, expectedValue); verifyPartitionValue(partitionSpec, icebergPartitionData, columnName, Float.class, expectedValue); } |
IcebergPartitionData implements StructLike, Serializable { public void setDouble(int position, Double value) { set(position, value); } IcebergPartitionData(Types.StructType partitionType); private IcebergPartitionData(IcebergPartitionData toCopy); Type getType(int pos); void clear(); @Override int size(); @Override @SuppressWarnings("unchecked") T get(int pos, Class<T> javaClass); Object get(int pos); @Override void set(int pos, T value); @Override String toString(); IcebergPartitionData copy(); @Override boolean equals(Object o); @Override int hashCode(); static Object[] copyData(Types.StructType type, Object[] data); void setInteger(int position, Integer value); void setLong(int position, Long value); void setFloat(int position, Float value); void setDouble(int position, Double value); void setBoolean(int position, Boolean value); void setString(int position, String value); void setBytes(int position, byte[] value); void setBigDecimal(int position, BigDecimal value); void set(int position, CompleteType type, ValueVector vector, int offset); } | @Test public void testDoubleSpec() throws Exception{ String columnName = "d"; Double expectedValue = Double.valueOf(1.23f); PartitionSpec partitionSpec = PartitionSpec .builderFor(schema) .identity(columnName) .build(); IcebergPartitionData icebergPartitionData = new IcebergPartitionData(partitionSpec.partitionType()); icebergPartitionData.setDouble(0, expectedValue); verifyPartitionValue(partitionSpec, icebergPartitionData, columnName, Double.class, expectedValue); } |
IcebergPartitionData implements StructLike, Serializable { public void setBoolean(int position, Boolean value) { set(position, value); } IcebergPartitionData(Types.StructType partitionType); private IcebergPartitionData(IcebergPartitionData toCopy); Type getType(int pos); void clear(); @Override int size(); @Override @SuppressWarnings("unchecked") T get(int pos, Class<T> javaClass); Object get(int pos); @Override void set(int pos, T value); @Override String toString(); IcebergPartitionData copy(); @Override boolean equals(Object o); @Override int hashCode(); static Object[] copyData(Types.StructType type, Object[] data); void setInteger(int position, Integer value); void setLong(int position, Long value); void setFloat(int position, Float value); void setDouble(int position, Double value); void setBoolean(int position, Boolean value); void setString(int position, String value); void setBytes(int position, byte[] value); void setBigDecimal(int position, BigDecimal value); void set(int position, CompleteType type, ValueVector vector, int offset); } | @Test public void testBooleanSpec() throws Exception{ String columnName = "b"; Boolean expectedValue = true; PartitionSpec partitionSpec = PartitionSpec .builderFor(schema) .identity(columnName) .build(); IcebergPartitionData icebergPartitionData = new IcebergPartitionData(partitionSpec.partitionType()); icebergPartitionData.setBoolean(0, expectedValue); verifyPartitionValue(partitionSpec, icebergPartitionData, columnName, Boolean.class, expectedValue); } |
IcebergPartitionData implements StructLike, Serializable { public void setBytes(int position, byte[] value) { set(position, value); } IcebergPartitionData(Types.StructType partitionType); private IcebergPartitionData(IcebergPartitionData toCopy); Type getType(int pos); void clear(); @Override int size(); @Override @SuppressWarnings("unchecked") T get(int pos, Class<T> javaClass); Object get(int pos); @Override void set(int pos, T value); @Override String toString(); IcebergPartitionData copy(); @Override boolean equals(Object o); @Override int hashCode(); static Object[] copyData(Types.StructType type, Object[] data); void setInteger(int position, Integer value); void setLong(int position, Long value); void setFloat(int position, Float value); void setDouble(int position, Double value); void setBoolean(int position, Boolean value); void setString(int position, String value); void setBytes(int position, byte[] value); void setBigDecimal(int position, BigDecimal value); void set(int position, CompleteType type, ValueVector vector, int offset); } | @Test public void testBinarySpec() throws Exception{ String columnName = "bytes"; byte[] expectedValue = "test".getBytes(); PartitionSpec partitionSpec = PartitionSpec .builderFor(schema) .identity(columnName) .build(); IcebergPartitionData icebergPartitionData = new IcebergPartitionData(partitionSpec.partitionType()); icebergPartitionData.setBytes(0, expectedValue); verifyPartitionValue(partitionSpec, icebergPartitionData, columnName, ByteBuffer.class, expectedValue); } |
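All of the typed setters above funnel into the same positional set(pos, value); a sketch combining two of them under one multi-column identity spec (schema and the column names are the same ones the tests build against):

PartitionSpec spec = PartitionSpec.builderFor(schema)
    .identity("i")
    .identity("data")
    .build();
IcebergPartitionData partitionData = new IcebergPartitionData(spec.partitionType());
partitionData.setInteger(0, 12322); // positions follow the spec's field order
partitionData.setString(1, "abc");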
IcebergFormatMatcher extends FormatMatcher { @Override public boolean matches(FileSystem fs, FileSelection fileSelection, CompressionCodecFactory codecFactory) throws IOException { Path rootDir = Path.of(fileSelection.getSelectionRoot()); Path metaDir = rootDir.resolve(METADATA_DIR_NAME); if (!fs.isDirectory(rootDir) || !fs.exists(metaDir) || !fs.isDirectory(metaDir)) { return false; } Path versionHintPath = metaDir.resolve(VERSION_HINT_FILE_NAME); if (!fs.exists(versionHintPath) || !fs.isFile(versionHintPath)) { return false; } for (FileAttributes file : fs.list(metaDir)) { if (METADATA_FILE_PATTERN.matcher(file.getPath().getName()).matches()) { return true; } } return false; } IcebergFormatMatcher(FormatPlugin plugin); @Override FormatPlugin getFormatPlugin(); @Override boolean matches(FileSystem fs, FileSelection fileSelection, CompressionCodecFactory codecFactory); static final String METADATA_DIR_NAME; } | @Test public void match() throws Exception { IcebergFormatMatcher matcher = new IcebergFormatMatcher(null); FileSystem fs = HadoopFileSystem.getLocal(new Configuration()); File root = tempDir.newFolder(); FileSelection fileSelection = FileSelection.create(fs, Path.of(root.toURI())); boolean matched; assertFalse(matcher.matches(fs, fileSelection, null)); File metadata = new File(root, "metadata"); metadata.mkdir(); assertFalse(matcher.matches(fs, fileSelection, null)); File versionHint = new File(metadata, "version-hint.text"); versionHint.createNewFile(); File metadataJsonNoDot = new File(metadata, "v9metadata.json"); metadataJsonNoDot.createNewFile(); assertFalse(matcher.matches(fs, fileSelection, null)); File metadataJson = new File(metadata, "v9.metadata.json"); metadataJson.createNewFile(); matched = matcher.matches(fs, fileSelection, null); assertTrue(matched); } |
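The match() test walks the matcher's checks in order; the on-disk layout those checks demand is, in one view (file names taken from the test):

// <selectionRoot>/             must be a directory
//   metadata/                  must exist and be a directory
//     version-hint.text        must exist and be a regular file
//     v9.metadata.json         at least one name matching METADATA_FILE_PATTERN
// "v9metadata.json" (no dot before "metadata") is created first in the test precisely
// to show that it alone does not satisfy the pattern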
SchemaConverter { public BatchSchema fromIceberg(org.apache.iceberg.Schema icebergSchema) { return new BatchSchema(icebergSchema .columns() .stream() .map(SchemaConverter::fromIcebergColumn) .filter(Objects::nonNull) .collect(Collectors.toList())); } SchemaConverter(); BatchSchema fromIceberg(org.apache.iceberg.Schema icebergSchema); static Field fromIcebergColumn(NestedField field); static CompleteType fromIcebergType(Type type); static CompleteType fromIcebergPrimitiveType(PrimitiveType type); org.apache.iceberg.Schema toIceberg(BatchSchema schema); static NestedField toIcebergColumn(Field field); static Schema getChildSchemaForStruct(Schema schema, String structName); static Schema getChildSchemaForList(Schema schema, String listName); } | @Test public void missingArrowTypes() { org.apache.iceberg.Schema icebergSchema = new org.apache.iceberg.Schema( NestedField.optional(1, "uuid", Types.UUIDType.get()) ); BatchSchema schema = BatchSchema.newBuilder() .addField(new CompleteType(new FixedSizeBinary(16)).toField("uuid")) .build(); BatchSchema result = schemaConverter.fromIceberg(icebergSchema); assertEquals(schema, result); }
@Test public void unsupportedIcebergTypes() { org.apache.iceberg.Schema schema = new org.apache.iceberg.Schema( NestedField.optional(1, "timestamp_nozone_field", Types.TimestampType.withoutZone()) ); expectedEx.expect(UserException.class); expectedEx.expectMessage("conversion from iceberg type to arrow type failed for field timestamp_nozone_field"); SchemaConverter convert = new SchemaConverter(); convert.fromIceberg(schema); } |
SchemaConverter { public org.apache.iceberg.Schema toIceberg(BatchSchema schema) { org.apache.iceberg.Schema icebergSchema = new org.apache.iceberg.Schema(schema .getFields() .stream() .filter(x -> !x.getName().equalsIgnoreCase(WriterPrel.PARTITION_COMPARATOR_FIELD)) .map(x -> toIcebergColumn(x)) .collect(Collectors.toList())); return TypeUtil.assignIncreasingFreshIds(icebergSchema); } SchemaConverter(); BatchSchema fromIceberg(org.apache.iceberg.Schema icebergSchema); static Field fromIcebergColumn(NestedField field); static CompleteType fromIcebergType(Type type); static CompleteType fromIcebergPrimitiveType(PrimitiveType type); org.apache.iceberg.Schema toIceberg(BatchSchema schema); static NestedField toIcebergColumn(Field field); static Schema getChildSchemaForStruct(Schema schema, String structName); static Schema getChildSchemaForList(Schema schema, String listName); } | @Test public void mixed() throws Exception { BatchSchema schema = BatchSchema.newBuilder() .addField(CompleteType.INT.toField("rownum")) .addField(CompleteType.VARCHAR.toField("name")) .addField(CompleteType.INT.toField("age")) .addField(CompleteType.FLOAT.toField("gpa")) .addField(CompleteType.BIGINT.toField("studentnum")) .addField(CompleteType.TIMESTAMP.toField("create_time")) .addField(CompleteType.VARCHAR.asList().toField("interests")) .addField(CompleteType.struct( CompleteType.VARCHAR.toField("color"), CompleteType.VARCHAR.toField("sport"), CompleteType.VARCHAR.toField("food") ).toField("favorites")) .build(); org.apache.iceberg.Schema expectedSchema = new org.apache.iceberg.Schema( NestedField.optional(1, "rownum", Types.IntegerType.get()), NestedField.optional(2, "name", Types.StringType.get()), NestedField.optional(3, "age", Types.IntegerType.get()), NestedField.optional(4, "gpa", Types.FloatType.get()), NestedField.optional(5, "studentnum", Types.LongType.get()), NestedField.optional(6, "create_time", Types.TimestampType.withZone()), NestedField.optional(7, "interests", Types.ListType.ofOptional(9, Types.StringType.get())), NestedField.optional(8, "favorites", Types.StructType.of( NestedField.optional(10, "color", Types.StringType.get()), NestedField.optional(11, "sport", Types.StringType.get()), NestedField.optional(12, "food", Types.StringType.get()) )) ); org.apache.iceberg.Schema icebergResult = schemaConverter.toIceberg(schema); assertEquals(expectedSchema.toString(), icebergResult.toString()); TemporaryFolder folder = new TemporaryFolder(); folder.create(); String rootPath = folder.getRoot().toString(); Configuration conf = new Configuration(); IcebergCatalog catalog = new IcebergCatalog(rootPath, conf); catalog.beginCreateTable(schema, Collections.emptyList()); catalog.endCreateTable(); Table table = new HadoopTables(conf).load(rootPath); assertEquals(expectedSchema.toString(), table.schema().toString()); }
@Test public void testPartitionComparatorField() { BatchSchema inputSchema = BatchSchema.newBuilder() .addField(CompleteType.BIT.toField("boolean")) .addField(CompleteType.INT.toField("int")) .addField(CompleteType.BIT.toField(WriterPrel.PARTITION_COMPARATOR_FIELD)) .build(); org.apache.iceberg.Schema expectedSchema = new org.apache.iceberg.Schema( NestedField.optional(1, "boolean", Types.BooleanType.get()), NestedField.optional(2, "int", Types.IntegerType.get())); SchemaConverter convert = new SchemaConverter(); assertEquals(expectedSchema.toString(), convert.toIceberg(inputSchema).toString()); }
@Test public void unsupportedArrowTypes() { BatchSchema inputSchema = BatchSchema.newBuilder() .addField(CompleteType.union( CompleteType.INT.toField("int_field"), CompleteType.BIGINT.toField("bigint_field") ).toField("union_field")) .build(); expectedEx.expect(UserException.class); expectedEx.expectMessage("conversion from arrow type to iceberg type failed for field union_field"); SchemaConverter convert = new SchemaConverter(); convert.toIceberg(inputSchema); } |
DatasetVersionResource extends BaseResourceWithAllocator { @GET @Produces(APPLICATION_JSON) public Dataset getDataset() throws DatasetVersionNotFoundException, DatasetNotFoundException, NamespaceException { return getCurrentDataset(); } @Inject DatasetVersionResource(
SabotContext context,
QueryExecutor executor,
DatasetVersionMutator datasetService,
JobsService jobsService,
NamespaceService namespaceService,
JoinRecommender joinRecommender,
@Context SecurityContext securityContext,
@PathParam("cpath") DatasetPath datasetPath,
@PathParam("version") DatasetVersion version,
BufferAllocatorFactory allocatorFactory
); DatasetVersionResource(
QueryExecutor executor,
DatasetVersionMutator datasetService,
JobsService jobsService,
Recommenders recommenders,
Transformer transformer,
JoinRecommender joinRecommender,
DatasetTool datasetTool,
HistogramGenerator histograms,
SecurityContext securityContext,
DatasetPath datasetPath,
DatasetVersion version,
BufferAllocator allocator
); protected DatasetVersionResource(
QueryExecutor executor,
DatasetVersionMutator datasetService,
JobsService jobsService,
Recommenders recommenders,
Transformer transformer,
JoinRecommender joinRecommender,
DatasetTool datasetTool,
HistogramGenerator histograms,
SecurityContext securityContext,
DatasetPath datasetPath,
DatasetVersion version,
BufferAllocatorFactory allocatorFactory
); @GET @Produces(APPLICATION_JSON) Dataset getDataset(); @GET @Path("preview") @Produces(APPLICATION_JSON) InitialPreviewResponse getDatasetForVersion(
@QueryParam("tipVersion") DatasetVersion tipVersion,
@QueryParam("limit") Integer limit); @GET @Path("review") @Produces(APPLICATION_JSON) InitialPreviewResponse reviewDatasetVersion(
@QueryParam("jobId") String jobId,
@QueryParam("tipVersion") DatasetVersion tipVersion,
@QueryParam("limit") Integer limit); @POST @Path("transformAndPreview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) InitialPreviewResponse transformAndPreview(
/* Body */ TransformBase transform,
@QueryParam("newVersion") DatasetVersion newVersion,
@QueryParam("limit") @DefaultValue("50") int limit); @POST @Path("transformAndRun") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) InitialTransformAndRunResponse transformAndRun(
/* Body */ TransformBase transform,
@QueryParam("newVersion") DatasetVersion newVersion
); @GET @Path("run") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) InitialRunResponse run(@QueryParam("tipVersion") DatasetVersion tipVersion); @POST @Path("transformPeek") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) InitialPendingTransformResponse transformDataSetPreview(
/* Body */ TransformBase transform,
@QueryParam("newVersion") DatasetVersion newVersion,
@QueryParam("limit") @DefaultValue("50") int limit); @POST @Path("save") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) DatasetUIWithHistory saveAsDataSet(
@QueryParam("as") DatasetPath asDatasetPath,
@QueryParam("savedTag") String savedTag // null for the first save
); DatasetUI save(VirtualDatasetUI vds, DatasetPath asDatasetPath, String savedTag, NamespaceAttribute... attributes); @POST @Path("extract") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Cards<ExtractRule> getExtractCards(
/* Body */ Selection selection); @POST @Path("extract_preview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Card<ExtractRule> getExtractCard(
/* Body */ PreviewReq<ExtractRule, Selection> req); @POST @Path("extract_map") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Cards<ExtractMapRule> getExtractMapCards(
/* Body */ MapSelection mapSelection); @POST @Path("extract_map_preview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Card<ExtractMapRule> getExtractMapCard(
/* Body */ PreviewReq<ExtractMapRule, MapSelection> req); @POST @Path("extract_list") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Cards<ExtractListRule> getExtractListCards(
/* Body */ Selection selection); @POST @Path("extract_list_preview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Card<ExtractListRule> getExtractListCard(
/* Body */ PreviewReq<ExtractListRule, Selection> req); @POST @Path("split") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Cards<SplitRule> getSplitCards(
/* Body */ Selection selection); @POST @Path("split_preview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Card<SplitRule> getSplitCard(
/* Body */ PreviewReq<SplitRule, Selection> req); @POST @Path("/editOriginalSql") @Produces(APPLICATION_JSON) InitialPreviewResponse reapplyDatasetAndPreview(); @POST @Path("/reapplyAndSave") @Produces(APPLICATION_JSON) DatasetUIWithHistory reapplySave(
@QueryParam("as") DatasetPath asDatasetPath
); @POST @Path("replace") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) ReplaceCards getReplaceCards(
/* Body */ Selection selection); @POST @Path("replace_preview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Card<ReplacePatternRule> getReplaceCard(
/* Body */ PreviewReq<ReplacePatternRule, Selection> req); @POST @Path("replace_values_preview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) ReplaceValuesCard getReplaceValuesCard(
/* Body */ ReplaceValuesPreviewReq req); @POST @Path("keeponly") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) ReplaceCards getKeeponlyCards(
/* Body */ Selection selection); @POST @Path("keeponly_preview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Card<ReplacePatternRule> getKeeponlyCard(
/* Body */ PreviewReq<ReplacePatternRule, Selection> req); @POST @Path("keeponly_values_preview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) ReplaceValuesCard getKeeponlyValuesCard(
/* Body */ ReplaceValuesPreviewReq req); @POST @Path("exclude") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) ReplaceCards getExcludeCards(
/* Body */ Selection selection); @POST @Path("exclude_preview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) Card<ReplacePatternRule> getExcludeCard(
/* Body */ PreviewReq<ReplacePatternRule, Selection> req); @POST @Path("exclude_values_preview") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) ReplaceValuesCard getExcludeValuesCard(
/* Body */ ReplaceValuesPreviewReq req); @GET @Path("history") @Produces(APPLICATION_JSON) History getHistory(@QueryParam("tipVersion") DatasetVersion tipVersion); @POST @Path("clean") @Produces(APPLICATION_JSON) @Consumes(APPLICATION_JSON) CleanDataCard getCleanDataCard(
ColumnForCleaning col); @GET @Path("join_recs") @Produces(APPLICATION_JSON) JoinRecommendations getJoinRecommendations(); @GET @Path("parents") @Produces(APPLICATION_JSON) List<ParentDatasetUI> getParents(); static final List<DataType> AVAILABLE_TYPES_FOR_CLEANING; } | @Test public void testBrokenVDSEditOriginalSQL() throws Exception { Dataset parentVDS = createVDS(Arrays.asList("dsvTest", "badVDSParent"),"select version, commit_id from sys.version"); parentVDS = expectSuccess(getBuilder(getPublicAPI(3).path("catalog")).buildPost(Entity.json(parentVDS)), new GenericType<Dataset>() {}); Dataset newVDS = createVDS(Arrays.asList("dsvTest", "badVDS"),"select version from dsvTest.badVDSParent"); newVDS = expectSuccess(getBuilder(getPublicAPI(3).path("catalog")).buildPost(Entity.json(newVDS)), new GenericType<Dataset>() {}); Dataset updatedParentVDS = new Dataset( parentVDS.getId(), Dataset.DatasetType.VIRTUAL_DATASET, parentVDS.getPath(), null, null, parentVDS.getTag(), parentVDS.getAccelerationRefreshPolicy(), "select commit_id from sys.version", parentVDS.getSqlContext(), parentVDS.getFormat(), null ); expectSuccess(getBuilder(getPublicAPI(3).path("catalog").path(updatedParentVDS.getId())).buildPut(Entity.json(updatedParentVDS)), new GenericType<Dataset>() {}); String dsPath = String.join(".", newVDS.getPath()); DatasetVersion datasetVersion = DatasetVersion.newVersion(); WebTarget target = getAPIv2() .path("datasets") .path("new_untitled") .queryParam("parentDataset", dsPath) .queryParam("newVersion", datasetVersion) .queryParam("limit", 120); ApiErrorModel apiErrorModel = expectStatus(Response.Status.BAD_REQUEST, getBuilder(target).buildPost(Entity.json(null)), new GenericType<ApiErrorModel<InvalidQueryException.Details>>() {}); InvalidQueryException.Details details = (InvalidQueryException.Details) apiErrorModel.getDetails(); target = getAPIv2() .path("dataset") .path(dsPath) .path("version") .path(details.getDatasetSummary().getDatasetVersion().getVersion()) .path("preview") .queryParam("view", "explore") .queryParam("limit", "0"); InitialPreviewResponse initialPreviewResponse = expectSuccess(getBuilder(target).buildGet(), new GenericType<InitialPreviewResponse>() {}); assertEquals(newVDS.getSql(), initialPreviewResponse.getDataset().getSql()); }
@Test public void testRenameShouldNotBreakHistory() throws Exception { Dataset parentVDS = createVDS(Arrays.asList("dsvTest", "renameParentVDS"),"select * from sys.version"); Dataset vds = expectSuccess(getBuilder(getPublicAPI(3).path("catalog")).buildPost(Entity.json(parentVDS)), new GenericType<Dataset>() {}); String parentDataset = String.join(".", parentVDS.getPath()); DatasetVersion datasetVersion = DatasetVersion.newVersion(); WebTarget target = getAPIv2() .path("datasets") .path("new_untitled") .queryParam("parentDataset", parentDataset) .queryParam("newVersion", datasetVersion) .queryParam("limit", 120); InitialPreviewResponse initialPreviewResponse = expectSuccess(getBuilder(target).buildPost(Entity.json(null)), new GenericType<InitialPreviewResponse>() {}); target = getAPIv2() .path("dataset") .path("tmp.UNTITLED") .path("version") .path(datasetVersion.getVersion()) .path("save") .queryParam("as", "dsvTest.renameVDS"); DatasetUIWithHistory dswh = expectSuccess(getBuilder(target).buildPost(Entity.json(null)), new GenericType<DatasetUIWithHistory>() {}); DatasetVersion datasetVersion2 = DatasetVersion.newVersion(); String dsPath = String.join(".", dswh.getDataset().getFullPath()); target = getAPIv2() .path("dataset") .path(dsPath) .path("version") .path(dswh.getDataset().getDatasetVersion().getVersion()) .path("transformAndPreview") .queryParam("newVersion", datasetVersion2); TransformUpdateSQL transformSql = new TransformUpdateSQL(); transformSql.setSql("SELECT \"version\" FROM dsvTest.renameParentVDS"); initialPreviewResponse = expectSuccess(getBuilder(target).buildPost(Entity.json(transformSql)), new GenericType<InitialPreviewResponse>() {}); target = getAPIv2() .path("dataset") .path(dsPath) .path("version") .path(initialPreviewResponse.getDataset().getDatasetVersion().getVersion()) .path("save") .queryParam("as", "dsvTest.renameVDS2"); DatasetUIWithHistory dswh2 = expectSuccess(getBuilder(target).buildPost(Entity.json(null)), new GenericType<DatasetUIWithHistory>() {}); DatasetVersionMutator mutator = l(DatasetVersionMutator.class); VirtualDatasetUI renameDataset = mutator.renameDataset(new DatasetPath(dswh2.getDataset().getFullPath()), new DatasetPath(Arrays.asList("dsvTest", "renameVDS2-new"))); parentDataset = String.join(".", renameDataset.getFullPathList()); datasetVersion = DatasetVersion.newVersion(); target = getAPIv2() .path("datasets") .path("new_untitled") .queryParam("parentDataset", parentDataset) .queryParam("newVersion", datasetVersion) .queryParam("limit", 0); initialPreviewResponse = expectSuccess(getBuilder(target).buildPost(Entity.json(null)), new GenericType<InitialPreviewResponse>() {}); InitialPreviewResponse reapplyResult = reapply(getDatasetVersionPath(initialPreviewResponse.getDataset())); } |
HardAssignmentCreator { public <T extends CompleteWork> ListMultimap<Integer, T> getMappings( final List<NodeEndpoint> endpoints, final List<T> units) throws PhysicalOperatorSetupException { verify(endpoints, units, units.size() >= endpoints.size(), "There should be at least one work unit for each hard affinity node."); final ListMultimap<String, Integer> endpointsOnHostMap = ArrayListMultimap.create(); int index = 0; for(NodeEndpoint incoming : endpoints) { endpointsOnHostMap.put(incoming.getAddress(), index); index++; } final Map<String, Iterator<Integer>> endpointIteratorOnHostMap = Maps.newHashMap(); for(Map.Entry<String, Collection<Integer>> entry: endpointsOnHostMap.asMap().entrySet()) { endpointIteratorOnHostMap.put(entry.getKey(), Iterables.cycle(entry.getValue()).iterator()); } final ListMultimap<Integer, T> mappings = ArrayListMultimap.create(); for(T unit: units) { final List<EndpointAffinity> affinities = unit.getAffinity(); verify(endpoints, units, affinities.size() == 1, "Expected the hard affinity work unit to have affinity to only one endpoint"); final EndpointAffinity endpointAffinity = affinities.get(0); final String host = endpointAffinity.getEndpoint().getAddress(); final Iterator<Integer> endpointsOnHost = endpointIteratorOnHostMap.get(host); if (endpointsOnHost == null) { verify(endpoints, units, false, "There are no endpoints in assigned list running on host %s", host); } final int endpointId = endpointIteratorOnHostMap.get(host).next(); mappings.put(endpointId, unit); } for(int i = 0; i < endpoints.size(); i++) { if (!mappings.containsKey(i)) { verify(endpoints, units, false, "Endpoint %s has no assigned work.", endpoints.get(i)); } } return mappings; } ListMultimap<Integer, T> getMappings(
final List<NodeEndpoint> endpoints, final List<T> units); static HardAssignmentCreator INSTANCE; } | @Test public void simpleOneFileOnAHost() throws Exception { final List<CompleteWork> workUnits = asList( newWork("/10.0.0.1/table/foo1", 1024, ENDPOINT_1_1, 1.00) ); ListMultimap<Integer, CompleteWork> mappings; List<NodeEndpoint> endpoints; endpoints = asList(ENDPOINT_1_1); mappings = INSTANCE.getMappings(endpoints, workUnits); verifyAssignments(mappings, endpoints, workUnits); endpoints = asList(ENDPOINT_1_2); mappings = INSTANCE.getMappings(endpoints, workUnits); verifyAssignments(mappings, endpoints, workUnits); verifyAssignmentFails(workUnits, ENDPOINT_2_1); verifyAssignmentFails(workUnits, ENDPOINT_1_1, ENDPOINT_1_2); verifyAssignmentFails(workUnits, ENDPOINT_1_1, ENDPOINT_2_2); }
@Test public void simpleTwoFileOneOnEachHost() throws Exception { final List<CompleteWork> workUnits = asList( newWork("/10.0.0.1/table/foo1", 1024, ENDPOINT_1_1, 0.33), newWork("/10.0.0.2/table/foo2", 2048, ENDPOINT_2_2, 0.66) ); ListMultimap<Integer, CompleteWork> mappings; List<NodeEndpoint> endpoints; endpoints = asList(ENDPOINT_1_1, ENDPOINT_2_2); mappings = INSTANCE.getMappings(endpoints, workUnits); verifyAssignments(mappings, endpoints, workUnits); endpoints = asList(ENDPOINT_1_2, ENDPOINT_2_1); mappings = INSTANCE.getMappings(endpoints, workUnits); verifyAssignments(mappings, endpoints, workUnits); endpoints = asList(ENDPOINT_1_1, ENDPOINT_2_1); mappings = INSTANCE.getMappings(endpoints, workUnits); verifyAssignments(mappings, endpoints, workUnits); endpoints = asList(ENDPOINT_1_2, ENDPOINT_2_2); mappings = INSTANCE.getMappings(endpoints, workUnits); verifyAssignments(mappings, endpoints, workUnits); verifyAssignmentFails(workUnits, ENDPOINT_1_1, ENDPOINT_1_2); verifyAssignmentFails(workUnits, ENDPOINT_1_1); verifyAssignmentFails(workUnits, ENDPOINT_1_1, ENDPOINT_1_2, ENDPOINT_2_1); }
@Test public void twoFilesOnSameHost() throws Exception { final List<CompleteWork> workUnits = asList( newWork("/10.0.0.1/table/foo1", 1024, ENDPOINT_1_1, 0.33), newWork("/10.0.0.1/table/foo2", 2048, ENDPOINT_1_2, 0.66) ); ListMultimap<Integer, CompleteWork> mappings; List<NodeEndpoint> endpoints; endpoints = asList(ENDPOINT_1_1, ENDPOINT_1_2); mappings = INSTANCE.getMappings(endpoints, workUnits); verifyAssignments(mappings, endpoints, workUnits); endpoints = asList(ENDPOINT_1_2); mappings = INSTANCE.getMappings(endpoints, workUnits); verifyAssignments(mappings, endpoints, workUnits); verifyAssignmentFails(workUnits, ENDPOINT_1_1, ENDPOINT_2_1); }
@Test public void oneOrMoreFilesOnEachHost() throws Exception { final List<CompleteWork> workUnits = asList( newWork("/10.0.0.1/table/foo", 1024, ENDPOINT_1_1, 1024f/48124f), newWork("/10.0.0.1/table/bar", 4096, ENDPOINT_1_2, 4096f/48124f), newWork("/10.0.0.1/table/fb", 8192, ENDPOINT_1_2, 8192f/48124f), newWork("/10.0.0.2/table/foo", 2048, ENDPOINT_2_2, 2048f/48124f), newWork("/10.0.0.2/table/bar", 4096, ENDPOINT_2_1, 4096f/48124f), newWork("/10.0.0.3/table/foo", 16384, ENDPOINT_3_1, 16384f/48124f), newWork("/10.0.0.3/table/bar", 2046, ENDPOINT_3_2, 2046f/48124f), newWork("/10.0.0.3/table/bar2", 6144, ENDPOINT_3_2, 6144f/48124f), newWork("/10.0.0.3/table/bar3", 2046, ENDPOINT_3_2, 2046f/48124f), newWork("/10.0.0.4/table/bar", 2046, ENDPOINT_4_1, 2046f/48124f) ); ListMultimap<Integer, CompleteWork> mappings; List<NodeEndpoint> endpoints; endpoints = ENDPOINTS.subList(0, 7); mappings = INSTANCE.getMappings(endpoints, workUnits); verifyAssignments(mappings, endpoints, workUnits); endpoints = asList(ENDPOINT_1_1, ENDPOINT_2_1, ENDPOINT_3_1, ENDPOINT_4_1); mappings = INSTANCE.getMappings(endpoints, workUnits); verifyAssignments(mappings, endpoints, workUnits); verifyAssignmentFails(workUnits, ENDPOINT_1_1, ENDPOINT_2_1, ENDPOINT_3_1); verifyAssignmentFails(workUnits, ENDPOINT_1_1, ENDPOINT_2_1, ENDPOINT_3_1, ENDPOINT_4_1, ENDPOINT_4_2); } |
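The hard-affinity assignment above works per host: endpoint indices are grouped by address, and units pinned to a host are dealt to that host's endpoints through a cycling iterator, so a host with several endpoints gets its units spread round-robin. A minimal standalone sketch of that per-host round-robin using plain JDK types (hosts as strings standing in for NodeEndpoint/CompleteWork, which is an illustrative simplification):

import java.util.*;

public class RoundRobinPerHost {
  // endpointHosts.get(i) is the host of endpoint i; unitHosts holds each unit's affinity host.
  public static Map<Integer, List<String>> assign(List<String> endpointHosts, List<String> unitHosts) {
    // Group endpoint indices by host, as getMappings() does with endpointsOnHostMap.
    Map<String, List<Integer>> byHost = new HashMap<>();
    for (int i = 0; i < endpointHosts.size(); i++) {
      byHost.computeIfAbsent(endpointHosts.get(i), h -> new ArrayList<>()).add(i);
    }
    // A counter per host plays the role of Iterables.cycle(...).iterator().
    Map<String, Integer> cursor = new HashMap<>();
    Map<Integer, List<String>> mappings = new HashMap<>();
    for (String host : unitHosts) {
      List<Integer> candidates = byHost.get(host);
      if (candidates == null) {
        throw new IllegalStateException("No endpoint in assigned list running on host " + host);
      }
      int next = cursor.merge(host, 1, Integer::sum) - 1;
      mappings.computeIfAbsent(candidates.get(next % candidates.size()), k -> new ArrayList<>()).add(host);
    }
    return mappings;
  }

  public static void main(String[] args) {
    // Two endpoints on host A, one on host B; three units pinned to A, one to B.
    System.out.println(assign(List.of("A", "A", "B"), List.of("A", "A", "A", "B")));
    // {0=[A, A], 1=[A], 2=[B]} -- every endpoint receives work, as the final check requires.
  }
}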
StoragePluginUtils { public static String generateSourceErrorMessage(final String storagePluginName, String errorMessage) { return String.format("Source '%s' returned error '%s'", storagePluginName, errorMessage); } private StoragePluginUtils(); static String generateSourceErrorMessage(final String storagePluginName, String errorMessage); static String generateSourceErrorMessage(final String storagePluginName, String errorMessage, Object... args); static UserException.Builder message(UserException.Builder builder, String sourceName, String errorMessage, Object... args); } | @Test public void testGenerateSourceErrorMessage() { final String sourceName = "test-source"; final String errorMessage = "Failed to establish connection"; Assert.assertEquals("Source 'test-source' returned error 'Failed to establish connection'", StoragePluginUtils.generateSourceErrorMessage(sourceName, errorMessage)); }
@Test public void testGenerateSourceErrorMessageFromFormatString() { final String sourceName = "test-source"; final String errorFmtString = "Returned status code %s from cluster"; Assert.assertEquals("Source 'test-source' returned error 'Returned status code 500 from cluster'", StoragePluginUtils.generateSourceErrorMessage(sourceName, errorFmtString, "500")); } |
StoragePluginUtils { public static UserException.Builder message(UserException.Builder builder, String sourceName, String errorMessage, Object... args) { return builder.message(generateSourceErrorMessage(sourceName, errorMessage), args) .addContext("plugin", sourceName); } private StoragePluginUtils(); static String generateSourceErrorMessage(final String storagePluginName, String errorMessage); static String generateSourceErrorMessage(final String storagePluginName, String errorMessage, Object... args); static UserException.Builder message(UserException.Builder builder, String sourceName, String errorMessage, Object... args); } | @Test public void testAddContextAndErrorMessageToUserException() { final UserException.Builder builder = UserException.validationError(); final String errorMessageFormatString = "Invalid username: %s"; final String sourceName = "fictitious-source"; final UserException userException = StoragePluginUtils.message( builder, sourceName, errorMessageFormatString, "invalid-user").buildSilently(); Assert.assertEquals("Source 'fictitious-source' returned error 'Invalid username: invalid-user'", userException.getMessage()); Assert.assertEquals("plugin fictitious-source", userException.getContextStrings().get(0)); } |
ManagedSchemaField { public boolean isTextField() { return isTextFieldType(type); } private ManagedSchemaField(final String name, final String type, final int length, final int scale, final boolean isUnbounded); static ManagedSchemaField newUnboundedLenField(final String name, final String type); static ManagedSchemaField newFixedLenField(final String name, final String type, final int length, final int scale); String getName(); String getType(); int getLength(); int getScale(); boolean isTextField(); boolean isUnbounded(); @Override String toString(); } | @Test public void testIsTextField() { ManagedSchemaField varcharField = ManagedSchemaField.newFixedLenField("varchar_col", "varchar(20)", 20, 0); assertTrue(varcharField.isTextField()); ManagedSchemaField charField = ManagedSchemaField.newFixedLenField("char_col", "char(20)", 20, 0); assertTrue(charField.isTextField()); ManagedSchemaField stringField = ManagedSchemaField.newFixedLenField("string_col", "String", CompleteType.DEFAULT_VARCHAR_PRECISION, 0); assertTrue(stringField.isTextField()); ManagedSchemaField decimalField = ManagedSchemaField.newUnboundedLenField("decimal_col", "decimal"); assertFalse(decimalField.isTextField()); } |
ImpersonationUtil { public static UserGroupInformation createProxyUgi(String proxyUserName) { try { if (Strings.isNullOrEmpty(proxyUserName)) { throw new IllegalArgumentException("Invalid value for proxy user name"); } if (proxyUserName.equals(getProcessUserName()) || SYSTEM_USERNAME.equals(proxyUserName)) { return getProcessUserUGI(); } return CACHE.get(new Key(proxyUserName, UserGroupInformation.getLoginUser())); } catch (IOException | ExecutionException e) { final String errMsg = "Failed to create proxy user UserGroupInformation object: " + e.getMessage(); logger.error(errMsg, e); throw new RuntimeException(errMsg, e); } } private ImpersonationUtil(); static String resolveUserName(String username); static UserGroupInformation createProxyUgi(String proxyUserName); static String getProcessUserName(); static UserGroupInformation getProcessUserUGI(); static FileSystem createFileSystem(String proxyUserName, Configuration fsConf, Path path); } | @Test public void testNullUser() throws Exception { thrown.expect(IllegalArgumentException.class); ImpersonationUtil.createProxyUgi(null); }
@Test public void testEmptyUser() throws Exception { thrown.expect(IllegalArgumentException.class); ImpersonationUtil.createProxyUgi(""); } |
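createProxyUgi short-circuits to the process UGI for the process and system users, and otherwise serves proxy UGIs from a cache keyed by the proxy name and the login user. A reduced sketch of the cache-or-create path, assuming a Guava LoadingCache (which the ExecutionException handling above suggests) and keying only on the name for brevity:

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.apache.hadoop.security.UserGroupInformation;

public final class ProxyUgiCache {
  // The real Key also carries the login UGI; the short-circuit for the process user is omitted here.
  private static final LoadingCache<String, UserGroupInformation> CACHE = CacheBuilder.newBuilder()
      .build(new CacheLoader<String, UserGroupInformation>() {
        @Override
        public UserGroupInformation load(String proxyUser) throws Exception {
          return UserGroupInformation.createProxyUser(proxyUser, UserGroupInformation.getLoginUser());
        }
      });

  public static UserGroupInformation proxyUgi(String proxyUser) throws Exception {
    if (proxyUser == null || proxyUser.isEmpty()) {
      throw new IllegalArgumentException("Invalid value for proxy user name"); // as the two tests expect
    }
    return CACHE.get(proxyUser);
  }
}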
FormatPluginOptionExtractor { @VisibleForTesting Collection<FormatPluginOptionsDescriptor> getOptions() { return optionsByTypeName.values(); } FormatPluginOptionExtractor(ScanResult scanResult); FormatPluginConfig createConfigForTable(TableInstance t); List<Function> getFunctions(final List<String> tableSchemaPath, final FileSystemPlugin plugin, final SchemaConfig schemaConfig); } | @Test public void test() { FormatPluginOptionExtractor e = new FormatPluginOptionExtractor(CLASSPATH_SCAN_RESULT); Collection<FormatPluginOptionsDescriptor> options = e.getOptions(); for (FormatPluginOptionsDescriptor d : options) { assertEquals(d.pluginConfigClass.getAnnotation(JsonTypeName.class).value(), d.typeName); switch (d.typeName) { case "text": assertEquals(TextFormatConfig.class, d.pluginConfigClass); assertEquals( "(type: String, lineDelimiter: String, fieldDelimiter: String, quote: String, escape: String, " + "comment: String, skipFirstLine: boolean, extractHeader: boolean, " + "autoGenerateColumnNames: boolean, trimHeader: boolean, outputExtension: String)", d.presentParams() ); break; case "named": assertEquals(NamedFormatPluginConfig.class, d.pluginConfigClass); assertEquals("(type: String, name: String)", d.presentParams()); break; case "json": assertEquals(d.typeName, "(type: String, outputExtension: String, prettyPrint: boolean)", d.presentParams()); break; case "parquet": assertEquals(d.typeName, "(type: String, autoCorrectCorruptDates: boolean, outputExtension: String)", d.presentParams()); break; case "arrow": assertEquals(d.typeName, "(type: String, outputExtension: String)", d.presentParams()); break; case "sequencefile": case "avro": assertEquals(d.typeName, "(type: String)", d.presentParams()); break; case "excel": assertEquals(d.typeName, "(type: String, sheet: String, extractHeader: boolean, hasMergedCells: boolean, xls: boolean)", d.presentParams()); break; case "iceberg": assertEquals(d.typeName, "(type: String, metaStoreType: IcebergMetaStoreType, dataFormatType: FileType, dataFormatConfig: FormatPluginConfig)", d.presentParams()); break; default: fail("add validation for format plugin type " + d.typeName); } } } |
EasyScanOperatorCreator implements ProducerOperator.Creator<EasySubScan> { static boolean selectsAllColumns(final BatchSchema datasetSchema, final List<SchemaPath> projectedColumns) { final Set<String> columnsInTable = FluentIterable.from(datasetSchema) .transform( new Function<Field, String>() { @Override public String apply(Field input) { return input.getName(); }}) .filter( new Predicate<String>() { @Override public boolean apply(String input) { return !input.equals(IncrementalUpdateUtils.UPDATE_COLUMN); }}) .toSet(); final Set<String> selectedColumns = FluentIterable.from(projectedColumns) .transform( new Function<SchemaPath, String>() { @Override public String apply(SchemaPath input) { return input.getAsUnescapedPath(); } }) .toSet(); return columnsInTable.equals(selectedColumns); } @Override ProducerOperator create(FragmentExecutionContext fragmentExecContext, final OperatorContext context, EasySubScan config); } | @Test public void trueWhenAllColumnsAreSelected() { BatchSchema schema = mock(BatchSchema.class); when(schema.iterator()) .thenReturn(Lists.newArrayList(Field.nullable("a1", new ArrowType.Bool())).iterator()); assertTrue(EasyScanOperatorCreator.selectsAllColumns(schema, Lists.<SchemaPath>newArrayList(SchemaPath.getSimplePath("a1")))); }
@Test public void selectionIgnoresIncremental() { BatchSchema schema = mock(BatchSchema.class); when(schema.iterator()) .thenReturn(Lists.newArrayList(Field.nullable("a1", new ArrowType.Bool()), Field.nullable(IncrementalUpdateUtils.UPDATE_COLUMN, new ArrowType.Bool())).iterator()); assertTrue(EasyScanOperatorCreator.selectsAllColumns(schema, Lists.<SchemaPath>newArrayList(SchemaPath.getSimplePath("a1")))); }
@Test public void falseWhenAllColumnsAreNotSelected() { BatchSchema schema = mock(BatchSchema.class); when(schema.iterator()) .thenReturn(Lists.newArrayList(Field.nullable("a1", new ArrowType.Bool()), Field.nullable("a2", new ArrowType.Bool())).iterator()); assertFalse(EasyScanOperatorCreator.selectsAllColumns(schema, Lists.<SchemaPath>newArrayList(SchemaPath.getSimplePath("a1")))); }
@Test public void falseWhenChildrenAreSelected() { BatchSchema schema = mock(BatchSchema.class); when(schema.iterator()) .thenReturn(Lists.newArrayList( new Field("a1", new FieldType(true, new ArrowType.Struct(), null), Lists.newArrayList(Field.nullable("a2", new ArrowType.Bool()))), Field.nullable("a3", new ArrowType.Bool())).iterator()); assertFalse(EasyScanOperatorCreator.selectsAllColumns(schema, Lists.newArrayList(SchemaPath.getSimplePath("a1"), SchemaPath.getCompoundPath("a1", "a2"), SchemaPath.getSimplePath("a3")))); } |
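selectsAllColumns reduces both sides to plain name sets -- the schema minus the internal incremental-update column, and the projection as unescaped paths -- and tests set equality, which is why a child path like a1.a2 makes the check fail. The same idea as a standalone stream sketch (plain strings instead of Field/SchemaPath; the update-column name here is illustrative):

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class SelectsAllColumns {
  static final String UPDATE_COLUMN = "$_dremio_update"; // stand-in for IncrementalUpdateUtils.UPDATE_COLUMN

  static boolean selectsAllColumns(List<String> tableColumns, List<String> projected) {
    Set<String> inTable = tableColumns.stream()
        .filter(c -> !c.equals(UPDATE_COLUMN)) // the internal column never counts
        .collect(Collectors.toSet());
    Set<String> selected = new HashSet<>(projected);
    return inTable.equals(selected);
  }

  public static void main(String[] args) {
    System.out.println(selectsAllColumns(List.of("a1", UPDATE_COLUMN), List.of("a1")));       // true
    System.out.println(selectsAllColumns(List.of("a1", "a2"), List.of("a1")));                // false
    System.out.println(selectsAllColumns(List.of("a1", "a3"), List.of("a1", "a1.a2", "a3"))); // false: child path
  }
}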
DremioFileSystemCache { public FileSystem get(URI uri, Configuration conf, List<String> uniqueConnectionProps) throws IOException{ final Key key = new Key(uri, conf, uniqueConnectionProps); FileSystem fs; synchronized (this) { fs = map.get(key); } if (fs != null) { return fs; } final String disableCacheName = String.format("fs.%s.impl.disable.cache", uri.getScheme()); final boolean disableCache = conf.getBoolean(disableCacheName, false); if (disableCache || key.uniqueConnectionPropValues == null || key.uniqueConnectionPropValues.isEmpty()) { return FileSystem.get(uri, conf); } final Configuration cloneConf = new Configuration(conf); cloneConf.set(disableCacheName, "true"); fs = FileSystem.get(uri, cloneConf); cloneConf.setBoolean(disableCacheName, disableCache); synchronized (this) { FileSystem oldfs = map.get(key); if (oldfs != null) { fs.close(); return oldfs; } if (map.isEmpty() && !ShutdownHookManager.get().isShutdownInProgress()) { ShutdownHookManager.get().addShutdownHook(clientFinalizer, SHUTDOWN_HOOK_PRIORITY); } map.put(key, fs); if (conf.getBoolean(FS_AUTOMATIC_CLOSE_KEY, FS_AUTOMATIC_CLOSE_DEFAULT)) { toAutoClose.add(key); } return fs; } } FileSystem get(URI uri, Configuration conf, List<String> uniqueConnectionProps); synchronized void closeAll(boolean onlyAutomatic); } | @Test public void withUniqueConnProps() throws Exception { final DremioFileSystemCache dfsc = new DremioFileSystemCache(); final URI uri = URI.create("file:///"); final List<String> uniqueProps = ImmutableList.of("prop1", "prop2"); Configuration conf1 = new Configuration(); FileSystem fs1 = dfsc.get(uri, conf1, uniqueProps); Configuration conf2 = new Configuration(conf1); conf2.set("prop1", "prop1Val"); FileSystem fs2 = dfsc.get(uri, conf2, uniqueProps); assertTrue(fs1 != fs2); FileSystem fs3 = dfsc.get(uri, conf2, uniqueProps); assertTrue(fs2 == fs3); FileSystem fs4 = getAs("newUser", dfsc, uri, conf2, uniqueProps); assertTrue(fs2 != fs4); assertTrue(fs1 != fs4); FileSystem fs5 = dfsc.get(uri, conf1, null); assertTrue(fs1 != fs5); FileSystem fs6 = dfsc.get(uri, conf1, null); assertTrue(fs5 == fs6); }
@Test public void withoutUniqueConnProps() throws Exception { final DremioFileSystemCache dfsc = new DremioFileSystemCache(); final URI uri = URI.create("file:///"); Configuration conf1 = new Configuration(); FileSystem fs1 = dfsc.get(uri, conf1, null); Configuration conf2 = new Configuration(conf1); conf2.set("blah", "boo"); FileSystem fs2 = dfsc.get(uri, conf2, null); assertTrue(fs1 == fs2); FileSystem fs3 = getAs("newUser", dfsc, uri, conf1, null); assertTrue(fs1 != fs3); }
@Test public void withoutUniqueConnPropsWithCacheExplicitlyDisabled() throws Exception { final DremioFileSystemCache dfsc = new DremioFileSystemCache(); final URI uri = URI.create("file:///"); Configuration conf1 = new Configuration(); final String disableCacheName = String.format("fs.%s.impl.disable.cache", uri.getScheme()); conf1.setBoolean(disableCacheName, true); FileSystem fs1 = dfsc.get(uri, conf1, null); Configuration conf2 = new Configuration(conf1); conf2.set("blah", "boo"); FileSystem fs2 = dfsc.get(uri, conf2, null); assertTrue(fs1 != fs2); FileSystem fs3 = getAs("newUser", dfsc, uri, conf1, null); assertTrue(fs1 != fs3); assertTrue(fs1 != fs3); }
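The three tests pin down the cache contract: the key includes the values of the listed unique connection properties, so changing a listed property (or the calling user) yields a new FileSystem, while a config differing only in unlisted properties falls through to Hadoop's own FileSystem.get cache and returns the shared instance -- unless caching is disabled for the scheme, in which case every call constructs a fresh one. A reduced sketch of property-sensitive caching with a ConcurrentHashMap (hypothetical key and loader, not Dremio's Key class):

import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.stream.Collectors;

public class PropertySensitiveCache<V> {
  private final Map<List<String>, V> cache = new ConcurrentHashMap<>();

  public V get(Map<String, String> conf, List<String> uniqueProps, Function<Map<String, String>, V> loader) {
    if (uniqueProps == null || uniqueProps.isEmpty()) {
      return loader.apply(conf); // mirrors the fall-through to FileSystem.get when no unique props are given
    }
    // Key on the values of the listed properties: configs differing only in
    // unlisted properties share an entry; a listed-property change forces a new one.
    List<String> key = uniqueProps.stream().map(p -> p + "=" + conf.get(p)).collect(Collectors.toList());
    return cache.computeIfAbsent(key, k -> loader.apply(conf));
  }
}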
DataJsonOutput { public static final boolean isNumberAsString(DatabindContext context) { Object attr = context.getAttribute(DataJsonOutput.DREMIO_JOB_DATA_NUMBERS_AS_STRINGS_ATTRIBUTE); return attr instanceof Boolean && ((Boolean)attr).booleanValue(); } DataJsonOutput(JsonGenerator gen, boolean convertNumbersToStrings); static final ObjectWriter setNumbersAsStrings(ObjectWriter writer, boolean isEnabled); static final boolean isNumberAsString(DatabindContext context); void writeStartArray(); void writeEndArray(); void writeStartObject(); void writeEndObject(); void writeFieldName(String name); void writeVarChar(String value); void writeBoolean(boolean value); void writeDecimal(FieldReader reader, JsonOutputContext context); void writeTinyInt(FieldReader reader, JsonOutputContext context); void writeSmallInt(FieldReader reader, JsonOutputContext context); void writeInt(FieldReader reader, JsonOutputContext context); void writeBigInt(FieldReader reader, JsonOutputContext context); void writeFloat(FieldReader reader, JsonOutputContext context); void writeDouble(FieldReader reader, JsonOutputContext context); void writeVarChar(FieldReader reader, JsonOutputContext context); void writeVar16Char(FieldReader reader, JsonOutputContext context); void writeVarBinary(FieldReader reader, JsonOutputContext context); void writeBit(FieldReader reader, JsonOutputContext context); void writeDateMilli(FieldReader reader, JsonOutputContext context); void writeDate(FieldReader reader, JsonOutputContext context); void writeTimeMilli(FieldReader reader, JsonOutputContext context); void writeTime(FieldReader reader, JsonOutputContext context); void writeTimeStampMilli(FieldReader reader, JsonOutputContext context); void writeIntervalYear(FieldReader reader, JsonOutputContext context); void writeIntervalDay(FieldReader reader, JsonOutputContext context); void writeNull(JsonOutputContext context); void writeUnion(FieldReader reader, JsonOutputContext context); void writeMap(FieldReader reader, JsonOutputContext context); void writeList(FieldReader reader, JsonOutputContext context); static final DateTimeFormatter FORMAT_DATE; static final DateTimeFormatter FORMAT_TIMESTAMP; static final DateTimeFormatter FORMAT_TIME; static final String DREMIO_JOB_DATA_NUMBERS_AS_STRINGS_ATTRIBUTE; } | @Test public void test() { Mockito.when(context.getAttribute(DataJsonOutput.DREMIO_JOB_DATA_NUMBERS_AS_STRINGS_ATTRIBUTE)).thenReturn(this.inputValue); assertEquals(this.expectedValue, DataJsonOutput.isNumberAsString(context)); } |
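isNumberAsString just reads a per-serialization attribute from Jackson's DatabindContext, and setNumbersAsStrings presumably sets it via ObjectWriter.withAttribute. A self-contained Jackson example of that attribute pattern with a hypothetical Long serializer (the attribute key here is illustrative, not the real constant):

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class NumbersAsStringsDemo {
  static final String ATTR = "numbersAsStrings"; // illustrative key

  public static void main(String[] args) throws Exception {
    SimpleModule module = new SimpleModule().addSerializer(Long.class, new JsonSerializer<Long>() {
      @Override
      public void serialize(Long value, JsonGenerator gen, SerializerProvider ctx) throws java.io.IOException {
        Object attr = ctx.getAttribute(ATTR); // same instanceof-Boolean check as isNumberAsString
        if (attr instanceof Boolean && (Boolean) attr) {
          gen.writeString(value.toString()); // large longs survive JavaScript consumers as strings
        } else {
          gen.writeNumber(value);
        }
      }
    });
    ObjectMapper mapper = new ObjectMapper().registerModule(module);
    System.out.println(mapper.writer().withAttribute(ATTR, true).writeValueAsString(1L << 60));  // "1152921504606846976"
    System.out.println(mapper.writer().withAttribute(ATTR, false).writeValueAsString(1L << 60)); // 1152921504606846976
  }
}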
TimedRunnable implements Runnable { @Override public final void run() { long start = System.nanoTime(); threadStart=start; try{ value = runInner(); }catch(Exception e){ this.e = e; }finally{ timeNanos = System.nanoTime() - start; } } @Override final void run(); long getThreadStart(); long getTimeSpentNanos(); final V getValue(); static List<V> run(final String activity, final Logger logger, final List<TimedRunnable<V>> runnables, int parallelism); static List<V> run(final String activity, final Logger logger, final List<TimedRunnable<V>> runnables,
int parallelism, long timeout); } | @Test public void withoutAnyTasksTriggeringTimeout() throws Exception { List<TimedRunnable<Void>> tasks = Lists.newArrayList(); for(int i=0; i<100; i++){ tasks.add(new TestTask(2000)); } TimedRunnable.run("Execution without triggering timeout", logger, tasks, 16); }
@Test public void withTasksExceedingTimeout() throws Exception { UserException ex = null; try { List<TimedRunnable<Void>> tasks = Lists.newArrayList(); for (int i = 0; i < 100; i++) { if ((i & (i + 1)) == 0) { tasks.add(new TestTask(2000)); } else { tasks.add(new TestTask(20000)); } } TimedRunnable.run("Execution with some tasks triggering timeout", logger, tasks, 16); } catch (UserException e) { ex = e; } assertNotNull("Expected a UserException", ex); assertThat(ex.getMessage(), containsString("Waited for 93750ms, but tasks for 'Execution with some tasks triggering timeout' are not " + "complete. Total runnable size 100, parallelism 16.")); }
@Test public void withManyTasks() throws Exception { List<TimedRunnable<Void>> tasks = Lists.newArrayList(); for (int i = 0; i < 150000; i++) { tasks.add(new TestTask(0)); } TimedRunnable.run("Execution with lots of tasks", logger, tasks, 16); }
@Test public void withOverriddenHighTimeout() throws Exception { List<TimedRunnable<Void>> tasks = Lists.newArrayList(); for(int i=0; i<10; i++){ tasks.add(new TestTask(20_000)); } TimedRunnable.run("Execution without triggering timeout", logger, tasks, 2, 150_000); } |
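The expected failure message is consistent with an aggregate timeout of perTaskTimeout * numTasks / parallelism under a 15 s per-task allowance: 15000 * 100 / 16 = 93750 ms. That would also explain why the last test passes an explicit 150_000 ms for 10 tasks of 20 s on parallelism 2, where the default would be only 75000 ms. A sketch of that arithmetic -- the 15 s constant is inferred from the message, not read from the TimedRunnable source:

public class TimeoutMath {
  static final long PER_TASK_TIMEOUT_MS = 15_000L; // assumption inferred from 93750 = 15000 * 100 / 16

  static long aggregateTimeoutMs(int numTasks, int parallelism) {
    return PER_TASK_TIMEOUT_MS * numTasks / parallelism;
  }

  public static void main(String[] args) {
    System.out.println(aggregateTimeoutMs(100, 16)); // 93750, matching the expected message
    System.out.println(aggregateTimeoutMs(10, 2));   // 75000: too short for 20 s tasks, hence the override
  }
}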
QueryContext implements AutoCloseable, ResourceSchedulingContext, OptimizerRulesContext { public OptionManager getOptions() { return optionManager; } QueryContext(
final UserSession session,
final SabotContext sabotContext,
QueryId queryId
); QueryContext(
final UserSession session,
final SabotContext sabotContext,
QueryId queryId,
Optional<Boolean> checkMetadataValidity
); QueryContext(
final UserSession session,
final SabotContext sabotContext,
QueryId queryId,
QueryPriority priority,
long maxAllocation,
Predicate<DatasetConfig> datasetValidityChecker
); private QueryContext(
final UserSession session,
final SabotContext sabotContext,
QueryId queryId,
QueryPriority priority,
long maxAllocation,
Predicate<DatasetConfig> datasetValidityChecker,
Optional<Boolean> checkMetadataValidity
); CatalogService getCatalogService(); Catalog getCatalog(); AccelerationManager getAccelerationManager(); SubstitutionProviderFactory getSubstitutionProviderFactory(); RuleSet getInjectedRules(PlannerPhase phase); @Override QueryId getQueryId(); @Override PlannerSettings getPlannerSettings(); UserSession getSession(); @Override BufferAllocator getAllocator(); @Override String getQueryUserName(); OptionManager getOptions(); QueryOptionManager getQueryOptionManager(); SessionOptionManager getSessionOptionManager(); SystemOptionManager getSystemOptionManager(); ExecutionControls getExecutionControls(); @Override NodeEndpoint getCurrentEndpoint(); LogicalPlanPersistence getLpPersistence(); @Override Collection<NodeEndpoint> getActiveEndpoints(); SabotConfig getConfig(); OptionList getNonDefaultOptions(); @Override FunctionImplementationRegistry getFunctionRegistry(); boolean isUserAuthenticationEnabled(); ScanResult getScanResult(); OperatorTable getOperatorTable(); @Override QueryContextInformation getQueryContextInfo(); @Override ContextInformation getContextInformation(); @Override ArrowBuf getManagedBuffer(); @Override PartitionExplorer getPartitionExplorer(); @Override int registerFunctionErrorContext(FunctionErrorContext errorContext); @Override FunctionErrorContext getFunctionErrorContext(int errorContextId); @Override FunctionErrorContext getFunctionErrorContext(); MaterializationDescriptorProvider getMaterializationProvider(); Provider<WorkStats> getWorkStatsProvider(); WorkloadType getWorkloadType(); @Override BufferManager getBufferManager(); @Override ValueHolder getConstantValueHolder(String value, MinorType type, Function<ArrowBuf, ValueHolder> holderInitializer); void setGroupResourceInformation(GroupResourceInformation groupResourceInformation); GroupResourceInformation getGroupResourceInformation(); @Override void close(); @Override CompilationOptions getCompilationOptions(); ExecutorService getExecutorService(); } | @Test public void testOptionManagerSetup() throws Exception { try (final QueryContext queryContext = new QueryContext(session(), getSabotContext(), UserBitShared.QueryId.getDefaultInstance());) { final OptionManagerWrapper optionManager = (OptionManagerWrapper) queryContext.getOptions(); final List<OptionManager> optionManagerList = optionManager.getOptionManagers(); assertEquals(4, optionManagerList.size()); assertTrue(optionManagerList.get(0) instanceof QueryOptionManager); assertTrue(optionManagerList.get(1) instanceof SessionOptionManager); assertTrue(optionManagerList.get(2) instanceof EagerCachingOptionManager); assertTrue(optionManagerList.get(3) instanceof DefaultOptionManager); } } |
VectorAccessibleSerializable extends AbstractStreamSerializable { public static void readIntoArrowBuf(InputStream inputStream, ArrowBuf outputBuffer, long numBytesToRead) throws IOException { final byte[] buffer = REUSABLE_LARGE_BUFFER.get(); while(numBytesToRead > 0) { int len = (int) Math.min(buffer.length, numBytesToRead); final int numBytesRead = inputStream.read(buffer, 0, len); if (numBytesRead == -1 && numBytesToRead > 0) { throw new EOFException("Unexpected end of stream while reading."); } outputBuffer.writeBytes(buffer, 0, numBytesRead); numBytesToRead -= numBytesRead; } } VectorAccessibleSerializable(BufferAllocator allocator); VectorAccessibleSerializable(BufferAllocator allocator, boolean useCodec, BufferAllocator decompressAllocator); VectorAccessibleSerializable(WritableBatch batch, BufferAllocator allocator); VectorAccessibleSerializable(WritableBatch batch, SelectionVector2 sv2, BufferAllocator allocator, boolean useCodec); @Override void readFromStream(InputStream input); @Override void writeToStream(OutputStream output); void clear(); VectorContainer get(); SelectionVector2 getSv2(); static void readIntoArrowBuf(InputStream inputStream, ArrowBuf outputBuffer, long numBytesToRead); long compressionTime(); long uncompressionTime(); static void readFromStream(SeekableInputStream input, final ArrowBuf outputBuffer, final int bytesToRead); static final int RAW_CHUNK_SIZE_TO_COMPRESS; } | @Test public void testReadIntoArrowBuf() throws Exception { try (final ArrowBuf buffer = allocator.buffer(256)) { final InputStream inputStream = mock(InputStream.class); when(inputStream.read(any(byte[].class))).thenReturn(0); readIntoArrowBuf(inputStream, buffer, 0); assertEquals(0, buffer.writerIndex()); } try (final ArrowBuf buffer = allocator.buffer(256)) { final InputStream inputStream = mock(InputStream.class); when(inputStream.read(any(byte[].class), any(int.class), any(int.class))).thenAnswer(new Answer() { @Override public Integer answer(InvocationOnMock invocation) throws Throwable { byte[] byteBuf = invocation.getArgumentAt(0, byte[].class); int start = invocation.getArgumentAt(1, int.class); int length = invocation.getArgumentAt(2, int.class); for(int i = start; i < Math.min(length, byteBuf.length); i++) { byteBuf[i] = (byte)i; } return Math.min(length, byteBuf.length); } }); readIntoArrowBuf(inputStream, buffer, 256); assertEquals(256, buffer.writerIndex()); for(int i=0; i<256; i++) { assertEquals((byte)i, buffer.getByte(i)); } } try (final ArrowBuf buffer = allocator.buffer(256)) { final InputStream inputStream = mock(InputStream.class); when(inputStream.read(any(byte[].class), any(int.class), any(int.class))).thenAnswer(new Answer() { @Override public Integer answer(InvocationOnMock invocation) throws Throwable { byte[] byteBuf = invocation.getArgumentAt(0, byte[].class); int start = invocation.getArgumentAt(1, int.class); int length = invocation.getArgumentAt(2, int.class); int i=start; int toFill = Math.min(byteBuf.length, 20); toFill = Math.min(toFill, length); while(i<toFill) { byteBuf[i] = (byte)i; i++; } return i; } }); readIntoArrowBuf(inputStream, buffer, 256); assertEquals(256, buffer.writerIndex()); for(int i=0; i<256; i++) { assertEquals((byte)(i%20), buffer.getByte(i)); } } try (final ArrowBuf buffer = allocator.buffer(256)) { final InputStream inputStream = mock(InputStream.class); when(inputStream.read(any(byte[].class), any(int.class), any(int.class))).thenReturn(-1); try { readIntoArrowBuf(inputStream, buffer, 256); fail("Expected above call to fail"); } catch (EOFException ex) { } } }
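readIntoArrowBuf is the classic read-fully loop: InputStream.read may deliver fewer bytes than requested (the third mock above caps each call at 20 bytes), so the code keeps reading until the requested count is consumed and treats -1 as a premature EOF. The same pattern with plain JDK streams:

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public final class ReadFully {
  // Copy exactly n bytes from in to out; fail loudly if the stream ends early.
  public static void copyExactly(InputStream in, OutputStream out, long n) throws IOException {
    byte[] buf = new byte[32 * 1024];
    while (n > 0) {
      int len = (int) Math.min(buf.length, n);
      int read = in.read(buf, 0, len); // may be short; never assume len bytes arrived
      if (read == -1) {
        throw new EOFException("Unexpected end of stream while reading.");
      }
      out.write(buf, 0, read);
      n -= read;
    }
  }
}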
OptionManagerWrapper extends BaseOptionManager { @Override public OptionList getNonDefaultOptions() { final OptionList optionList = new OptionList(); for (OptionManager optionManager : optionManagers) { OptionList nonDefaultOptions = optionManager.getNonDefaultOptions(); optionList.merge(nonDefaultOptions); } return optionList; } OptionManagerWrapper(OptionValidatorListing optionValidatorListing, List<OptionManager> optionManagers); OptionValidatorListing getOptionValidatorListing(); @VisibleForTesting List<OptionManager> getOptionManagers(); @Override boolean setOption(OptionValue value); @Override boolean deleteOption(String name, OptionValue.OptionType type); @Override boolean deleteAllOptions(OptionValue.OptionType type); @Override OptionValue getOption(String name); @Override OptionList getDefaultOptions(); @Override OptionList getNonDefaultOptions(); OptionValidator getValidator(String name); @Override Iterator<OptionValue> iterator(); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); } | @Test public void testGetNonDefaultOptions() throws Exception { OptionManager optionManager = OptionManagerWrapper.Builder.newBuilder() .withOptionValidatorProvider(optionValidatorListing) .withOptionManager(defaultOptionManager) .withOptionManager(systemOptionManager) .withOptionManager(sessionOptionManager) .withOptionManager(queryOptionManager) .build(); int initialOptionsCount = defaultOptionManager.getNonDefaultOptions().size() + systemOptionManager.getNonDefaultOptions().size() + sessionOptionManager.getNonDefaultOptions().size() + queryOptionManager.getNonDefaultOptions().size(); List<OptionValue> optionValues = Arrays.asList( OptionValue.createLong(OptionValue.OptionType.SYSTEM, SLICE_TARGET, 10), OptionValue.createLong(OptionValue.OptionType.SESSION, SLICE_TARGET, 15), OptionValue.createLong(OptionValue.OptionType.QUERY, SLICE_TARGET, 20), OptionValue.createBoolean(OptionValue.OptionType.SESSION, ENABLE_VERBOSE_ERRORS_KEY, true), OptionValue.createBoolean(OptionValue.OptionType.QUERY, ENABLE_VERBOSE_ERRORS_KEY, true) ); optionValues.forEach(optionManager::setOption); OptionList nonDefaultOptions = optionManager.getNonDefaultOptions(); assertEquals(initialOptionsCount + optionValues.size(), nonDefaultOptions.size()); for (OptionValue optionValue : optionValues) { assertTrue(nonDefaultOptions.contains(optionValue)); } for (OptionValue nonDefaultOption : nonDefaultOptions) { assertNotEquals(nonDefaultOption, defaultOptionManager.getOption(nonDefaultOption.getName())); } } |
OptionManagerWrapper extends BaseOptionManager { @Override public OptionList getDefaultOptions() { final OptionList optionList = new OptionList(); for (OptionManager optionManager : optionManagers) { OptionList defaultOptions = optionManager.getDefaultOptions(); optionList.merge(defaultOptions); } return optionList; } OptionManagerWrapper(OptionValidatorListing optionValidatorListing, List<OptionManager> optionManagers); OptionValidatorListing getOptionValidatorListing(); @VisibleForTesting List<OptionManager> getOptionManagers(); @Override boolean setOption(OptionValue value); @Override boolean deleteOption(String name, OptionValue.OptionType type); @Override boolean deleteAllOptions(OptionValue.OptionType type); @Override OptionValue getOption(String name); @Override OptionList getDefaultOptions(); @Override OptionList getNonDefaultOptions(); OptionValidator getValidator(String name); @Override Iterator<OptionValue> iterator(); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); } | @Test public void testGetDefaultOptions() throws Exception { OptionManager optionManager = OptionManagerWrapper.Builder.newBuilder() .withOptionValidatorProvider(optionValidatorListing) .withOptionManager(defaultOptionManager) .withOptionManager(systemOptionManager) .withOptionManager(sessionOptionManager) .withOptionManager(queryOptionManager) .build(); OptionList defaultOptions = optionManager.getDefaultOptions(); assertEquals(defaultOptionManager.getDefaultOptions().size(), defaultOptions.size()); for (OptionValue defaultOption : defaultOptions) { assertEquals(defaultOption, optionValidatorListing.getValidator(defaultOption.getName()).getDefault()); } } |
OptionManagerWrapper extends BaseOptionManager { @Override public Iterator<OptionValue> iterator() { final OptionList resultList = new OptionList(); final Map<String, OptionValue> optionsMap = CaseInsensitiveMap.newHashMap(); final OptionList defaultOptions = getDefaultOptions(); defaultOptions.forEach(optionValue -> optionsMap.put(optionValue.getName(), optionValue)); final List<OptionManager> reversedOptionManagers = Lists.reverse(optionManagers); for (OptionManager optionManager : reversedOptionManagers) { OptionList optionList = optionManager.getNonDefaultOptions(); for (OptionValue optionValue : optionList) { if (optionValue.getType() == optionsMap.get(optionValue.getName()).getType()) { optionsMap.put(optionValue.getName(), optionValue); } else { resultList.add(optionValue); } } } resultList.addAll(optionsMap.values()); return resultList.iterator(); } OptionManagerWrapper(OptionValidatorListing optionValidatorListing, List<OptionManager> optionManagers); OptionValidatorListing getOptionValidatorListing(); @VisibleForTesting List<OptionManager> getOptionManagers(); @Override boolean setOption(OptionValue value); @Override boolean deleteOption(String name, OptionValue.OptionType type); @Override boolean deleteAllOptions(OptionValue.OptionType type); @Override OptionValue getOption(String name); @Override OptionList getDefaultOptions(); @Override OptionList getNonDefaultOptions(); OptionValidator getValidator(String name); @Override Iterator<OptionValue> iterator(); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); } | @Test public void testIterator() throws Exception { OptionManager optionManager = OptionManagerWrapper.Builder.newBuilder() .withOptionValidatorProvider(optionValidatorListing) .withOptionManager(defaultOptionManager) .withOptionManager(systemOptionManager) .withOptionManager(sessionOptionManager) .withOptionManager(queryOptionManager) .build(); int initialSystemOptionCount = systemOptionManager.getNonDefaultOptions().size(); int initialOptionsCount = defaultOptionManager.getNonDefaultOptions().size() + initialSystemOptionCount + sessionOptionManager.getNonDefaultOptions().size() + queryOptionManager.getNonDefaultOptions().size(); int defaultOptionsCount = optionValidatorListing.getValidatorList().size(); List<OptionValue> optionValues = Arrays.asList( OptionValue.createLong(OptionValue.OptionType.SYSTEM, SLICE_TARGET, 10), OptionValue.createLong(OptionValue.OptionType.SESSION, SLICE_TARGET, 15), OptionValue.createLong(OptionValue.OptionType.QUERY, SLICE_TARGET, 20), OptionValue.createBoolean(OptionValue.OptionType.SESSION, ENABLE_VERBOSE_ERRORS_KEY, true), OptionValue.createBoolean(OptionValue.OptionType.QUERY, ENABLE_VERBOSE_ERRORS_KEY, true) ); AtomicInteger systemOptionsCount = new AtomicInteger(initialSystemOptionCount); optionValues.forEach(optionValue -> { optionManager.setOption(optionValue); if (optionValue.getType().equals(OptionValue.OptionType.SYSTEM)) { systemOptionsCount.addAndGet(1); } }); OptionList iteratorResult = new OptionList(); optionManager.iterator().forEachRemaining(iteratorResult::add); assertEquals(initialOptionsCount + defaultOptionsCount + optionValues.size() - systemOptionsCount.get(), iteratorResult.size()); } |
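The iterator seeds the result with every default, then layers each manager's non-default options on top, walking from the broadest scope to the narrowest so that a query-level value shadows session, system, and default values of the same option (the QueryContext test earlier shows the managers ordered query, session, system, default). A plain-map simplification of that layering, ignoring the per-type bookkeeping in the real iterator:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class LayeredOptions {
  // Later layers override earlier ones, like the reversed manager walk above.
  static Map<String, String> effective(Map<String, String> defaults, List<Map<String, String>> layers) {
    Map<String, String> result = new LinkedHashMap<>(defaults);
    for (Map<String, String> layer : layers) {
      result.putAll(layer); // non-default values shadow whatever came before
    }
    return result;
  }

  public static void main(String[] args) {
    Map<String, String> defaults = Map.of("planner.slice_target", "100000");
    System.out.println(effective(defaults, List.of(
        Map.of("planner.slice_target", "10"),    // SYSTEM
        Map.of("planner.slice_target", "15"),    // SESSION
        Map.of("planner.slice_target", "20")))); // QUERY wins: {planner.slice_target=20}
  }
}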
TransformBase { public Transform wrap() { return acceptor.wrap(this); } final T accept(TransformVisitor<T> visitor); Transform wrap(); @Override String toString(); static TransformBase unwrap(Transform t); static Converter<TransformBase, Transform> converter(); static final Acceptor<TransformBase, TransformVisitor<?>, Transform> acceptor; } | @Test public void testConvert() throws Exception { TransformBase transform = new TransformField("source", "new", false, new FieldConvertCase(LOWER_CASE).wrap()); validate(transform); } |
OptionValueProtoUtils { public static OptionValueProto toOptionValueProto(OptionValue optionValue) { checkArgument(optionValue.getType() == OptionValue.OptionType.SYSTEM, String.format("Invalid OptionType. OptionType must be 'SYSTEM', was given '%s'", optionValue.getType())); final OptionValue.Kind kind = optionValue.getKind(); final OptionValueProto.Builder builder = OptionValueProto.newBuilder() .setName(optionValue.getName()); switch (kind) { case BOOLEAN: builder.setBoolVal(optionValue.getBoolVal()); break; case LONG: builder.setNumVal(optionValue.getNumVal()); break; case STRING: builder.setStringVal(optionValue.getStringVal()); break; case DOUBLE: builder.setFloatVal(optionValue.getFloatVal()); break; default: throw new IllegalArgumentException("Invalid OptionValue kind"); } return builder.build(); } private OptionValueProtoUtils(); static OptionValueProto toOptionValueProto(OptionValue optionValue); static OptionValue toOptionValue(OptionValueProto value); static OptionValueProtoList toOptionValueProtoList(Collection<OptionValueProto> optionValueProtos); } | @Test public void testBoolOptionToProto() { final OptionValue option = OptionValue.createBoolean(OptionValue.OptionType.SYSTEM, "test.option", true); final OptionValueProto optionProto = OptionValueProtoUtils.toOptionValueProto(option); assertTrue(verifyEquivalent(option, optionProto)); }
@Test public void testLongOptionToProto() { final OptionValue option = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "test.option", 1234); final OptionValueProto optionProto = OptionValueProtoUtils.toOptionValueProto(option); assertTrue(verifyEquivalent(option, optionProto)); }
@Test public void testStringOptionToProto() { final OptionValue option = OptionValue.createString(OptionValue.OptionType.SYSTEM, "test.option", "test-option"); final OptionValueProto optionProto = OptionValueProtoUtils.toOptionValueProto(option); assertTrue(verifyEquivalent(option, optionProto)); }
@Test public void testDoubleOptionToProto() { final OptionValue option = OptionValue.createDouble(OptionValue.OptionType.SYSTEM, "test.option", 1234.1234); final OptionValueProto optionProto = OptionValueProtoUtils.toOptionValueProto(option); assertTrue(verifyEquivalent(option, optionProto)); } |
OptionValueProtoUtils { public static OptionValue toOptionValue(OptionValueProto value) { switch (value.getOptionValCase()) { case NUM_VAL: return OptionValue.createLong( OptionValue.OptionType.SYSTEM, value.getName(), value.getNumVal() ); case STRING_VAL: return OptionValue.createString( OptionValue.OptionType.SYSTEM, value.getName(), value.getStringVal() ); case BOOL_VAL: return OptionValue.createBoolean( OptionValue.OptionType.SYSTEM, value.getName(), value.getBoolVal() ); case FLOAT_VAL: return OptionValue.createDouble( OptionValue.OptionType.SYSTEM, value.getName(), value.getFloatVal() ); case OPTIONVAL_NOT_SET: default: throw new IllegalArgumentException("Invalid OptionValue kind"); } } private OptionValueProtoUtils(); static OptionValueProto toOptionValueProto(OptionValue optionValue); static OptionValue toOptionValue(OptionValueProto value); static OptionValueProtoList toOptionValueProtoList(Collection<OptionValueProto> optionValueProtos); } | @Test public void testBoolOptionFromProto() { final OptionValueProto optionProto = OptionValueProto.newBuilder() .setName("test.option") .setBoolVal(true) .build(); final OptionValue option = OptionValueProtoUtils.toOptionValue(optionProto); assertTrue(verifyEquivalent(option, optionProto)); }
@Test public void testLongOptionFromProto() { final OptionValueProto optionProto = OptionValueProto.newBuilder() .setName("test.option") .setNumVal(1234) .build(); final OptionValue option = OptionValueProtoUtils.toOptionValue(optionProto); assertTrue(verifyEquivalent(option, optionProto)); }
@Test public void testStringOptionFromProto() { final OptionValueProto optionProto = OptionValueProto.newBuilder() .setName("test.option") .setStringVal("test-option") .build(); final OptionValue option = OptionValueProtoUtils.toOptionValue(optionProto); assertTrue(verifyEquivalent(option, optionProto)); }
@Test public void testFloatOptionFromProto() { final OptionValueProto optionProto = OptionValueProto.newBuilder() .setName("test.option") .setFloatVal(1234.1234) .build(); final OptionValue option = OptionValueProtoUtils.toOptionValue(optionProto); assertTrue(verifyEquivalent(option, optionProto)); } |
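Since toOptionValueProto and toOptionValue map kinds symmetrically (BOOLEAN with bool_val, LONG with num_val, STRING with string_val, DOUBLE with float_val), the per-kind tests above can be condensed into one round-trip property. A compact check built only from the helpers already shown, assuming OptionValue's value-based equals (which the OptionManagerWrapper tests already rely on) and the Dremio option classes on the test classpath:

import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;

public class TestOptionValueProtoRoundTrip {
  @Test
  public void roundTripPreservesSystemOptions() {
    List<OptionValue> samples = Arrays.asList(
        OptionValue.createBoolean(OptionValue.OptionType.SYSTEM, "test.option", true),
        OptionValue.createLong(OptionValue.OptionType.SYSTEM, "test.option", 1234),
        OptionValue.createString(OptionValue.OptionType.SYSTEM, "test.option", "test-option"),
        OptionValue.createDouble(OptionValue.OptionType.SYSTEM, "test.option", 1234.1234));
    for (OptionValue v : samples) {
      // proto -> OptionValue must reproduce the original SYSTEM option exactly
      assertEquals(v, OptionValueProtoUtils.toOptionValue(OptionValueProtoUtils.toOptionValueProto(v)));
    }
  }
}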
SystemOptionManager extends BaseOptionManager implements Service, ProjectOptionManager { @Override public OptionValue getOption(final String name) { final OptionValueProto value = getOptionProto(name); return value == null ? null : OptionValueProtoUtils.toOptionValue(value); } SystemOptionManager(OptionValidatorListing optionValidatorListing,
LogicalPlanPersistence lpPersistence,
final Provider<LegacyKVStoreProvider> storeProvider,
boolean inMemory); @Override void start(); @Override boolean isValid(String name); @Override boolean isSet(String name); @Override Iterator<OptionValue> iterator(); @Override OptionValue getOption(final String name); @Override boolean setOption(final OptionValue value); @Override boolean deleteOption(final String rawName, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); @Override OptionList getNonDefaultOptions(); @Override void close(); } | @Test public void testGet() { registerTestOption(OptionValue.Kind.LONG, "test-option", "0"); OptionValue optionValue = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "test-option", 123); OptionValueProtoList optionList = OptionValueProtoList.newBuilder() .addAllOptions(Collections.singletonList(OptionValueProtoUtils.toOptionValueProto(optionValue))) .build(); when(kvStore.get(OPTIONS_KEY)).thenReturn(optionList); assertEquals(optionValue, som.getOption(optionValue.getName())); verify(kvStore, times(1)).get(eq(OPTIONS_KEY)); assertNull(som.getOption("not-a-real-option")); } |
SystemOptionManager extends BaseOptionManager implements Service, ProjectOptionManager { @Override public boolean setOption(final OptionValue value) { checkArgument(value.getType() == OptionType.SYSTEM, "OptionType must be SYSTEM."); final String name = value.getName().toLowerCase(Locale.ROOT); final OptionValidator validator = optionValidatorListing.getValidator(name); validator.validate(value); final Map<String, OptionValueProto> optionMap = new HashMap<>(); getOptionProtoList().forEach(optionProto -> optionMap.put(optionProto.getName(), optionProto)); if (optionMap.containsKey(name) && optionMap.get(name).equals(OptionValueProtoUtils.toOptionValueProto(value))) { return true; } if (value.equals(validator.getDefault())) { if (optionMap.containsKey(value.getName())) { optionMap.remove(value.getName()); } else { return true; } } optionMap.put(name, OptionValueProtoUtils.toOptionValueProto(value)); options.put(OPTIONS_KEY, OptionValueProtoUtils.toOptionValueProtoList(optionMap.values())); notifyListeners(); return true; } SystemOptionManager(OptionValidatorListing optionValidatorListing,
LogicalPlanPersistence lpPersistence,
final Provider<LegacyKVStoreProvider> storeProvider,
boolean inMemory); @Override void start(); @Override boolean isValid(String name); @Override boolean isSet(String name); @Override Iterator<OptionValue> iterator(); @Override OptionValue getOption(final String name); @Override boolean setOption(final OptionValue value); @Override boolean deleteOption(final String rawName, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); @Override OptionList getNonDefaultOptions(); @Override void close(); } | @Test public void testSet() { registerTestOption(OptionValue.Kind.LONG, "already-added-option", "0"); OptionValue toAddOptionDefault = registerTestOption(OptionValue.Kind.STRING, "to-add-option", "default-value"); OptionValue alreadyAddedOption = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "already-added-option", 123); OptionValue toAddOption = OptionValue.createString(OptionValue.OptionType.SYSTEM, "to-add-option", "some-value"); OptionValueProtoList optionList = OptionValueProtoList.newBuilder() .addAllOptions(Collections.singletonList(OptionValueProtoUtils.toOptionValueProto(alreadyAddedOption))) .build(); when(kvStore.get(OPTIONS_KEY)).thenReturn(optionList); som.setOption(alreadyAddedOption); verify(kvStore, times(0)).put(any(), any()); som.setOption(toAddOptionDefault); verify(kvStore, times(0)).put(any(), any()); som.setOption(toAddOption); ArgumentCaptor<OptionValueProtoList> argument = ArgumentCaptor.forClass(OptionValueProtoList.class); verify(kvStore, times(1)).put(eq(OPTIONS_KEY), argument.capture()); assertThat(argument.getValue().getOptionsList(), containsInAnyOrder(OptionValueProtoUtils.toOptionValueProto(toAddOption), OptionValueProtoUtils.toOptionValueProto(alreadyAddedOption)) ); OptionValue overridingOption = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "already-added-option", 999); som.setOption(overridingOption); verify(kvStore, times(1)).put(OPTIONS_KEY, OptionValueProtoList.newBuilder() .addAllOptions(Collections.singletonList(OptionValueProtoUtils.toOptionValueProto(overridingOption))) .build() ); } |
TransformBase { public final <T> T accept(TransformVisitor<T> visitor) throws VisitorException { return acceptor.accept(visitor, this); } final T accept(TransformVisitor<T> visitor); Transform wrap(); @Override String toString(); static TransformBase unwrap(Transform t); static Converter<TransformBase, Transform> converter(); static final Acceptor<TransformBase, TransformVisitor<?>, Transform> acceptor; } | @Test public void testVisitor() { TransformBase transform = new TransformExtract("source", "new", DatasetsUtil.pattern("\\d+", 0, IndexType.INDEX), false); String name = transform.accept(new TransformVisitor<String>() { @Override public String visit(TransformLookup lookup) throws Exception { return "lookup"; } @Override public String visit(TransformJoin join) throws Exception { return "join"; } @Override public String visit(TransformSort sort) throws Exception { return "sort"; } @Override public String visit(TransformSorts sortMultiple) throws Exception { return "sortMultiple"; } @Override public String visit(TransformDrop drop) throws Exception { return "drop"; } @Override public String visit(TransformRename rename) throws Exception { return "rename"; } @Override public String visit(TransformConvertCase convertCase) throws Exception { return "convertCase"; } @Override public String visit(TransformTrim trim) throws Exception { return "trim"; } @Override public String visit(TransformExtract extract) throws Exception { return "extract"; } @Override public String visit(TransformAddCalculatedField addCalculatedField) throws Exception { return "addCalculatedField"; } @Override public String visit(TransformUpdateSQL updateSQL) throws Exception { return "updateSQL"; } @Override public String visit(TransformField field) throws Exception { return "field"; } @Override public String visit(TransformConvertToSingleType convertToSingleType) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(TransformSplitByDataType splitByDataType) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(TransformGroupBy groupBy) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(TransformFilter filter) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(TransformCreateFromParent createFromParent) throws Exception { throw new UnsupportedOperationException("NYI"); } }); assertEquals("extract", name); } |
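The acceptor dispatch above is visitor-pattern double dispatch: each concrete transform's accept routes to the visitor.visit overload for its own type, so the big anonymous visitor in the test resolves TransformExtract to "extract" without any instanceof chain. A stripped-down sketch of the same mechanism with two node types:

public class VisitorDemo {
  interface TransformVisitor<T> { T visit(Extract e); T visit(Rename r); }

  static abstract class Node { abstract <T> T accept(TransformVisitor<T> v); }
  // Each subclass statically selects its visit overload; dynamic dispatch happens on accept.
  static final class Extract extends Node { <T> T accept(TransformVisitor<T> v) { return v.visit(this); } }
  static final class Rename extends Node { <T> T accept(TransformVisitor<T> v) { return v.visit(this); } }

  public static void main(String[] args) {
    Node n = new Extract();
    String name = n.accept(new TransformVisitor<String>() {
      public String visit(Extract e) { return "extract"; }
      public String visit(Rename r) { return "rename"; }
    });
    System.out.println(name); // extract
  }
}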
SystemOptionManager extends BaseOptionManager implements Service, ProjectOptionManager { @Override public boolean deleteOption(final String rawName, OptionType type) { checkArgument(type == OptionType.SYSTEM, "OptionType must be SYSTEM."); final String name = rawName.toLowerCase(Locale.ROOT); optionValidatorListing.getValidator(name); final Pointer<Boolean> needUpdate = new Pointer<>(false); final List<OptionValueProto> newOptionValueProtoList = getOptionProtoList().stream() .filter(optionValueProto -> { if (name.equals(optionValueProto.getName())) { needUpdate.value = true; return false; } return true; }) .collect(Collectors.toList()); if (needUpdate.value) { options.put(OPTIONS_KEY, OptionValueProtoUtils.toOptionValueProtoList(newOptionValueProtoList)); } notifyListeners(); return true; } SystemOptionManager(OptionValidatorListing optionValidatorListing,
LogicalPlanPersistence lpPersistence,
final Provider<LegacyKVStoreProvider> storeProvider,
boolean inMemory); @Override void start(); @Override boolean isValid(String name); @Override boolean isSet(String name); @Override Iterator<OptionValue> iterator(); @Override OptionValue getOption(final String name); @Override boolean setOption(final OptionValue value); @Override boolean deleteOption(final String rawName, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); @Override OptionList getNonDefaultOptions(); @Override void close(); } | @Test public void testDelete() { registerTestOption(OptionValue.Kind.LONG, "added-option-0", "0"); registerTestOption(OptionValue.Kind.LONG, "added-option-1", "1"); registerTestOption(OptionValue.Kind.STRING, "not-added-option", "default-value"); OptionValue optionValue0 = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "added-option-0", 100); OptionValue optionValue1 = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "added-option-1", 111); OptionValueProtoList optionList = OptionValueProtoList.newBuilder() .addAllOptions(Arrays.asList( OptionValueProtoUtils.toOptionValueProto(optionValue0), OptionValueProtoUtils.toOptionValueProto(optionValue1))) .build(); when(kvStore.get(OPTIONS_KEY)).thenReturn(optionList); som.deleteOption("not-added-option", OptionValue.OptionType.SYSTEM); verify(kvStore, times(0)).put(any(), any()); som.deleteOption("added-option-0", OptionValue.OptionType.SYSTEM); verify(kvStore, times(1)).put(OPTIONS_KEY, OptionValueProtoList.newBuilder() .addAllOptions(Collections.singletonList(OptionValueProtoUtils.toOptionValueProto(optionValue1))) .build() ); } |
SystemOptionManager extends BaseOptionManager implements Service, ProjectOptionManager { @Override public boolean deleteAllOptions(OptionType type) { checkArgument(type == OptionType.SYSTEM, "OptionType must be SYSTEM."); options.put(OPTIONS_KEY, OptionValueProtoList.newBuilder().build()); notifyListeners(); return true; } SystemOptionManager(OptionValidatorListing optionValidatorListing,
LogicalPlanPersistence lpPersistence,
final Provider<LegacyKVStoreProvider> storeProvider,
boolean inMemory); @Override void start(); @Override boolean isValid(String name); @Override boolean isSet(String name); @Override Iterator<OptionValue> iterator(); @Override OptionValue getOption(final String name); @Override boolean setOption(final OptionValue value); @Override boolean deleteOption(final String rawName, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); @Override OptionList getNonDefaultOptions(); @Override void close(); } | @Test public void testDeleteAll() { registerTestOption(OptionValue.Kind.LONG, "test-option-0", "0"); registerTestOption(OptionValue.Kind.LONG, "test-option-1", "1"); OptionValue optionValue0 = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "test-option-0", 100); OptionValue optionValue1 = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "test-option-1", 111); OptionValueProtoList optionList = OptionValueProtoList.newBuilder() .addAllOptions(Arrays.asList( OptionValueProtoUtils.toOptionValueProto(optionValue0), OptionValueProtoUtils.toOptionValueProto(optionValue1))) .build(); when(kvStore.get(OPTIONS_KEY)).thenReturn(optionList); som.deleteAllOptions(OptionValue.OptionType.SYSTEM); verify(kvStore, times(1)).put(OPTIONS_KEY, OptionValueProtoList.newBuilder() .addAllOptions(Collections.emptyList()) .build() ); } |
SystemOptionManager extends BaseOptionManager implements Service, ProjectOptionManager { @Override public OptionList getNonDefaultOptions() { final OptionList nonDefaultOptions = new OptionList(); getOptionProtoList().forEach( entry -> nonDefaultOptions.add(OptionValueProtoUtils.toOptionValue(entry)) ); return nonDefaultOptions; } SystemOptionManager(OptionValidatorListing optionValidatorListing,
LogicalPlanPersistence lpPersistence,
final Provider<LegacyKVStoreProvider> storeProvider,
boolean inMemory); @Override void start(); @Override boolean isValid(String name); @Override boolean isSet(String name); @Override Iterator<OptionValue> iterator(); @Override OptionValue getOption(final String name); @Override boolean setOption(final OptionValue value); @Override boolean deleteOption(final String rawName, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); @Override OptionList getNonDefaultOptions(); @Override void close(); } | @Test public void testGetNonDefaultOptions() { registerTestOption(OptionValue.Kind.LONG, "test-option-0", "0"); registerTestOption(OptionValue.Kind.LONG, "test-option-1", "1"); OptionValue optionValue0 = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "test-option-0", 100); OptionValue optionValue1 = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "test-option-1", 111); OptionValueProtoList optionList = OptionValueProtoList.newBuilder() .addAllOptions(Arrays.asList( OptionValueProtoUtils.toOptionValueProto(optionValue0), OptionValueProtoUtils.toOptionValueProto(optionValue1))) .build(); when(kvStore.get(OPTIONS_KEY)).thenReturn(optionList); assertThat(som.getNonDefaultOptions(), containsInAnyOrder(optionValue0, optionValue1)); } |
SystemOptionManager extends BaseOptionManager implements Service, ProjectOptionManager { @Override public Iterator<OptionValue> iterator() { return getOptionProtoList().stream() .map(OptionValueProtoUtils::toOptionValue) .iterator(); } SystemOptionManager(OptionValidatorListing optionValidatorListing,
LogicalPlanPersistence lpPersistence,
final Provider<LegacyKVStoreProvider> storeProvider,
boolean inMemory); @Override void start(); @Override boolean isValid(String name); @Override boolean isSet(String name); @Override Iterator<OptionValue> iterator(); @Override OptionValue getOption(final String name); @Override boolean setOption(final OptionValue value); @Override boolean deleteOption(final String rawName, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); @Override OptionList getNonDefaultOptions(); @Override void close(); } | @Test public void testIterator() { registerTestOption(OptionValue.Kind.LONG, "test-option-0", "0"); registerTestOption(OptionValue.Kind.LONG, "test-option-1", "1"); OptionValue optionValue0 = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "test-option-0", 100); OptionValue optionValue1 = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "test-option-1", 111); OptionValueProtoList optionList = OptionValueProtoList.newBuilder() .addAllOptions(Arrays.asList( OptionValueProtoUtils.toOptionValueProto(optionValue0), OptionValueProtoUtils.toOptionValueProto(optionValue1))) .build(); when(kvStore.get(OPTIONS_KEY)).thenReturn(optionList); assertThat(Lists.from(som.iterator()), containsInAnyOrder(optionValue0, optionValue1)); } |
SystemOptionManager extends BaseOptionManager implements Service, ProjectOptionManager { @Override public boolean isSet(String name){ return getOptionProto(name) != null; } SystemOptionManager(OptionValidatorListing optionValidatorListing,
LogicalPlanPersistence lpPersistence,
final Provider<LegacyKVStoreProvider> storeProvider,
boolean inMemory); @Override void start(); @Override boolean isValid(String name); @Override boolean isSet(String name); @Override Iterator<OptionValue> iterator(); @Override OptionValue getOption(final String name); @Override boolean setOption(final OptionValue value); @Override boolean deleteOption(final String rawName, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); @Override OptionList getNonDefaultOptions(); @Override void close(); } | @Test public void testIsSet() { registerTestOption(OptionValue.Kind.LONG, "set-option", "0"); registerTestOption(OptionValue.Kind.LONG, "not-set-option", "1"); OptionValue optionValue = OptionValue.createLong(OptionValue.OptionType.SYSTEM, "set-option", 123); OptionValueProtoList optionList = OptionValueProtoList.newBuilder() .addAllOptions(Collections.singletonList(OptionValueProtoUtils.toOptionValueProto(optionValue))) .build(); when(kvStore.get(OPTIONS_KEY)).thenReturn(optionList); assertTrue(som.isSet("set-option")); assertFalse(som.isSet("not-set-option")); } |
SystemOptionManager extends BaseOptionManager implements Service, ProjectOptionManager { @Override public boolean isValid(String name){ return optionValidatorListing.isValid(name); } SystemOptionManager(OptionValidatorListing optionValidatorListing,
LogicalPlanPersistence lpPersistence,
final Provider<LegacyKVStoreProvider> storeProvider,
boolean inMemory); @Override void start(); @Override boolean isValid(String name); @Override boolean isSet(String name); @Override Iterator<OptionValue> iterator(); @Override OptionValue getOption(final String name); @Override boolean setOption(final OptionValue value); @Override boolean deleteOption(final String rawName, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override void addOptionChangeListener(OptionChangeListener optionChangeListener); @Override OptionList getNonDefaultOptions(); @Override void close(); } | @Test public void testIsValid() { registerTestOption(OptionValue.Kind.LONG, "valid-option", "0"); assertTrue(som.isValid("valid-option")); assertFalse(som.isValid("invalid-option")); } |
EagerCachingOptionManager extends InMemoryOptionManager { @Override public double getOption(DoubleValidator validator) { return getOption(validator.getOptionName()).getFloatVal(); } EagerCachingOptionManager(OptionManager delegate); @Override boolean setOption(OptionValue value); @Override boolean deleteOption(String name, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override double getOption(DoubleValidator validator); @Override long getOption(LongValidator validator); @Override String getOption(StringValidator validator); @Override OptionValidatorListing getOptionValidatorListing(); } | @Test public void testGetOption() { final OptionManager eagerCachingOptionManager = new EagerCachingOptionManager(optionManager); assertEquals(optionValueA, eagerCachingOptionManager.getOption(optionValueA.getName())); verify(optionManager, times(0)).getOption(optionValueA.getName()); } |
EagerCachingOptionManager extends InMemoryOptionManager { @Override public boolean setOption(OptionValue value) { return super.setOption(value) && delegate.setOption(value); } EagerCachingOptionManager(OptionManager delegate); @Override boolean setOption(OptionValue value); @Override boolean deleteOption(String name, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override double getOption(DoubleValidator validator); @Override long getOption(LongValidator validator); @Override String getOption(StringValidator validator); @Override OptionValidatorListing getOptionValidatorListing(); } | @Test public void testSetOption() { final OptionManager eagerCachingOptionManager = new EagerCachingOptionManager(optionManager); final OptionValue newOption = OptionValue.createBoolean(OptionValue.OptionType.SYSTEM, "newOption", true); eagerCachingOptionManager.setOption(newOption); verify(optionManager, times(1)).setOption(newOption); } |
EagerCachingOptionManager extends InMemoryOptionManager { @Override public boolean deleteOption(String name, OptionType type) { return super.deleteOption(name, type) && delegate.deleteOption(name, type); } EagerCachingOptionManager(OptionManager delegate); @Override boolean setOption(OptionValue value); @Override boolean deleteOption(String name, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override double getOption(DoubleValidator validator); @Override long getOption(LongValidator validator); @Override String getOption(StringValidator validator); @Override OptionValidatorListing getOptionValidatorListing(); } | @Test public void testDeleteOption() { final OptionManager eagerCachingOptionManager = new EagerCachingOptionManager(optionManager); eagerCachingOptionManager.deleteOption(optionValueC.getName(), OptionValue.OptionType.SYSTEM); assertNull(eagerCachingOptionManager.getOption(optionValueC.getName())); verify(optionManager, times(1)).deleteOption(optionValueC.getName(), OptionValue.OptionType.SYSTEM); } |
EagerCachingOptionManager extends InMemoryOptionManager { @Override public boolean deleteAllOptions(OptionType type) { return super.deleteAllOptions(type) && delegate.deleteAllOptions(type); } EagerCachingOptionManager(OptionManager delegate); @Override boolean setOption(OptionValue value); @Override boolean deleteOption(String name, OptionType type); @Override boolean deleteAllOptions(OptionType type); @Override double getOption(DoubleValidator validator); @Override long getOption(LongValidator validator); @Override String getOption(StringValidator validator); @Override OptionValidatorListing getOptionValidatorListing(); } | @Test public void testDeleteAllOptions() { final OptionManager eagerCachingOptionManager = new EagerCachingOptionManager(optionManager); eagerCachingOptionManager.deleteAllOptions(OptionValue.OptionType.SYSTEM); assertNull(eagerCachingOptionManager.getOption(optionValueA.getName())); assertNull(eagerCachingOptionManager.getOption(optionValueB.getName())); assertNull(eagerCachingOptionManager.getOption(optionValueC.getName())); verify(optionManager, times(1)).deleteAllOptions(OptionValue.OptionType.SYSTEM); } |
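The four EagerCachingOptionManager rows above describe a write-through cache: reads are served from the in-memory copy (the delegate is never queried, per times(0) in testGetOption), while every mutation is applied locally and also forwarded to the delegate. A generic sketch under those assumptions; all names are hypothetical:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Write-through cache sketch: reads hit the local map, writes go to both the map and the backing store.
public class WriteThroughCache {
    interface Store { void put(String key, String value); void delete(String key); }

    private final Map<String, String> cache = new ConcurrentHashMap<>();
    private final Store delegate;

    WriteThroughCache(Store delegate) { this.delegate = delegate; }

    String get(String key) { return cache.get(key); } // never touches the delegate

    void put(String key, String value) {
        cache.put(key, value);    // update the local copy first
        delegate.put(key, value); // then write through to the backing store
    }

    void delete(String key) {
        cache.remove(key);
        delegate.delete(key);
    }

    public static void main(String[] args) {
        Map<String, String> backing = new ConcurrentHashMap<>();
        WriteThroughCache c = new WriteThroughCache(new Store() {
            public void put(String key, String value) { backing.put(key, value); }
            public void delete(String key) { backing.remove(key); }
        });
        c.put("k", "v");
        System.out.println(c.get("k") + " / " + backing.get("k")); // v / v — both copies updated
    }
}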
FieldTransformationBase { public final <T> T accept(FieldTransformationVisitor<T> visitor) throws VisitorException { return acceptor.accept(visitor, this); } final T accept(FieldTransformationVisitor<T> visitor); FieldTransformation wrap(); @Override String toString(); static FieldTransformationBase unwrap(FieldTransformation t); static Converter<FieldTransformationBase, FieldTransformation> converter(); static final Acceptor<FieldTransformationBase, FieldTransformationVisitor<?>, FieldTransformation> acceptor; } | @Test public void testVisitor() { FieldTransformationBase exp = new FieldConvertToJSON(); String name = exp.accept(new FieldTransformationBase.FieldTransformationVisitor<String>() { @Override public String visit(FieldConvertCase col) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldTrim changeCase) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldExtract extract) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertFloatToInteger trim) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertFloatToDecimal calculatedField) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertDateToText fieldTransformation) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertNumberToDate numberToDate) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertDateToNumber dateToNumber) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertTextToDate textToDate) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertListToText fieldTransformation) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertToJSON fieldTransformation) throws Exception { return "json"; } @Override public String visit(FieldUnnestList unnest) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldReplacePattern replacePattern) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldReplaceCustom replacePattern) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldReplaceValue replacePattern) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldReplaceRange replaceRange) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldExtractMap extract) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldExtractList extract) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldSplit split) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldSimpleConvertToType toType) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertToTypeIfPossible toTypeIfPossible) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertToTypeWithPatternIfPossible toTypeIfPossible) throws Exception { 
throw new UnsupportedOperationException("NYI"); } @Override public String visit(FieldConvertFromJSON fromJson) throws Exception { throw new UnsupportedOperationException("NYI"); } }); assertEquals("json", name); } |
FSDataInputStreamWrapper extends FSInputStream { public static FSInputStream of(FSDataInputStream in) throws IOException { if (in.getWrappedStream() instanceof ByteBufferReadable) { return new FSDataInputStreamWrapper(in); } return new ByteArrayFSInputStream(in); } private FSDataInputStreamWrapper(FSDataInputStream in); static FSInputStream of(FSDataInputStream in); @Override int read(); @Override int read(byte[] b); @Override int read(byte[] b, int off, int len); @Override int read(ByteBuffer dst); @Override int read(long position, ByteBuffer dst); @Override long getPosition(); @Override void setPosition(long position); @Override long skip(long n); @Override int available(); @Override void close(); @Override void mark(int readlimit); @Override void reset(); @Override boolean markSupported(); } | @Test public void test() throws Exception { Class<?> byteBufferPositionedReadableClass = getClass("org.apache.hadoop.fs.ByteBufferPositionedReadable"); assumeNonMaprProfile(); final IOException ioException = new IOException("test io exception"); final FSError fsError = newFSError(ioException); FSDataInputStream fdis = new FSDataInputStream(mock(InputStream.class, withSettings().extraInterfaces(Seekable.class, byteBufferPositionedReadableClass == null ? AutoCloseable.class : byteBufferPositionedReadableClass, PositionedReadable.class, ByteBufferReadable.class).defaultAnswer(new Answer<Object>() { @Override public Object answer(InvocationOnMock invocation) throws Throwable { throw fsError; } }))); FSInputStream fdisw = FSDataInputStreamWrapper.of(fdis); Object[] params = getDummyArguments(method); try { method.invoke(fdisw, params); } catch(InvocationTargetException e) { if (byteBufferPositionedReadableClass == null) { assertThat(e.getTargetException(), anyOf(is(instanceOf(IOException.class)), is(instanceOf(UnsupportedOperationException.class)))); } else { assertThat(e.getTargetException(), is(instanceOf(IOException.class))); } if (e.getTargetException() instanceof IOException) { assertThat((IOException) e.getTargetException(), is(sameInstance(ioException))); } } } |
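FSDataInputStreamWrapper.of selects its implementation by probing the wrapped stream's capabilities: streams implementing ByteBufferReadable get the direct wrapper, everything else falls back to a byte-array-copying one. A toy version of that instanceof-driven factory, in the same spirit; the types are hypothetical:

import java.io.ByteArrayInputStream;
import java.io.InputStream;

public class CapabilityFactory {
    interface Reader { String kind(); }

    // Hypothetical capability marker, standing in for Hadoop's ByteBufferReadable.
    interface ByteBufferCapable { }

    static class FastReader implements Reader { public String kind() { return "zero-copy"; } }
    static class FallbackReader implements Reader { public String kind() { return "byte-array copy"; } }

    // Choose the wrapper by probing the stream's capabilities, as of(FSDataInputStream) does.
    static Reader of(InputStream in) {
        return (in instanceof ByteBufferCapable) ? new FastReader() : new FallbackReader();
    }

    public static void main(String[] args) {
        class CapableStream extends ByteArrayInputStream implements ByteBufferCapable {
            CapableStream() { super(new byte[0]); }
        }
        System.out.println(of(new CapableStream()).kind());                   // zero-copy
        System.out.println(of(new ByteArrayInputStream(new byte[0])).kind()); // byte-array copy
    }
}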
PermissionCheckCache { public boolean hasAccess(final String username, final NamespaceKey namespaceKey, final DatasetConfig config, final MetadataStatsCollector metadataStatsCollector, final SourceConfig sourceConfig) { final Stopwatch permissionCheck = Stopwatch.createStarted(); if (authTtlMs.get() == 0) { boolean hasAccess = checkPlugin(username, namespaceKey, config, sourceConfig); permissionCheck.stop(); metadataStatsCollector.addDatasetStat(namespaceKey.getSchemaPath(), PermissionCheckAccessType.PERMISSION_CACHE_MISS.name(), permissionCheck.elapsed(TimeUnit.MILLISECONDS)); return hasAccess; } final Key key = new Key(username, namespaceKey); final long now = System.currentTimeMillis(); final Callable<Value> loader = () -> { final boolean hasAccess = checkPlugin(username, namespaceKey, config, sourceConfig); if (!hasAccess) { throw NoAccessException.INSTANCE; } return new Value(true, now); }; Value value; try { PermissionCheckAccessType permissionCheckAccessType; value = getFromPermissionsCache(key, loader); if (now == value.createdAt) { permissionCheckAccessType = PermissionCheckAccessType.PERMISSION_CACHE_MISS; } else { permissionCheckAccessType = PermissionCheckAccessType.PERMISSION_CACHE_HIT; } if (now - value.createdAt > authTtlMs.get()) { permissionsCache.invalidate(key); value = getFromPermissionsCache(key, loader); permissionCheckAccessType = PermissionCheckAccessType.PERMISSION_CACHE_EXPIRED; } permissionCheck.stop(); metadataStatsCollector.addDatasetStat(namespaceKey.getSchemaPath(), permissionCheckAccessType.name(), permissionCheck.elapsed(TimeUnit.MILLISECONDS)); return value.hasAccess; } catch (ExecutionException e) { throw new RuntimeException("Permission check loader should not throw a checked exception", e.getCause()); } catch (UncheckedExecutionException e) { final Throwable cause = e.getCause(); if (cause instanceof UserException) { throw (UserException) cause; } throw UserException.permissionError(cause) .message("Access denied reading dataset %s.", namespaceKey.toString()) .build(logger); } } PermissionCheckCache(
Provider<StoragePlugin> plugin,
Provider<Long> authTtlMs,
final long maximumSize); boolean hasAccess(final String username, final NamespaceKey namespaceKey, final DatasetConfig config, final MetadataStatsCollector metadataStatsCollector, final SourceConfig sourceConfig); } | @Test public void throwsProperly() throws Exception { final String username = "throwsProperly"; final StoragePlugin plugin = mock(StoragePlugin.class); final SourceConfig sourceConfig = new SourceConfig(); final PermissionCheckCache checks = new PermissionCheckCache(DirectProvider.wrap(plugin), DirectProvider.wrap(1000L), 1000); when(plugin.hasAccessPermission(anyString(), any(NamespaceKey.class), any(DatasetConfig.class))) .thenThrow(new RuntimeException("you shall not pass")); try { checks.hasAccess(username, new NamespaceKey(Lists.newArrayList("what")), null, new MetadataStatsCollector(), sourceConfig); fail(); } catch (UserException e) { assertEquals(UserBitShared.DremioPBError.ErrorType.PERMISSION, e.getErrorType()); assertEquals("Access denied reading dataset what.", e.getMessage()); } } |
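PermissionCheckCache.hasAccess bypasses the cache entirely when the TTL is zero, caches only positive results (the loader throws on denial, so nothing is stored), and re-checks entries older than the TTL. A plain-Java sketch of that policy without the Guava cache used above; names are hypothetical:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BooleanSupplier;

public class TtlPermissionCache {
    private static final class Entry {
        final boolean allowed; final long createdAt;
        Entry(boolean allowed, long createdAt) { this.allowed = allowed; this.createdAt = createdAt; }
    }

    private final Map<String, Entry> cache = new ConcurrentHashMap<>();
    private final long ttlMs;

    TtlPermissionCache(long ttlMs) { this.ttlMs = ttlMs; }

    boolean hasAccess(String key, BooleanSupplier check) {
        if (ttlMs == 0) { return check.getAsBoolean(); } // TTL of zero disables caching
        long now = System.currentTimeMillis();
        Entry e = cache.get(key);
        if (e == null || now - e.createdAt > ttlMs || !e.allowed) {
            boolean allowed = check.getAsBoolean();                // re-check the source of truth
            if (allowed) { cache.put(key, new Entry(true, now)); } // cache positives only
            else { cache.remove(key); }                            // never cache a denial
            return allowed;
        }
        return e.allowed; // fresh positive hit
    }

    public static void main(String[] args) {
        TtlPermissionCache cache = new TtlPermissionCache(60_000);
        System.out.println(cache.hasAccess("alice:/ds", () -> true));  // computed, then cached
        System.out.println(cache.hasAccess("alice:/ds", () -> false)); // still true: served from cache
    }
}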
DatasetManager { public DremioTable getTable( NamespaceKey key, MetadataRequestOptions options, boolean ignoreColumnCount ){ final ManagedStoragePlugin plugin; final DatasetConfig config = getConfig(key); if(config != null) { key = new NamespaceKey(config.getFullPathList()); } plugin = plugins.getPlugin(key.getRoot(), false); if(plugin != null) { if(config == null || config.getType() != DatasetType.VIRTUAL_DATASET) { return getTableFromPlugin(key, config, plugin, options, ignoreColumnCount); } } if(config == null) { return null; } if(config.getType() != DatasetType.VIRTUAL_DATASET) { return null; } return createTableFromVirtualDataset(config, options); } DatasetManager(
PluginRetriever plugins,
NamespaceService userNamespaceService,
OptionManager optionManager
); DremioTable getTable(
NamespaceKey key,
MetadataRequestOptions options,
boolean ignoreColumnCount
); DremioTable getTable(
String datasetId,
MetadataRequestOptions options
); boolean createOrUpdateDataset(
ManagedStoragePlugin plugin,
NamespaceKey datasetPath,
DatasetConfig newConfig,
NamespaceAttribute... attributes
); void createDataset(NamespaceKey key, ManagedStoragePlugin plugin, Function<DatasetConfig, DatasetConfig> datasetMutator); } | @Test public void testAccessUsernameOverride() throws Exception { final NamespaceKey namespaceKey = new NamespaceKey("test"); final ViewExpansionContext viewExpansionContext = mock(ViewExpansionContext.class); when(viewExpansionContext.getQueryUser()).thenReturn("newaccessuser"); final SchemaConfig schemaConfig = mock(SchemaConfig.class); when(schemaConfig.getUserName()).thenReturn("username"); when(schemaConfig.getViewExpansionContext()).thenReturn(viewExpansionContext); final MetadataStatsCollector statsCollector = mock(MetadataStatsCollector.class); final MetadataRequestOptions metadataRequestOptions = mock(MetadataRequestOptions.class); when(metadataRequestOptions.getSchemaConfig()).thenReturn(schemaConfig); when(metadataRequestOptions.getStatsCollector()).thenReturn(statsCollector); final ReadDefinition readDefinition = new ReadDefinition(); readDefinition.setSplitVersion(0L); final DatasetConfig datasetConfig = new DatasetConfig(); datasetConfig.setType(DatasetType.PHYSICAL_DATASET); datasetConfig.setId(new EntityId("test")); datasetConfig.setFullPathList(Collections.singletonList("test")); datasetConfig.setReadDefinition(readDefinition); datasetConfig.setTotalNumSplits(0); class FakeSource extends ConnectionConf<FakeSource, StoragePlugin> implements ImpersonationConf { @Override public StoragePlugin newPlugin(SabotContext context, String name, Provider<StoragePluginId> pluginIdProvider) { return null; } @Override public String getAccessUserName(String delegatedUser, String queryUserName) { return queryUserName; } } final FakeSource fakeSource = new FakeSource(); final ManagedStoragePlugin managedStoragePlugin = mock(ManagedStoragePlugin.class); when(managedStoragePlugin.getId()).thenReturn(mock(StoragePluginId.class)); doReturn(fakeSource).when(managedStoragePlugin).getConnectionConf(); when(managedStoragePlugin.isCompleteAndValid(any(), any())).thenReturn(true); doThrow(new RuntimeException("Wrong username")) .when(managedStoragePlugin).checkAccess(namespaceKey, datasetConfig, "username", metadataRequestOptions); final PluginRetriever pluginRetriever = mock(PluginRetriever.class); when(pluginRetriever.getPlugin(namespaceKey.getRoot(), false)).thenReturn(managedStoragePlugin); final NamespaceService namespaceService = mock(NamespaceService.class); when(namespaceService.getDataset(namespaceKey)).thenReturn(datasetConfig); final OptionManager optionManager = mock(OptionManager.class); final DatasetManager datasetManager = new DatasetManager(pluginRetriever, namespaceService, optionManager); datasetManager.getTable(namespaceKey, metadataRequestOptions, false); }
@Test public void ignoreColumnCountOnDrop() throws Exception { final NamespaceKey namespaceKey = new NamespaceKey("test"); final ViewExpansionContext viewExpansionContext = mock(ViewExpansionContext.class); when(viewExpansionContext.getQueryUser()).thenReturn("newaccessuser"); final SchemaConfig schemaConfig = mock(SchemaConfig.class); when(schemaConfig.getUserName()).thenReturn("username"); when(schemaConfig.getViewExpansionContext()).thenReturn(viewExpansionContext); final MetadataStatsCollector statsCollector = mock(MetadataStatsCollector.class); final MetadataRequestOptions metadataRequestOptions = mock(MetadataRequestOptions.class); when(metadataRequestOptions.getSchemaConfig()).thenReturn(schemaConfig); when(metadataRequestOptions.getStatsCollector()).thenReturn(statsCollector); final ReadDefinition readDefinition = new ReadDefinition(); readDefinition.setSplitVersion(0L); final DatasetConfig datasetConfig = new DatasetConfig(); datasetConfig.setType(DatasetType.PHYSICAL_DATASET); datasetConfig.setId(new EntityId("test")); datasetConfig.setFullPathList(ImmutableList.of("test", "file", "foobar")); datasetConfig.setReadDefinition(readDefinition); datasetConfig.setTotalNumSplits(0); final ManagedStoragePlugin managedStoragePlugin = mock(ManagedStoragePlugin.class); when(managedStoragePlugin.getId()).thenReturn(mock(StoragePluginId.class)); when(managedStoragePlugin.isCompleteAndValid(any(), any())).thenReturn(false); when(managedStoragePlugin.getDefaultRetrievalOptions()).thenReturn(DatasetRetrievalOptions.DEFAULT); when(managedStoragePlugin.getDatasetHandle(any(), any(), any())).thenAnswer(invocation -> { Assert.assertEquals(invocation.getArgumentAt(2, DatasetRetrievalOptions.class).maxMetadataLeafColumns(), Integer.MAX_VALUE); return Optional.empty(); }); final PluginRetriever pluginRetriever = mock(PluginRetriever.class); when(pluginRetriever.getPlugin(namespaceKey.getRoot(), false)).thenReturn(managedStoragePlugin); final NamespaceService namespaceService = mock(NamespaceService.class); when(namespaceService.getDataset(namespaceKey)).thenReturn(datasetConfig); final OptionManager optionManager = mock(OptionManager.class); final DatasetManager datasetManager = new DatasetManager(pluginRetriever, namespaceService, optionManager); datasetManager.getTable(namespaceKey, metadataRequestOptions, true); } |
SourceMetadataManager implements AutoCloseable { UpdateStatus refreshDataset(NamespaceKey datasetKey, DatasetRetrievalOptions options) throws ConnectorException, NamespaceException { options.withFallback(bridge.getDefaultRetrievalOptions()); final NamespaceService namespace = bridge.getNamespaceService(); final DatasetSaver saver = getSaver(); DatasetConfig knownConfig = null; try { knownConfig = namespace.getDataset(datasetKey); } catch (NamespaceNotFoundException ignored) { } final DatasetConfig currentConfig = knownConfig; final boolean exists = currentConfig != null; final boolean isExtended = exists && currentConfig.getReadDefinition() != null; final EntityPath entityPath; if (exists) { entityPath = new EntityPath(currentConfig.getFullPathList()); } else { entityPath = MetadataObjectsUtils.toEntityPath(datasetKey); } logger.debug("Dataset '{}' is being synced (exists: {}, isExtended: {})", datasetKey, exists, isExtended); final SourceMetadata sourceMetadata = bridge.getMetadata(); final Optional<DatasetHandle> handle = sourceMetadata.getDatasetHandle(entityPath, options.asGetDatasetOptions(currentConfig)); if (!handle.isPresent()) { if (!exists) { throw new DatasetNotFoundException(entityPath); } if (!options.deleteUnavailableDatasets()) { logger.debug("Dataset '{}' unavailable, but not deleted", datasetKey); return UpdateStatus.UNCHANGED; } try { namespace.deleteDataset(datasetKey, currentConfig.getTag()); logger.trace("Dataset '{}' deleted", datasetKey); return UpdateStatus.DELETED; } catch (NamespaceException e) { logger.debug("Dataset '{}' delete failed", datasetKey, e); return UpdateStatus.UNCHANGED; } } final DatasetHandle datasetHandle = handle.get(); if (!options.forceUpdate() && exists && isExtended && sourceMetadata instanceof SupportsReadSignature) { final SupportsReadSignature supportsReadSignature = (SupportsReadSignature) sourceMetadata; final DatasetMetadata currentExtended = new DatasetMetadataAdapter(currentConfig); final ByteString readSignature = currentConfig.getReadDefinition().getReadSignature(); final MetadataValidity metadataValidity = supportsReadSignature.validateMetadata( readSignature == null ? BytesOutput.NONE : os -> ByteString.writeTo(os, readSignature), datasetHandle, currentExtended); if (metadataValidity == MetadataValidity.VALID) { logger.trace("Dataset '{}' metadata is valid, skipping", datasetKey); return UpdateStatus.UNCHANGED; } } final DatasetConfig datasetConfig; if (exists) { datasetConfig = currentConfig; } else { datasetConfig = MetadataObjectsUtils.newShallowConfig(datasetHandle); } saver.save(datasetConfig, datasetHandle, sourceMetadata, false, options); logger.trace("Dataset '{}' metadata saved to namespace", datasetKey); return UpdateStatus.CHANGED; } SourceMetadataManager(
NamespaceKey sourceName,
SchedulerService scheduler,
boolean isMaster,
LegacyKVStore<NamespaceKey, SourceInternalData> sourceDataStore,
final ManagedStoragePlugin.MetadataBridge bridge,
final OptionManager options,
final CatalogServiceMonitor monitor,
final Provider<MetadataRefreshInfoBroadcaster> broadcasterProvider
); void setMetadataSyncInfo(UpdateLastRefreshDateRequest request); @Override void close(); long getLastFullRefreshDateMs(); long getLastNamesRefreshDateMs(); } | @Test public void deleteUnavailableDataset() throws Exception { NamespaceService ns = mock(NamespaceService.class); when(ns.getDataset(any())) .thenReturn( new DatasetConfig() .setTag("0") .setReadDefinition(new ReadDefinition()) .setFullPathList(ImmutableList.of("one", "two")) ); boolean[] deleted = new boolean[] {false}; doAnswer(invocation -> { deleted[0] = true; return null; }).when(ns).deleteDataset(any(), anyString()); ExtendedStoragePlugin sp = mock(ExtendedStoragePlugin.class); when(sp.getDatasetHandle(any(), any(), any())) .thenReturn(Optional.empty()); ManagedStoragePlugin.MetadataBridge msp = mock(ManagedStoragePlugin.MetadataBridge.class); when(msp.getMetadata()) .thenReturn(sp); when(msp.getMetadataPolicy()) .thenReturn(new MetadataPolicy().setDeleteUnavailableDatasets(false)); when(msp.getMaxMetadataColumns()) .thenReturn(MAX_COLUMNS); when(msp.getMaxNestedLevels()) .thenReturn(MAX_NESTED_LEVELS); when(msp.getNamespaceService()) .thenReturn(ns); SourceMetadataManager manager = new SourceMetadataManager( new NamespaceKey("joker"), mock(SchedulerService.class), true, mock(LegacyKVStore.class), msp, optionManager, CatalogServiceMonitor.DEFAULT, () -> broadcaster); assertEquals(DatasetCatalog.UpdateStatus.DELETED, manager.refreshDataset(new NamespaceKey(""), DatasetRetrievalOptions.DEFAULT)); assertTrue(deleted[0]); }
@Test public void doNotDeleteUnavailableDataset() throws Exception { NamespaceService ns = mock(NamespaceService.class); when(ns.getDataset(any())) .thenReturn(new DatasetConfig() .setReadDefinition(new ReadDefinition()) .setFullPathList(ImmutableList.of("one", "two"))); doThrow(new IllegalStateException("should not invoke deleteDataset()")) .when(ns) .deleteDataset(any(), anyString()); ExtendedStoragePlugin sp = mock(ExtendedStoragePlugin.class); when(sp.getDatasetHandle(any(), any(), any())) .thenReturn(Optional.empty()); ManagedStoragePlugin.MetadataBridge msp = mock(ManagedStoragePlugin.MetadataBridge.class); when(msp.getMetadata()) .thenReturn(sp); when(msp.getMetadataPolicy()) .thenReturn(new MetadataPolicy().setDeleteUnavailableDatasets(false)); when(msp.getMaxMetadataColumns()) .thenReturn(MAX_COLUMNS); when(msp.getMaxNestedLevels()) .thenReturn(MAX_NESTED_LEVELS); when(msp.getNamespaceService()) .thenReturn(ns); SourceMetadataManager manager = new SourceMetadataManager( new NamespaceKey("joker"), mock(SchedulerService.class), true, mock(LegacyKVStore.class), msp, optionManager, CatalogServiceMonitor.DEFAULT, () -> broadcaster); assertEquals(DatasetCatalog.UpdateStatus.UNCHANGED, manager.refreshDataset(new NamespaceKey(""), DatasetRetrievalOptions.DEFAULT.toBuilder() .setDeleteUnavailableDatasets(false) .build())); }
@Test public void deleteUnavailableDatasetWithoutDefinition() throws Exception { NamespaceService ns = mock(NamespaceService.class); when(ns.getDataset(any())) .thenReturn( new DatasetConfig() .setTag("0") .setFullPathList(ImmutableList.of("one", "two")) ); boolean[] deleted = new boolean[] {false}; doAnswer(invocation -> { deleted[0] = true; return null; }).when(ns).deleteDataset(any(), anyString()); ExtendedStoragePlugin sp = mock(ExtendedStoragePlugin.class); when(sp.getDatasetHandle(any(), any(), any())) .thenReturn(Optional.empty()); ManagedStoragePlugin.MetadataBridge msp = mock(ManagedStoragePlugin.MetadataBridge.class); when(msp.getMetadata()) .thenReturn(sp); when(msp.getMetadataPolicy()) .thenReturn(new MetadataPolicy().setDeleteUnavailableDatasets(false)); when(msp.getMaxMetadataColumns()) .thenReturn(MAX_COLUMNS); when(msp.getMaxNestedLevels()) .thenReturn(MAX_NESTED_LEVELS); when(msp.getNamespaceService()) .thenReturn(ns); SourceMetadataManager manager = new SourceMetadataManager( new NamespaceKey("joker"), mock(SchedulerService.class), true, mock(LegacyKVStore.class), msp, optionManager, CatalogServiceMonitor.DEFAULT, () -> broadcaster); assertEquals(DatasetCatalog.UpdateStatus.DELETED, manager.refreshDataset(new NamespaceKey(""), DatasetRetrievalOptions.DEFAULT)); assertTrue(deleted[0]); }
@Test public void doNotDeleteUnavailableDatasetWithoutDefinition() throws Exception { NamespaceService ns = mock(NamespaceService.class); when(ns.getDataset(any())).thenReturn(new DatasetConfig() .setFullPathList(ImmutableList.of("one", "two"))); doThrow(new IllegalStateException("should not invoke deleteDataset()")) .when(ns) .deleteDataset(any(), anyString()); ExtendedStoragePlugin sp = mock(ExtendedStoragePlugin.class); when(sp.getDatasetHandle(any(), any(), any())) .thenReturn(Optional.empty()); ManagedStoragePlugin.MetadataBridge msp = mock(ManagedStoragePlugin.MetadataBridge.class); when(msp.getMetadata()) .thenReturn(sp); when(msp.getMetadataPolicy()) .thenReturn(new MetadataPolicy().setDeleteUnavailableDatasets(false)); when(msp.getMaxMetadataColumns()) .thenReturn(MAX_COLUMNS); when(msp.getMaxNestedLevels()) .thenReturn(MAX_NESTED_LEVELS); when(msp.getNamespaceService()) .thenReturn(ns); SourceMetadataManager manager = new SourceMetadataManager( new NamespaceKey("joker"), mock(SchedulerService.class), true, mock(LegacyKVStore.class), msp, optionManager, CatalogServiceMonitor.DEFAULT, () -> broadcaster); assertEquals(DatasetCatalog.UpdateStatus.UNCHANGED, manager.refreshDataset(new NamespaceKey(""), DatasetRetrievalOptions.DEFAULT.toBuilder() .setDeleteUnavailableDatasets(false) .build())); }
@Test public void checkForceUpdate() throws Exception { NamespaceService ns = mock(NamespaceService.class); when(ns.getDataset(any())).thenReturn(null); DatasetMetadataSaver saver = mock(DatasetMetadataSaver.class); doNothing().when(saver).saveDataset(any(), anyBoolean(), any(), any()); when(ns.newDatasetMetadataSaver(any(), any(), any(), anyLong())) .thenReturn(saver); ExtendedStoragePlugin sp = mock(ExtendedStoragePlugin.class); DatasetHandle handle = () -> new EntityPath(Lists.newArrayList("one")); when(sp.getDatasetHandle(any(), any(), any())) .thenReturn(Optional.of(handle)); when(sp.provideSignature(any(), any())) .thenReturn(BytesOutput.NONE); final boolean[] forced = new boolean[]{false}; doAnswer(invocation -> { forced[0] = true; return DatasetMetadata.of(DatasetStats.of(0, ScanCostFactor.OTHER.getFactor()), new Schema(new ArrayList<>())); }).when(sp).getDatasetMetadata(any(DatasetHandle.class), any(PartitionChunkListing.class), any(), any()); when(sp.listPartitionChunks(any(), any(), any())) .thenReturn(Collections::emptyIterator); when(sp.validateMetadata(any(), any(), any())) .thenReturn(SupportsReadSignature.MetadataValidity.VALID); ManagedStoragePlugin.MetadataBridge msp = mock(ManagedStoragePlugin.MetadataBridge.class); when(msp.getMetadata()) .thenReturn(sp); when(msp.getMetadataPolicy()) .thenReturn(new MetadataPolicy().setDeleteUnavailableDatasets(false)); when(msp.getMaxMetadataColumns()) .thenReturn(MAX_COLUMNS); when(msp.getMaxNestedLevels()) .thenReturn(MAX_NESTED_LEVELS); when(msp.getNamespaceService()).thenReturn(ns); SourceMetadataManager manager = new SourceMetadataManager( new NamespaceKey("joker"), mock(SchedulerService.class), true, mock(LegacyKVStore.class), msp, optionManager, CatalogServiceMonitor.DEFAULT, () -> broadcaster ); manager.refreshDataset(new NamespaceKey(""), DatasetRetrievalOptions.DEFAULT.toBuilder() .setForceUpdate(true) .build()); assertTrue(forced[0]); }
@Test public void dataSetPathCaseSensitivity() throws Exception { final String qualifier = "inspector"; final String original = "testPath"; final String capital = "TESTPATH"; final ImmutableList<String> fullPathList = ImmutableList.of(qualifier, original); final EntityPath originalPath = new EntityPath(fullPathList); final EntityPath capitalPath = new EntityPath(ImmutableList.of(qualifier, capital)); final DatasetHandle datasetHandle = () -> originalPath; final NamespaceKey dataSetKey = new NamespaceKey(ImmutableList.of(qualifier, capital)); ExtendedStoragePlugin mockStoragePlugin = mock(ExtendedStoragePlugin.class); when(mockStoragePlugin.listDatasetHandles()) .thenReturn(Collections::emptyIterator); when(mockStoragePlugin.getDatasetHandle(eq(capitalPath), any(), any())) .thenReturn(Optional.empty()); when(mockStoragePlugin.getDatasetHandle(eq(originalPath), any(), any())) .thenReturn(Optional.of(datasetHandle)); when(mockStoragePlugin.getState()) .thenReturn(SourceState.GOOD); when(mockStoragePlugin.listPartitionChunks(any(), any(), any())) .thenReturn(Collections::emptyIterator); when(mockStoragePlugin.validateMetadata(any(), any(), any())) .thenReturn(SupportsReadSignature.MetadataValidity.VALID); when(mockStoragePlugin.provideSignature(any(), any())) .thenReturn(BytesOutput.NONE); final boolean[] forced = new boolean[]{false}; doAnswer(invocation -> { forced[0] = true; return DatasetMetadata.of(DatasetStats.of(0, ScanCostFactor.OTHER.getFactor()), new Schema(new ArrayList<>())); }).when(mockStoragePlugin).getDatasetMetadata(any(DatasetHandle.class), any(PartitionChunkListing.class), any(), any()); NamespaceService ns = mock(NamespaceService.class); when(ns.getDataset(any())) .thenReturn(MetadataObjectsUtils.newShallowConfig(datasetHandle)); DatasetMetadataSaver saver = mock(DatasetMetadataSaver.class); doNothing().when(saver).saveDataset(any(), anyBoolean(), any(), any()); when(ns.newDatasetMetadataSaver(any(), any(), any(), anyLong())) .thenReturn(saver); ManagedStoragePlugin.MetadataBridge msp = mock(ManagedStoragePlugin.MetadataBridge.class); when(msp.getMetadata()) .thenReturn(mockStoragePlugin); when(msp.getMetadataPolicy()) .thenReturn(new MetadataPolicy()); when(msp.getNamespaceService()) .thenReturn(ns); SourceMetadataManager manager = new SourceMetadataManager( dataSetKey, mock(SchedulerService.class), true, mock(LegacyKVStore.class), msp, optionManager, CatalogServiceMonitor.DEFAULT, () -> broadcaster ); assertEquals(DatasetCatalog.UpdateStatus.CHANGED, manager.refreshDataset(dataSetKey, DatasetRetrievalOptions.DEFAULT.toBuilder() .build()) ); }
@Test public void exceedMaxColumnLimit() throws Exception { NamespaceService ns = mock(NamespaceService.class); when(ns.getDataset(any())) .thenReturn(null); ExtendedStoragePlugin sp = mock(ExtendedStoragePlugin.class); DatasetHandle handle = () -> new EntityPath(Lists.newArrayList("one")); when(sp.getDatasetHandle(any(), any(), any())) .thenReturn(Optional.of(handle)); when(sp.listPartitionChunks(any(), any(), any())) .thenReturn(Collections::emptyIterator); when(sp.validateMetadata(any(), eq(handle), any())) .thenReturn(SupportsReadSignature.MetadataValidity.INVALID); doThrow(new ColumnCountTooLargeException(1)) .when(sp) .getDatasetMetadata(eq(handle), any(PartitionChunkListing.class), any(), any()); ManagedStoragePlugin.MetadataBridge msp = mock(ManagedStoragePlugin.MetadataBridge.class); when(msp.getMetadata()) .thenReturn(sp); when(msp.getMetadataPolicy()) .thenReturn(new MetadataPolicy()); when(msp.getNamespaceService()) .thenReturn(ns); SourceMetadataManager manager = new SourceMetadataManager( new NamespaceKey("joker"), mock(SchedulerService.class), true, mock(LegacyKVStore.class), msp, optionManager, CatalogServiceMonitor.DEFAULT, () -> broadcaster ); thrownException.expect(new UserExceptionMatcher(UserBitShared.DremioPBError.ErrorType.VALIDATION, "exceeded the maximum number of fields of 1")); manager.refreshDataset(new NamespaceKey(""), DatasetRetrievalOptions.DEFAULT.toBuilder() .setForceUpdate(true) .setMaxMetadataLeafColumns(1) .build()); } |
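refreshDataset reduces to a small decision table: a missing handle yields DELETED or UNCHANGED depending on deleteUnavailableDatasets, and a valid read signature short-circuits to UNCHANGED unless the update is forced. A condensed sketch of just that control flow, with booleans standing in for the real handles, options, and exceptions:

// Condensed sketch of the refresh decision; all inputs are hypothetical stand-ins.
enum UpdateStatus { UNCHANGED, DELETED, CHANGED }

public class RefreshSketch {
    static UpdateStatus refresh(boolean exists, boolean handlePresent, boolean deleteUnavailable,
                                boolean forceUpdate, boolean hasReadDefinition, boolean signatureValid) {
        if (!handlePresent) { // source no longer exposes the dataset
            if (!exists) { throw new IllegalStateException("dataset not found"); } // stand-in for DatasetNotFoundException
            return deleteUnavailable ? UpdateStatus.DELETED : UpdateStatus.UNCHANGED;
        }
        if (!forceUpdate && exists && hasReadDefinition && signatureValid) {
            return UpdateStatus.UNCHANGED; // read signature says cached metadata is still good
        }
        return UpdateStatus.CHANGED; // (re)save metadata
    }

    public static void main(String[] args) {
        // Mirrors deleteUnavailableDataset vs doNotDeleteUnavailableDataset vs checkForceUpdate above.
        System.out.println(refresh(true, false, true,  false, true, true));  // DELETED
        System.out.println(refresh(true, false, false, false, true, true));  // UNCHANGED
        System.out.println(refresh(true, true,  true,  true,  true, true));  // CHANGED (forced)
    }
}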
ExpressionBase { public final <T> T accept(ExpressionVisitor<T> visitor) throws VisitorException { return acceptor.accept(visitor, this); } final T accept(ExpressionVisitor<T> visitor); Expression wrap(); @Override String toString(); static ExpressionBase unwrap(Expression t); static Converter<ExpressionBase, Expression> converter(); static final Acceptor<ExpressionBase, ExpressionVisitor<?>, Expression> acceptor; } | @Test public void testVisitor() { ExpressionBase exp = new ExpCalculatedField("foo"); String name = exp.accept(new ExpressionBase.ExpressionVisitor<String>() { @Override public String visit(ExpColumnReference col) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(ExpConvertCase changeCase) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(ExpExtract extract) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(ExpTrim trim) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(ExpCalculatedField calculatedField) throws Exception { return "calc"; } @Override public String visit(ExpFieldTransformation fieldTransformation) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(ExpConvertType convertType) throws Exception { throw new UnsupportedOperationException("NYI"); } @Override public String visit(ExpMeasure measure) throws Exception { throw new UnsupportedOperationException("NYI"); } }); assertEquals("calc", name); } |
NamespaceListing implements DatasetHandleListing { @VisibleForTesting Iterator<DatasetHandle> newIterator(Iterator<NamespaceKey> keyIterator) { return new TransformingIterator(keyIterator); } NamespaceListing(
NamespaceService namespaceService,
NamespaceKey sourceKey,
SourceMetadata sourceMetadata,
DatasetRetrievalOptions options
); @Override Iterator<? extends DatasetHandle> iterator(); } | @Test public void emptyIterator() { final NamespaceListing listing = new NamespaceListing(null, null, null, null); try { listing.newIterator(Collections.emptyIterator()).next(); fail(); } catch (NoSuchElementException expected) { } assertFalse(listing.newIterator(Collections.emptyIterator()).hasNext()); } |
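NamespaceListing's TransformingIterator wraps a key iterator and converts each element lazily. A generic sketch of the same wrapper, including the NoSuchElementException behavior the emptyIterator test asserts; the class below is illustrative, not the Dremio implementation:

import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.function.Function;

// Transforming-iterator sketch: wraps a source iterator and maps each element on the fly.
public class TransformingIterator<A, B> implements Iterator<B> {
    private final Iterator<A> source;
    private final Function<A, B> mapper;

    public TransformingIterator(Iterator<A> source, Function<A, B> mapper) {
        this.source = source;
        this.mapper = mapper;
    }

    @Override public boolean hasNext() { return source.hasNext(); }

    @Override public B next() {
        if (!source.hasNext()) { throw new NoSuchElementException(); } // matches the emptyIterator test
        return mapper.apply(source.next());
    }

    public static void main(String[] args) {
        Iterator<String> it = new TransformingIterator<>(List.of("a", "b").iterator(), String::toUpperCase);
        while (it.hasNext()) { System.out.println(it.next()); } // A, B
    }
}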
CatalogServiceImpl implements CatalogService { @VisibleForTesting public boolean refreshSource(NamespaceKey source, MetadataPolicy metadataPolicy, UpdateType updateType) throws NamespaceException { ManagedStoragePlugin plugin = getPlugins().get(source.getRoot()); if (plugin == null){ throw UserException.validationError().message("Unknown source %s", source.getRoot()).build(logger); } else if (MissingPluginConf.TYPE.equals(plugin.getConfig().getType())) { return false; } return plugin.refresh(updateType, metadataPolicy); } CatalogServiceImpl(
Provider<SabotContext> context,
Provider<SchedulerService> scheduler,
Provider<? extends Provider<ConnectionConf<?, ?>>> sysTableConfProvider,
Provider<FabricService> fabric,
Provider<ConnectionReader> connectionReaderProvider,
Provider<BufferAllocator> bufferAllocator,
Provider<LegacyKVStoreProvider> kvStoreProvider,
Provider<DatasetListingService> datasetListingService,
Provider<OptionManager> optionManager,
Provider<MetadataRefreshInfoBroadcaster> broadcasterProvider,
DremioConfig config,
EnumSet<Role> roles
); @VisibleForTesting CatalogServiceImpl(
Provider<SabotContext> context,
Provider<SchedulerService> scheduler,
Provider<? extends Provider<ConnectionConf<?, ?>>> sysTableConfProvider,
Provider<FabricService> fabric,
Provider<ConnectionReader> connectionReaderProvider,
Provider<BufferAllocator> bufferAllocator,
Provider<LegacyKVStoreProvider> kvStoreProvider,
Provider<DatasetListingService> datasetListingService,
Provider<OptionManager> optionManager,
Provider<MetadataRefreshInfoBroadcaster> broadcasterProvider,
DremioConfig config,
EnumSet<Role> roles,
final CatalogServiceMonitor monitor
); @Override void start(); @VisibleForTesting void deleteExcept(Set<String> rootsToSaveSet); @VisibleForTesting boolean refreshSource(NamespaceKey source, MetadataPolicy metadataPolicy, UpdateType updateType); @VisibleForTesting void synchronizeSources(); @Override void close(); boolean createSourceIfMissingWithThrow(SourceConfig config); @VisibleForTesting void deleteSource(String name); @VisibleForTesting ManagedStoragePlugin getManagedSource(String name); @SuppressWarnings("unchecked") @Override T getSource(StoragePluginId pluginId); @Override SourceState getSourceState(String name); @SuppressWarnings("unchecked") @Override T getSource(String name); @Override Catalog getCatalog(MetadataRequestOptions requestOptions); @Override boolean isSourceConfigMetadataImpacting(SourceConfig config); @Override RuleSet getStorageRules(OptimizerRulesContext context, PlannerPhase phase); @VisibleForTesting Catalog getSystemUserCatalog(); static final long CATALOG_SYNC; static final String CATALOG_SOURCE_DATA_NAMESPACE; } | @Test public void refreshSourceMetadata_EmptySource() throws Exception { doMockDatasets(mockUpPlugin, ImmutableList.of()); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); List<NamespaceKey> datasets = Lists.newArrayList(namespaceService.getAllDatasets(mockUpKey)); assertEquals(0, datasets.size()); assertNoDatasetsAfterSourceDeletion(); }
@Test public void refreshSourceMetadata_FirstTime() throws Exception { doMockDatasets(mockUpPlugin, mockDatasets); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); List<NamespaceKey> actualDatasetKeys = Lists.newArrayList(namespaceService.getAllDatasets(mockUpKey)); assertEquals(5, actualDatasetKeys.size()); assertDatasetsAreEqual(mockDatasets, actualDatasetKeys); assertFoldersExist(Lists.newArrayList(MOCK_UP + ".fld1", MOCK_UP + ".fld2", MOCK_UP + ".fld2.fld21")); assertDatasetSchemasDefined(actualDatasetKeys); assertNoDatasetsAfterSourceDeletion(); }
@Test public void refreshSourceMetadata_FirstTime_UpdateWithNewDatasets() throws Exception { doMockDatasets(mockUpPlugin, mockDatasets); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); List<NamespaceKey> actualDatasetKeys = Lists.newArrayList(namespaceService.getAllDatasets(mockUpKey)); assertEquals(5, actualDatasetKeys.size()); List<DatasetHandle> testDatasets = Lists.newArrayList(mockDatasets); testDatasets.add(newDataset(MOCK_UP + ".ds4")); testDatasets.add(newDataset(MOCK_UP + ".fld1.ds13")); testDatasets.add(newDataset(MOCK_UP + ".fld2.fld21.ds212")); testDatasets.add(newDataset(MOCK_UP + ".fld5.ds51")); doMockDatasets(mockUpPlugin, testDatasets); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); actualDatasetKeys = Lists.newArrayList(namespaceService.getAllDatasets(mockUpKey)); assertEquals(9, actualDatasetKeys.size()); assertDatasetsAreEqual(testDatasets, actualDatasetKeys); assertFoldersExist(Lists.newArrayList(MOCK_UP + ".fld1", MOCK_UP + ".fld2", MOCK_UP + ".fld2.fld21", MOCK_UP + ".fld5")); assertDatasetSchemasDefined(actualDatasetKeys); assertNoDatasetsAfterSourceDeletion(); }
@Test public void refreshSourceMetadata_FirstTime_MultipleUpdatesWithNewDatasetsDeletedDatasets() throws Exception { doMockDatasets(mockUpPlugin, mockDatasets); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); List<DatasetHandle> testDatasets = Lists.newArrayList(); testDatasets.add(newDataset(MOCK_UP + ".fld1.ds11")); testDatasets.add(newDataset(MOCK_UP + ".fld2.fld22.ds222")); testDatasets.add(newDataset(MOCK_UP + ".fld2.ds22")); testDatasets.add(newDataset(MOCK_UP + ".ds4")); testDatasets.add(newDataset(MOCK_UP + ".fld5.ds51")); doMockDatasets(mockUpPlugin, testDatasets); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); List<NamespaceKey> actualDatasetKeys = Lists.newArrayList(namespaceService.getAllDatasets(mockUpKey)); assertEquals(5, actualDatasetKeys.size()); assertDatasetsAreEqual(testDatasets, actualDatasetKeys); assertFoldersExist(Lists.newArrayList(MOCK_UP + ".fld1", MOCK_UP + ".fld2", MOCK_UP + ".fld2.fld22", MOCK_UP + ".fld5")); assertFoldersDoNotExist(Lists.newArrayList(MOCK_UP + ".fld2.fld21")); assertDatasetSchemasDefined(actualDatasetKeys); testDatasets = Lists.newArrayList(); testDatasets.add(newDataset(MOCK_UP + ".fld1.ds11")); testDatasets.add(newDataset(MOCK_UP + ".fld2.ds22")); testDatasets.add(newDataset(MOCK_UP + ".fld2.ds23")); testDatasets.add(newDataset(MOCK_UP + ".ds5")); testDatasets.add(newDataset(MOCK_UP + ".fld5.ds51")); testDatasets.add(newDataset(MOCK_UP + ".fld5.ds52")); testDatasets.add(newDataset(MOCK_UP + ".fld6.ds61")); doMockDatasets(mockUpPlugin, testDatasets); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); catalogService.refreshSource(mockUpKey, CatalogService.REFRESH_EVERYTHING_NOW, CatalogServiceImpl.UpdateType.FULL); actualDatasetKeys = Lists.newArrayList(namespaceService.getAllDatasets(mockUpKey)); assertEquals(7, actualDatasetKeys.size()); assertDatasetsAreEqual(testDatasets, actualDatasetKeys); assertFoldersExist(Lists.newArrayList(MOCK_UP + ".fld1", MOCK_UP + ".fld2", MOCK_UP + ".fld5", MOCK_UP + ".fld6")); assertFoldersDoNotExist(Lists.newArrayList((MOCK_UP + ".fld2.fld22"))); assertDatasetSchemasDefined(actualDatasetKeys); assertNoDatasetsAfterSourceDeletion(); }
@Test public void refreshSourceNames() throws Exception { doMockDatasets(mockUpPlugin, mockDatasets); catalogService.refreshSource(mockUpKey, CatalogService.DEFAULT_METADATA_POLICY, CatalogServiceImpl.UpdateType.NAMES); assertEquals(5, Lists.newArrayList(namespaceService.getAllDatasets(mockUpKey)).size()); List<DatasetHandle> testDatasets = Lists.newArrayList(mockDatasets); testDatasets.add(newDataset(MOCK_UP + ".fld1.ds13")); testDatasets.add(newDataset(MOCK_UP + ".fld2.fld21.ds212")); testDatasets.add(newDataset(MOCK_UP + ".fld2.ds23")); testDatasets.add(newDataset(MOCK_UP + ".ds4")); testDatasets.add(newDataset(MOCK_UP + ".fld5.ds51")); doMockDatasets(mockUpPlugin, testDatasets); catalogService.refreshSource(mockUpKey, CatalogService.DEFAULT_METADATA_POLICY, CatalogServiceImpl.UpdateType.NAMES); List<NamespaceKey> actualDatasetKeys = Lists.newArrayList(namespaceService.getAllDatasets(mockUpKey)); assertEquals(10, actualDatasetKeys.size()); assertDatasetsAreEqual(testDatasets, actualDatasetKeys); assertFoldersExist(Lists.newArrayList(MOCK_UP + ".fld1", MOCK_UP + ".fld2", MOCK_UP + ".fld2.fld21", MOCK_UP + ".fld5")); assertDatasetSchemasNotDefined(actualDatasetKeys); assertNoDatasetsAfterSourceDeletion(); } |
DayOfWeekFromSundayDateTimeField extends PreciseDurationDateTimeField { @Override public int get(long instant) { return map(chronology.dayOfWeek().get(instant)); } DayOfWeekFromSundayDateTimeField(Chronology chronology, DurationField days); @Override int get(long instant); @Override String getAsText(int fieldValue, Locale locale); @Override String getAsShortText(int fieldValue, Locale locale); @Override DurationField getRangeDurationField(); @Override int getMinimumValue(); @Override int getMaximumValue(); @Override int getMaximumTextLength(Locale locale); @Override int getMaximumShortTextLength(Locale locale); @Override String toString(); } | @Test public void get() { assertEquals(instance.get(1526173261000L), 1); assertEquals(instance.get(1526259661000L), 2); assertEquals(instance.get(1526086861000L), 7); } |
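The field converts ISO day-of-week numbering (Monday=1 .. Sunday=7) to a Sunday-based one (Sunday=1 .. Saturday=7). The map and reverse bodies are not shown above, so the arithmetic below is an assumption that is merely consistent with the asserted epochs (1526173261000L falls on a Sunday, 1526086861000L on a Saturday):

// Hypothetical map/reverse arithmetic, consistent with the tests but not taken from the source.
public class DayOfWeekMapping {
    static int map(int isoDay)     { return isoDay % 7 + 1; }       // ISO -> Sunday-based
    static int reverse(int sunDay) { return (sunDay + 5) % 7 + 1; } // Sunday-based -> ISO

    public static void main(String[] args) {
        System.out.println(map(7));     // ISO Sunday -> 1, as get(1526173261000L) asserts
        System.out.println(map(1));     // ISO Monday -> 2
        System.out.println(map(6));     // ISO Saturday -> 7
        System.out.println(reverse(1)); // 1 -> ISO 7 (Sunday), used by getAsText(fieldValue, locale)
    }
}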
DayOfWeekFromSundayDateTimeField extends PreciseDurationDateTimeField { @Override public String getAsText(int fieldValue, Locale locale) { return chronology.dayOfWeek().getAsText(reverse(fieldValue), locale); } DayOfWeekFromSundayDateTimeField(Chronology chronology, DurationField days); @Override int get(long instant); @Override String getAsText(int fieldValue, Locale locale); @Override String getAsShortText(int fieldValue, Locale locale); @Override DurationField getRangeDurationField(); @Override int getMinimumValue(); @Override int getMaximumValue(); @Override int getMaximumTextLength(Locale locale); @Override int getMaximumShortTextLength(Locale locale); @Override String toString(); } | @Test public void getAsText() { assertTrue("Sunday".equalsIgnoreCase(instance.getAsText(1526173261000L))); assertTrue("Monday".equalsIgnoreCase(instance.getAsText(1526259661000L))); assertTrue("Saturday".equalsIgnoreCase(instance.getAsText(1526086861000L))); }
@Test public void getAsTextFieldValue() { assertTrue("Sunday".equalsIgnoreCase(instance.getAsText(1, Locale.getDefault()))); assertTrue("Monday".equalsIgnoreCase(instance.getAsText(2, Locale.getDefault()))); assertTrue("Saturday".equalsIgnoreCase(instance.getAsText(7, Locale.getDefault()))); } |
DayOfWeekFromSundayDateTimeField extends PreciseDurationDateTimeField { @Override public String getAsShortText(int fieldValue, Locale locale) { return chronology.dayOfWeek().getAsShortText(reverse(fieldValue), locale); } DayOfWeekFromSundayDateTimeField(Chronology chronology, DurationField days); @Override int get(long instant); @Override String getAsText(int fieldValue, Locale locale); @Override String getAsShortText(int fieldValue, Locale locale); @Override DurationField getRangeDurationField(); @Override int getMinimumValue(); @Override int getMaximumValue(); @Override int getMaximumTextLength(Locale locale); @Override int getMaximumShortTextLength(Locale locale); @Override String toString(); } | @Test public void getAsShortText() { assertTrue("Sun".equalsIgnoreCase(instance.getAsShortText(1526173261000L))); assertTrue("Mon".equalsIgnoreCase(instance.getAsShortText(1526259661000L))); assertTrue("Sat".equalsIgnoreCase(instance.getAsShortText(1526086861000L))); }
@Test public void getAsShortTextFieldValue() { assertTrue("Sun".equalsIgnoreCase(instance.getAsShortText(1, Locale.getDefault()))); assertTrue("Mon".equalsIgnoreCase(instance.getAsShortText(2, Locale.getDefault()))); assertTrue("Sat".equalsIgnoreCase(instance.getAsShortText(7, Locale.getDefault()))); } |
ExtractListRecommender extends Recommender<ExtractListRule, Selection> { @Override public List<ExtractListRule> getRules(Selection selection, DataType selColType) { checkArgument(selColType == DataType.LIST, "Extract list items is supported only on LIST type columns"); JsonSelection jsonSelection; try { jsonSelection = new JSONElementLocator(selection.getCellText()).locate(selection.getOffset(), selection.getOffset() + selection.getLength()); } catch (IOException e) { throw new ClientErrorException(String.format("invalid JSON: %s", selection.getCellText()), e); } ArrayJsonPathElement start = extractArrayIndex(jsonSelection.getStart()); ArrayJsonPathElement end = extractArrayIndex(jsonSelection.getEnd()); List<ExtractListRule> rules = new ArrayList<>(); if (start == end) { rules.add(new ExtractListRule(single).setSingle(new ExtractRuleSingle(start.getPosition()))); } else { ListSelection[] selections = { new ListSelection(fromTheStart(start), fromTheStart(end)), new ListSelection(fromTheStart(start), fromTheEnd(end)), new ListSelection(fromTheEnd(start), fromTheStart(end)), new ListSelection(fromTheEnd(start), fromTheEnd(end)) }; for (ListSelection listSelection : selections) { rules.add((new ExtractListRule(multiple) .setMultiple( new ExtractRuleMultiple(listSelection) ))); } } return rules; } @Override List<ExtractListRule> getRules(Selection selection, DataType selColType); @Override TransformRuleWrapper<ExtractListRule> wrapRule(ExtractListRule rule); } | @Test public void ruleSuggestionsSingleElement() throws Exception { List<ExtractListRule> rules = recommender.getRules(new Selection("foo", "[ \"foo\", \"bar\", \"baz\" ]", 3, 3), DataType.LIST); assertEquals(1, rules.size()); assertEquals(ExtractListRuleType.single, rules.get(0).getType()); assertEquals(0, rules.get(0).getSingle().getIndex().intValue()); }
@Test public void ruleSuggestionsMultiElement() throws Exception { List<ExtractListRule> rules = recommender.getRules(new Selection("foo", "[ \"foo\", \"bar\", \"baz\" ]", 3, 10), DataType.LIST); assertEquals(4, rules.size()); compare(new Offset(0, FROM_THE_START), new Offset(1, FROM_THE_START), rules.get(0)); compare(new Offset(0, FROM_THE_START), new Offset(1, FROM_THE_END), rules.get(1)); compare(new Offset(2, FROM_THE_END), new Offset(1, FROM_THE_START), rules.get(2)); compare(new Offset(2, FROM_THE_END), new Offset(1, FROM_THE_END), rules.get(3)); } |
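The four suggested rules are the cross product of anchoring each endpoint either from the start or from the end of the list; a tiny standalone enumeration of the same cross product, in the same order the rules are built (enum name hypothetical):

public class AnchorCombosSketch {
    enum Anchor { FROM_THE_START, FROM_THE_END }

    public static void main(String[] args) {
        // (S,S), (S,E), (E,S), (E,E) -- matching rules.get(0) .. rules.get(3) above
        for (Anchor start : Anchor.values()) {
            for (Anchor end : Anchor.values()) {
                System.out.println(start + " .. " + end);
            }
        }
    }
}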
MorePosixFilePermissions { public static Set<PosixFilePermission> fromOctalMode(int mode) { Preconditions.checkArgument(0 <= mode && mode <= MAX_MODE, "mode should be between 0 and 0777"); final Set<PosixFilePermission> result = EnumSet.noneOf(PosixFilePermission.class); int mask = 1 << (PERMISSIONS_LENGTH - 1); for (PosixFilePermission permission: PERMISSIONS) { if ((mode & mask) != 0) { result.add(permission); } mask = mask >> 1; } return result; } private MorePosixFilePermissions(); static Set<PosixFilePermission> fromOctalMode(int mode); static Set<PosixFilePermission> fromOctalMode(String mode); } | @Test public void testFromOctalModeWithIllegalMode() { assertFails(() -> MorePosixFilePermissions.fromOctalMode(-1)); assertFails(() -> MorePosixFilePermissions.fromOctalMode(512)); assertFails(() -> MorePosixFilePermissions.fromOctalMode(Integer.MIN_VALUE)); assertFails(() -> MorePosixFilePermissions.fromOctalMode(Integer.MAX_VALUE)); assertFails(() -> MorePosixFilePermissions.fromOctalMode("-1")); assertFails(() -> MorePosixFilePermissions.fromOctalMode("8")); assertFails(() -> MorePosixFilePermissions.fromOctalMode("")); assertFails(() -> MorePosixFilePermissions.fromOctalMode("foo")); } |
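For reference, a JDK-only check of the same decoding, assuming the usual rwxrwxrwx bit order that the PERMISSIONS array walks from the high bit down (the String overload's rejection of "8" in the test is consistent with radix-8 parsing):

import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;

public class OctalModeSketch {
    public static void main(String[] args) {
        // 0750 == rwxr-x---; the JDK helper parses the symbolic form
        Set<PosixFilePermission> expected = PosixFilePermissions.fromString("rwxr-x---");
        System.out.println(expected);
        // Radix-8 "750" is decimal 488, so fromOctalMode(488) would be the numeric equivalent
        System.out.println(Integer.parseInt("750", 8)); // 488
    }
}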
Path implements Comparable<Path> { public static Path of(URI uri) { return new Path(uri); } private Path(URI uri); static Path of(URI uri); static Path of(String path); static Path mergePaths(Path path1, Path path2); static Path withoutSchemeAndAuthority(Path path); String getName(); Path getParent(); Path resolve(Path path); Path resolve(String path); boolean isAbsolute(); int depth(); URI toURI(); @Override String toString(); @Override int hashCode(); @Override boolean equals(Object obj); @Override int compareTo(Path that); static URI toURI(String uri); static String toString(Path path); static final String SEPARATOR; static final char SEPARATOR_CHAR; } | @Test public void testParent() { checkParent(Path.of("/foo/bar"), Path.of("/foo")); checkParent(Path.of("/foo"), Path.of("/")); checkParent(Path.of("/"), null); checkParent(Path.of("foo/bar"), Path.of("foo")); checkParent(Path.of("foo"), Path.of(".")); }
@Test public void testResolveOfPath() { checkResolveOfPath(Path.of("hdfs: checkResolveOfPath(Path.of("hdfs: checkResolveOfPath(Path.of("hdfs: checkResolveOfPath(Path.of("hdfs: checkResolveOfPath(Path.of("."), Path.of("foo"), Path.of("foo")); checkResolveOfPath(Path.of("foo"), Path.of("."), Path.of("foo")); checkResolveOfPath(Path.of("/"), Path.of("."), Path.of("/")); }
@Test public void testResolveOfString() { checkResolveOfString(Path.of("hdfs: checkResolveOfString(Path.of("hdfs: checkResolveOfString(Path.of("hdfs: checkResolveOfString(Path.of("hdfs: checkResolveOfString(Path.of("."), "foo", Path.of("foo")); checkResolveOfString(Path.of("foo"), ".", Path.of("foo")); checkResolveOfString(Path.of("/foo"), ".", Path.of("/foo")); checkResolveOfString(Path.of("/"), ".", Path.of("/")); } |
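This Path wraps a java.net.URI, so JDK URI arithmetic approximates the parent/resolve behaviour exercised above (note that URI.resolve needs a trailing slash on the base, unlike Path.resolve):

import java.net.URI;

public class PathResolveSketch {
    public static void main(String[] args) {
        // Resolving a relative segment against a directory-style base
        System.out.println(URI.create("/foo/").resolve("bar")); // /foo/bar
        // Parent of /foo/bar: drop the last '/'-separated segment
        String p = "/foo/bar";
        System.out.println(p.substring(0, p.lastIndexOf('/'))); // /foo
    }
}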
Path implements Comparable<Path> { public static Path mergePaths(Path path1, Path path2) { final String path1Path = path1.uri.getPath(); final String path2Path = path2.uri.getPath(); if (path2Path.isEmpty()) { return path1; } final StringBuilder finalPath = new StringBuilder(path1Path.length() + path2Path.length() + 1); finalPath.append(path1Path); if (!path1Path.isEmpty() && path1Path.charAt(path1Path.length() - 1) != SEPARATOR_CHAR) { finalPath.append(SEPARATOR_CHAR); } if (path2Path.charAt(0) != SEPARATOR_CHAR) { finalPath.append(path2Path); } else { finalPath.append(path2Path.substring(1)); } try { return of(new URI(path1.uri.getScheme(), path1.uri.getAuthority(), finalPath.toString(), null, null)); } catch (URISyntaxException e) { throw new IllegalArgumentException(); } } private Path(URI uri); static Path of(URI uri); static Path of(String path); static Path mergePaths(Path path1, Path path2); static Path withoutSchemeAndAuthority(Path path); String getName(); Path getParent(); Path resolve(Path path); Path resolve(String path); boolean isAbsolute(); int depth(); URI toURI(); @Override String toString(); @Override int hashCode(); @Override boolean equals(Object obj); @Override int compareTo(Path that); static URI toURI(String uri); static String toString(Path path); static final String SEPARATOR; static final char SEPARATOR_CHAR; } | @Test public void testMergePaths() { checkMergePaths(Path.of("hdfs: checkMergePaths(Path.of("hdfs: checkMergePaths(Path.of("hdfs: checkMergePaths(Path.of("hdfs: checkMergePaths(Path.of("."), Path.of("foo"), Path.of("foo")); checkMergePaths(Path.of("foo"), Path.of("."), Path.of("foo")); checkMergePaths(Path.of("/"), Path.of("."), Path.of("/")); } |
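The separator handling in mergePaths reduces to joining the two path strings with exactly one '/' at the seam, while path1's scheme and authority are carried over separately. A standalone sketch of just that string logic:

public class MergePathsSketch {
    static String merge(String p1, String p2) {
        if (p2.isEmpty()) {
            return p1; // nothing to append
        }
        StringBuilder sb = new StringBuilder(p1);
        if (!p1.isEmpty() && p1.charAt(p1.length() - 1) != '/') {
            sb.append('/'); // ensure a single separator at the join point
        }
        sb.append(p2.charAt(0) == '/' ? p2.substring(1) : p2);
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(merge("/a/b", "c/d"));  // /a/b/c/d
        System.out.println(merge("/a/b/", "/c"));  // /a/b/c
    }
}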
Path implements Comparable<Path> { public String getName() { final String path = uri.getPath(); int index = path.lastIndexOf(SEPARATOR_CHAR); return path.substring(index + 1); } private Path(URI uri); static Path of(URI uri); static Path of(String path); static Path mergePaths(Path path1, Path path2); static Path withoutSchemeAndAuthority(Path path); String getName(); Path getParent(); Path resolve(Path path); Path resolve(String path); boolean isAbsolute(); int depth(); URI toURI(); @Override String toString(); @Override int hashCode(); @Override boolean equals(Object obj); @Override int compareTo(Path that); static URI toURI(String uri); static String toString(Path path); static final String SEPARATOR; static final char SEPARATOR_CHAR; } | @Test public void testGetName() { checkGetName(Path.of("/foo/bar"), "bar"); checkGetName(Path.of("/foo/bar baz"), "bar baz"); checkGetName(Path.of(" checkGetName(Path.of("foo/bar"), "bar"); checkGetName(Path.of("hdfs: checkGetName(Path.of("hdfs: checkGetName(Path.of("file:/foo/bar baz"), "bar baz"); checkGetName(Path.of("webhdfs: } |
Path implements Comparable<Path> { public boolean isAbsolute() { return uri.getPath().startsWith(SEPARATOR); } private Path(URI uri); static Path of(URI uri); static Path of(String path); static Path mergePaths(Path path1, Path path2); static Path withoutSchemeAndAuthority(Path path); String getName(); Path getParent(); Path resolve(Path path); Path resolve(String path); boolean isAbsolute(); int depth(); URI toURI(); @Override String toString(); @Override int hashCode(); @Override boolean equals(Object obj); @Override int compareTo(Path that); static URI toURI(String uri); static String toString(Path path); static final String SEPARATOR; static final char SEPARATOR_CHAR; } | @Test public void testIsAbsolute() { checkIsAbsolute(Path.of("/"), true); checkIsAbsolute(Path.of("/foo"), true); checkIsAbsolute(Path.of("/foo/bar"), true); checkIsAbsolute(Path.of("foo"), false); checkIsAbsolute(Path.of("foo/bar"), false); checkIsAbsolute(Path.of(URI.create("")), false); checkIsAbsolute(Path.of("."), false); } |
Path implements Comparable<Path> { public int depth() { final String path = uri.getPath(); if (path.charAt(0) == SEPARATOR_CHAR && path.length() == 1) { return 0; } int depth = 0; for (int i = 0 ; i < path.length(); i++) { if (path.charAt(i) == SEPARATOR_CHAR) { depth++; } } return depth; } private Path(URI uri); static Path of(URI uri); static Path of(String path); static Path mergePaths(Path path1, Path path2); static Path withoutSchemeAndAuthority(Path path); String getName(); Path getParent(); Path resolve(Path path); Path resolve(String path); boolean isAbsolute(); int depth(); URI toURI(); @Override String toString(); @Override int hashCode(); @Override boolean equals(Object obj); @Override int compareTo(Path that); static URI toURI(String uri); static String toString(Path path); static final String SEPARATOR; static final char SEPARATOR_CHAR; } | @Test public void testDepth() { checkDepth(Path.of("/"), 0); checkDepth(Path.of("/foo"), 1); checkDepth(Path.of("/foo/bar"), 2); checkDepth(Path.of("foo"), 0); } |
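The depth rule is simply the count of '/' characters, with the lone root "/" special-cased to 0; a standalone restatement that reproduces the expectations in the test:

public class DepthSketch {
    static int depth(String path) {
        if (path.length() == 1 && path.charAt(0) == '/') {
            return 0; // root alone has depth 0
        }
        int d = 0;
        for (int i = 0; i < path.length(); i++) {
            if (path.charAt(i) == '/') {
                d++;
            }
        }
        return d;
    }

    public static void main(String[] args) {
        System.out.println(depth("/"));        // 0
        System.out.println(depth("/foo/bar")); // 2
        System.out.println(depth("foo"));      // 0
    }
}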
ServiceRegistry implements Service { public <T extends Service> T replace(@Nullable T service) { if (service == null) { return null; } final Service toReplace = wrapService(service); for(ListIterator<Service> it = services.listIterator(); it.hasNext(); ) { Service s = it.next(); if (toReplace.equals(s)) { it.remove(); try { s.close(); } catch (Exception e) { logger.warn("Exception when closing service {}", s, e); } it.add(toReplace); return service; } } throw new IllegalArgumentException("Trying to replace an unregistered service"); } ServiceRegistry(); @VisibleForTesting ServiceRegistry(boolean timerEnabled); T register(@Nullable T service); T replace(@Nullable T service); @Override void start(); @Override synchronized void close(); } | @Test public void testReplace() throws Exception { doTestReplace(false); } |
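replace splices the new service into the old one's list position via ListIterator; a minimal sketch of the same remove-close-add pattern using plain AutoCloseables:

import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;

public class ReplaceSketch {
    public static void main(String[] args) throws Exception {
        List<AutoCloseable> services = new ArrayList<>();
        AutoCloseable old = () -> System.out.println("closed old");
        services.add(old);
        AutoCloseable replacement = () -> { };
        for (ListIterator<AutoCloseable> it = services.listIterator(); it.hasNext(); ) {
            AutoCloseable s = it.next();
            if (s == old) { // the real code compares wrapped services with equals()
                it.remove();
                s.close(); // close the service being replaced
                it.add(replacement); // new service takes the old slot
                break;
            }
        }
        System.out.println(services.size()); // still 1, now holding the replacement
    }
}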
DremioConfig extends NestedConfig { public static DremioConfig create() { return create(null); } private DremioConfig(SabotConfig sabot, Config unresolved, Config reference, String thisNode); @Override DremioConfig withValue(String path, ConfigValue value); DremioConfig withSabotValue(String path, ConfigValue value); DremioConfig withSabotValue(String path, Object value); SabotConfig getSabotConfig(); boolean isMasterlessEnabled(); DremioConfig withValue(String path, Object value); URI getURI(String path); static DremioConfig create(); static DremioConfig create(final URL userConfigPath); static DremioConfig create(final URL userConfigPath, SabotConfig sabotConfig); String getThisNode(); static Path getPluginsRootPath(); static final String LOCAL_WRITE_PATH_STRING; static final String DIST_WRITE_PATH_STRING; static final String NODE_TAG; static final String ENABLE_COORDINATOR_BOOL; static final String ENABLE_MASTER_BOOL; static final String ENABLE_EXECUTOR_BOOL; static final String EMBEDDED_MASTER_ZK_ENABLED_BOOL; static final String EMBEDDED_MASTER_ZK_ENABLED_PORT_INT; static final String EMBEDDED_MASTER_ZK_ENABLED_PATH_STRING; static final String WEB_ENABLED_BOOL; static final String WEB_AUTH_TYPE; static final String WEB_PORT_INT; static final String WEB_TOKEN_CACHE_SIZE; static final String SCHEDULER_SERVICE_THREAD_COUNT; static final String WEB_TOKEN_CACHE_EXPIRATION; static final String TASK_ON_IDLE_LOAD_SHED; static final String TASK_RESCHEDULE_ON_UNBLOCK; static final String TASK_EAGER_LOAD_SHED; static final String KERBEROS_PRINCIPAL; static final String KERBEROS_KEYTAB_PATH; static final String JOBS_ENABLED_BOOL; static final String NO_OP_CLUSTER_COORDINATOR_ENABLED; static final String WEB_UI_SERVICE_CONFIG; static final String PLUGINS_ROOT_PATH_PROPERTY; static final String LEGACY_STORE_VIEWS_ENABLED; static final String CLIENT_PORT_INT; static final String SERVER_PORT_INT; static final String CONDUIT_PORT_INT; static final String AUTOUPGRADE; static final String REGISTRATION_ADDRESS; static final String DB_PATH_STRING; static final String ACCELERATOR_PATH_STRING; static final String DOWNLOADS_PATH_STRING; static final String UPLOADS_PATH_STRING; static final String RESULTS_PATH_STRING; static final String SCRATCH_PATH_STRING; static final String SPILLING_PATH_STRING; static final String ZOOKEEPER_QUORUM; static final String ZK_CLIENT_SESSION_TIMEOUT; static final String ZK_CLIENT_RETRY_UNLIMITED; static final String ZK_CLIENT_RETRY_LIMIT; static final String ZK_CLIENT_INITIAL_TIMEOUT_MS; static final String YARN_ENABLED_BOOL; static final String YARN_JVM_OPTIONS; static final String YARN_CLASSPATH; static final String YARN_APP_CLASSPATH; static final String YARN_APP_CLASSPATH_PREFIX; static final String EC2_EFS_MOUNT_TARGET_IP_ADDRESS; static final String MIGRATION_ENABLED; static final String NETTY_REFLECTIONS_ACCESSIBLE; static final String DEBUG_OPTIONS; static final String DEBUG_YARN_ENABLED; static final String DEBUG_ENABLED_BOOL; static final String DEBUG_PREPOPULATE_BOOL; static final String DEBUG_AUTOPORT_BOOL; static final String DEBUG_SINGLE_NODE_BOOL; static final String DEBUG_ALLOW_TEST_APIS_BOOL; static final String DEBUG_USE_MEMORY_STRORAGE_BOOL; static final String DEBUG_FORCE_REMOTE_BOOL; static final String DEBUG_ADD_DEFAULT_USER; static final String DEBUG_ALLOW_NEWER_KVSTORE; static final String DEBUG_DISABLE_MASTER_ELECTION_SERVICE_BOOL; static final String DEBUG_DIST_ASYNC_ENABLED; static final String DEBUG_DIST_CACHING_ENABLED; static final String 
DEBUG_DIST_MAX_CACHE_SPACE_PERCENT; static final String DEBUG_UPLOADS_ASYNC_ENABLED; static final String DEBUG_SUPPORT_ASYNC_ENABLED; static final String DEBUG_JOBS_ASYNC_ENABLED; static final String DEBUG_SCRATCH_ASYNC_ENABLED; static final String DEBUG_DOWNLOAD_ASYNC_ENABLED; static final String DEBUG_LOGS_ASYNC_ENABLED; static final String DEBUG_DIST_S3_FILE_STATUS_CHECK; static final String FABRIC_MEMORY_RESERVATION; static final String SSL_ENABLED; static final String SSL_KEY_STORE_TYPE; static final String SSL_KEY_STORE_PATH; static final String SSL_KEY_STORE_PASSWORD; static final String SSL_KEY_PASSWORD; static final String SSL_TRUST_STORE_TYPE; static final String SSL_TRUST_STORE_PATH; static final String SSL_TRUST_STORE_PASSWORD; static final String SSL_DISABLE_HOST_VERIFICATION; static final String SSL_AUTO_GENERATED_CERTIFICATE; static final String WEB_SSL_PREFIX; static final String DATASTORE_TYPE; static final String DATASTORE_CONFIG; static final String LIVENESS_ENABLED; static final String LIVENESS_PORT; static final String POLL_TIMEOUT_MS; static final String POLL_INTERVAL_MS; static final String MISSED_POLLS_BEFORE_KILL; static final String MAX_KILL_ATTEMPTS; static final String KILL_REATTEMPT_INTERVAL_MS; static final String REMOTE_DATASTORE_RPC_TIMEOUT_SECS; } | @Test public void initialize() { DremioConfig config = DremioConfig.create(); }
@Test public void fileOverride() { properties.clear(DremioConfig.EMBEDDED_MASTER_ZK_ENABLED_BOOL); properties.clear(DremioConfig.LOCAL_WRITE_PATH_STRING); properties.clear(DremioConfig.DB_PATH_STRING); properties.clear("dremd.write"); DremioConfig config = DremioConfig.create(getClass().getResource("/test-dremio.conf")); assertEquals(false, config.getBoolean(DremioConfig.EMBEDDED_MASTER_ZK_ENABLED_BOOL)); assertEquals("/tmp/crazydir/db", config.getString(DremioConfig.DB_PATH_STRING)); assertEquals("pdfs: }
@Test public void distOverride() { properties.clear(DremioConfig.EMBEDDED_MASTER_ZK_ENABLED_BOOL); properties.clear(DremioConfig.LOCAL_WRITE_PATH_STRING); properties.clear(DremioConfig.DB_PATH_STRING); properties.clear("dremd.write"); DremioConfig config = DremioConfig.create(getClass().getResource("/test-dremio4.conf")); assertEquals("/tmp/foobar/db", config.getString(DremioConfig.DB_PATH_STRING)); assertEquals("pdfs: assertEquals("pdfs: }
@Test public void systemOverFile(){ final String path = "my.fave.path"; properties.set(DremioConfig.DB_PATH_STRING, path); DremioConfig config = DremioConfig.create(getClass().getResource("/test-dremio.conf")); assertEquals(path, config.getString(DremioConfig.DB_PATH_STRING)); }
@Test public void arrayProperties() throws Exception { DremioConfig config = DremioConfig.create(getClass().getResource("/test-dremio3.conf")); String property = (config.getStringList(DremioConfig.SPILLING_PATH_STRING)).toString(); final String path = property; properties.set(DremioConfig.SPILLING_PATH_STRING, path); DremioConfig configNew = DremioConfig.create(getClass().getResource("/test-dremio.conf")); assertEquals(path, configNew.getStringList(DremioConfig.SPILLING_PATH_STRING).toString()); assertEquals(config.getStringList(DremioConfig.SPILLING_PATH_STRING), configNew.getStringList(DremioConfig .SPILLING_PATH_STRING)); }
@Test public void badProperty() { exception.expect(RuntimeException.class); exception.expectMessage("mistyped-property"); @SuppressWarnings("unused") DremioConfig config = DremioConfig.create(getClass().getResource("/test-dremio2.conf")); }
@Test public void legacySystemProp() { String name = "dremio_autoPort"; properties.set(name, "true"); assertEquals(true, DremioConfig.create().getBoolean(DremioConfig.DEBUG_AUTOPORT_BOOL)); }
@Test public void newSystemProp() { String name = DremioConfig.DEBUG_AUTOPORT_BOOL; properties.set(name, "false"); assertEquals(false, DremioConfig.create().getBoolean(name)); properties.set(name, "true"); assertEquals(true, DremioConfig.create().getBoolean(name)); }
@Test public void newSystemPropWithDependency() { String name = DremioConfig.LOCAL_WRITE_PATH_STRING; properties.set(name, "my.special.path"); assertEquals("my.special.path/db", DremioConfig.create().getString(DremioConfig.DB_PATH_STRING)); } |
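The last three tests rely on the usual Typesafe Config layering, where system properties take precedence over file values; a minimal illustration of that precedence (the key name here is hypothetical):

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class PrecedenceSketch {
    public static void main(String[] args) {
        System.setProperty("db.path", "my.fave.path");
        ConfigFactory.invalidateCaches(); // make the property visible to systemProperties()
        Config config = ConfigFactory.systemProperties()
            .withFallback(ConfigFactory.parseString("db.path = \"/opt/dremio/db\""));
        System.out.println(config.getString("db.path")); // my.fave.path wins over the file value
    }
}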
ExtractListRecommender extends Recommender<ExtractListRule, Selection> { @Override public TransformRuleWrapper<ExtractListRule> wrapRule(ExtractListRule rule) { switch (rule.getType()) { case single: return new ExtractListSingleTransformRuleWrapper(rule); case multiple: return new ExtractListMultipleTransformRuleWrapper(rule); default: throw UserException.unsupportedError() .message("Unsupported list extract type: " + rule.getType()) .build(logger); } } @Override List<ExtractListRule> getRules(Selection selection, DataType selColType); @Override TransformRuleWrapper<ExtractListRule> wrapRule(ExtractListRule rule); } | @Test public void testExtractListCardGen() throws Exception { { ExtractListRule rule = new ExtractRuleSingle(0).wrap(); TransformRuleWrapper<ExtractListRule> wrapper = recommender.wrapRule(rule); assertEquals("\"tbl name\".foo[0] IS NOT NULL", wrapper.getMatchFunctionExpr("\"tbl name\".foo")); assertEquals("\"tbl name\".foo[0]", wrapper.getFunctionExpr("\"tbl name\".foo")); assertEquals("Element: 0", wrapper.describe()); validate(dataFile, wrapper, new Object[0], list((Object)"aa", "ddd", "fffff", null), list(true, true, true, false), null ); } { ExtractListRule rule = new ExtractRuleMultiple(new ListSelection(new Offset(1, FROM_THE_START), new Offset(2, FROM_THE_START))).wrap(); TransformRuleWrapper<ExtractListRule> wrapper = recommender.wrapRule(rule); assertEquals("array_length(sublist(tbl.foo, 2, 2)) > 0", wrapper.getMatchFunctionExpr("tbl.foo")); assertEquals("sublist(tbl.foo, 2, 2)", wrapper.getFunctionExpr("tbl.foo")); assertEquals("Elements: 1 - 2", wrapper.describe()); validate(dataFile, wrapper, new Object[0], list((Object)list("bbb", "cccc"), list("e"), null, null), list(true, true, false, false), null ); } { ExtractListRule rule = new ExtractRuleMultiple(new ListSelection(new Offset(1, FROM_THE_END), new Offset(0, FROM_THE_END))).wrap(); TransformRuleWrapper<ExtractListRule> wrapper = recommender.wrapRule(rule); assertEquals("array_length(sublist(tbl.foo, -2, 2)) > 0", wrapper.getMatchFunctionExpr("tbl.foo")); assertEquals("sublist(tbl.foo, -2, 2)", wrapper.getFunctionExpr("tbl.foo")); assertEquals("Elements: 1 - 0 (both from the end)", wrapper.describe()); validate(dataFile, wrapper, new Object[0], list((Object)list("bbb", "cccc"), list("ddd", "e"), null, null), list(true, true, false, false), null ); } { ExtractListRule rule = new ExtractRuleMultiple(new ListSelection(new Offset(1, FROM_THE_START), new Offset(1, FROM_THE_END))).wrap(); TransformRuleWrapper<ExtractListRule> wrapper = recommender.wrapRule(rule); assertEquals("array_length(sublist(tbl.foo, 2, array_length(tbl.foo) - 2)) > 0", wrapper.getMatchFunctionExpr("tbl.foo")); assertEquals("sublist(tbl.foo, 2, array_length(tbl.foo) - 2)", wrapper.getFunctionExpr("tbl.foo")); assertEquals("Elements: 1 - 1 (from the end)", wrapper.describe()); validate(dataFile, wrapper, new Object[0], list((Object)list("bbb"), null, null, null), list(true, false, false, false), null ); } { ExtractListRule rule = new ExtractRuleMultiple(new ListSelection(new Offset(2, FROM_THE_END), new Offset(2, FROM_THE_START))).wrap(); TransformRuleWrapper<ExtractListRule> wrapper = recommender.wrapRule(rule); assertEquals("array_length(sublist(tbl.foo, -3, -array_length(tbl.foo) + 6)) > 0", wrapper.getMatchFunctionExpr("tbl.foo")); assertEquals("sublist(tbl.foo, -3, -array_length(tbl.foo) + 6)", wrapper.getFunctionExpr("tbl.foo")); assertEquals("Elements: 2 (from the end) - 2", wrapper.describe()); validate(dataFile, wrapper, new 
Object[0], list((Object)list("aa", "bbb", "cccc"), null, null, null), list(true, false, false, false), null ); } } |
MemoryDebugInfo { public static String getDetailsOnAllocationFailure(OutOfMemoryException exception, BufferAllocator allocator) { BufferAllocator failedAtAllocator = null; StringBuilder sb = new StringBuilder(); Optional<AllocationOutcomeDetails> outcomeDetails = exception.getOutcomeDetails(); if (outcomeDetails.isPresent()) { sb.append(outcomeDetails.get().toString()); failedAtAllocator = outcomeDetails.get().getFailedAllocator(); } String summary; if (failedAtAllocator == null) { summary = getSummaryFromRoot(allocator); } else if (failedAtAllocator.getParentAllocator() == null) { summary = getSummaryFromRoot(failedAtAllocator); } else { summary = getSummary(failedAtAllocator, 3); } sb.append("\nAllocator dominators:\n"); sb.append(summary); return sb.toString(); } static String getSummaryFromRoot(BufferAllocator allocator); static String getDetailsOnAllocationFailure(OutOfMemoryException exception, BufferAllocator allocator); } | @Test public void testWithFullNode() { try ( BufferAllocator root = allocatorRule.newAllocator("test-memory-debug-info", 0, 1024 * 1024); BufferAllocator child = root.newChildAllocator("child", 0, 16 * 1024); BufferAllocator grandChild1 = child.newChildAllocator("grandchild1", 0, 64 * 1024); BufferAllocator grandChild2 = child.newChildAllocator("grandchild2", 0, 64 * 1024)) { try (ArrowBuf buf = grandChild2.buffer(32 * 1024)) { assertTrue("expected allocation above limit to fail", false); } catch (OutOfMemoryException e) { String info = MemoryDebugInfo.getDetailsOnAllocationFailure(e, grandChild2); assertTrue(info.contains("child")); assertTrue(info.contains("grandchild1")); assertTrue(info.contains("grandchild2")); assertFalse(info.contains("test-memory-debug-info")); } } }
@Test public void testWithRoot() { try ( BufferAllocator root = allocatorRule.newAllocator("test-memory-debug-info", 0, 1024 * 1024); BufferAllocator child = root.newChildAllocator("child", 0, Integer.MAX_VALUE); BufferAllocator grandChild1 = child.newChildAllocator("grandchild1", 0, Integer.MAX_VALUE); BufferAllocator grandChild2 = child.newChildAllocator("grandchild2", 0, Integer.MAX_VALUE)) { try (ArrowBuf buf = grandChild2.buffer(2 * 1024 * 1024)) { assertTrue("expected allocation above limit to fail", false); } catch (OutOfMemoryException e) { String info = MemoryDebugInfo.getDetailsOnAllocationFailure(e, grandChild2); assertTrue(info.contains("test-memory-debug-info")); assertTrue(info.contains("child")); assertTrue(info.contains("grandchild1")); assertTrue(info.contains("grandchild2")); } } }
@Test public void testPrune() throws Exception { try (RollbackCloseable closeable = new RollbackCloseable(true)) { BufferAllocator root = allocatorRule.newAllocator("test-memory-debug-info", 0, 1024 * 1024); closeable.add(root); BufferAllocator twig = root.newChildAllocator("twig",0, 1024 * 1024); closeable.add(twig); for (int i = 0; i < 20; ++i) { boolean isBig = (i % 2 == 0); BufferAllocator child = twig.newChildAllocator((isBig ? "big" : "small") + i, 0, Integer.MAX_VALUE); closeable.add(child); if (isBig) { closeable.add(child.buffer(8192)); } else { closeable.add(child.buffer(4096)); } } try (ArrowBuf buf = twig.buffer(1024 * 1024)) { assertTrue("expected allocation above limit to fail", false); } catch (OutOfMemoryException e) { String info = MemoryDebugInfo.getDetailsOnAllocationFailure(e, twig); assertTrue(!info.contains("test-memory-debug-info")); assertTrue(info.contains("twig")); assertTrue(info.contains("big")); assertTrue(!info.contains("small")); } } } |
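testPrune expects the failure summary to retain only the heavyweight children; a hypothetical sketch of that kind of pruning, keeping the top-N children by allocated bytes (the real dominator selection inside getSummary is not shown in the snippet):

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class PruneSketch {
    record Node(String name, long allocated) { }

    static List<Node> topByUsage(List<Node> children, int keep) {
        return children.stream()
            .sorted(Comparator.comparingLong(Node::allocated).reversed())
            .limit(keep)
            .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<Node> kids = List.of(
            new Node("big0", 8192), new Node("small1", 4096),
            new Node("big2", 8192), new Node("small3", 4096));
        System.out.println(topByUsage(kids, 2)); // only the 8 KiB "big" allocators survive
    }
}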