id (int64, 1 to 194k) | buggy (string, lengths 23 to 37.5k) | fixed (string, lengths 6 to 37.4k) |
---|---|---|
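Each row below pairs a buggy Java snippet, with the faulty span wrapped in <BUG>...</BUG>, against its fixed counterpart; the lone | lines mark the cell boundaries. As a minimal sketch only, assuming this corpus is published as a Hugging Face dataset (the identifier below is a hypothetical placeholder, not the real name), the three columns could be read like this:

```python
# Sketch under stated assumptions: "some-org/bug-fix-pairs" is a hypothetical
# placeholder; the actual dataset identifier is not given in this excerpt.
from datasets import load_dataset

ds = load_dataset("some-org/bug-fix-pairs", split="train")
example = ds[0]
print(example["id"])     # int64 row identifier
print(example["buggy"])  # buggy snippet; <BUG>...</BUG> marks the faulty span
print(example["fixed"])  # corresponding corrected snippet
```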
1,101 |
@JsonProperty("finishUser") Duration finishUser,
@JsonProperty("memoryReservation") DataSize memoryReservation,
@JsonProperty("systemMemoryReservation") DataSize systemMemoryReservation,
@JsonProperty("blockedReason") Optional<BlockedReason> blockedReason,
@JsonProperty("info") OperatorInfo info)
<BUG>{
checkArgument(operatorId >= 0, "operatorId is negative");</BUG>
this.operatorId = operatorId;
this.planNodeId = requireNonNull(planNodeId, "planNodeId is null");
this.operatorType = requireNonNull(operatorType, "operatorType is null");
|
@JsonProperty("finishUser") Duration finishUser,
@JsonProperty("memoryReservation") DataSize memoryReservation,
@JsonProperty("systemMemoryReservation") DataSize systemMemoryReservation,
@JsonProperty("blockedReason") Optional<BlockedReason> blockedReason,
@JsonProperty("info") OperatorInfo info)
{
this.pipelineId = pipelineId;
checkArgument(operatorId >= 0, "operatorId is negative");
this.operatorId = operatorId;
this.planNodeId = requireNonNull(planNodeId, "planNodeId is null");
this.operatorType = requireNonNull(operatorType, "operatorType is null");
|
1,102 |
OperatorInfo info = operator.getInfo();
if (base != null && info != null && base.getClass() == info.getClass()) {
base = mergeInfo(base, info);
}
}
<BUG>return new OperatorStats(
operatorId,</BUG>
planNodeId,
operatorType,
totalDrivers,
|
OperatorInfo info = operator.getInfo();
if (base != null && info != null && base.getClass() == info.getClass()) {
base = mergeInfo(base, info);
}
}
return new OperatorStats(
pipelineId,
operatorId,
planNodeId,
operatorType,
totalDrivers,
|
1,103 |
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.util.Objects.requireNonNull;
public class DriverFactory
implements Closeable
<BUG>{
private final boolean inputDriver;</BUG>
private final boolean outputDriver;
private final List<OperatorFactory> operatorFactories;
private final Optional<PlanNodeId> sourceId;
|
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.util.Objects.requireNonNull;
public class DriverFactory
implements Closeable
{
private final int pipelineId;
private final boolean inputDriver;
private final boolean outputDriver;
private final List<OperatorFactory> operatorFactories;
private final Optional<PlanNodeId> sourceId;
|
1,104 |
private final boolean outputDriver;
private final List<OperatorFactory> operatorFactories;
private final Optional<PlanNodeId> sourceId;
private final OptionalInt driverInstances;
private boolean closed;
<BUG>public DriverFactory(boolean inputDriver, boolean outputDriver, List<OperatorFactory> operatorFactories, OptionalInt driverInstances)
{
this.inputDriver = inputDriver;</BUG>
this.outputDriver = outputDriver;
|
private final boolean outputDriver;
private final List<OperatorFactory> operatorFactories;
private final Optional<PlanNodeId> sourceId;
private final OptionalInt driverInstances;
private boolean closed;
public DriverFactory(int pipelineId, boolean inputDriver, boolean outputDriver, List<OperatorFactory> operatorFactories, OptionalInt driverInstances)
{
this.pipelineId = pipelineId;
this.inputDriver = inputDriver;
this.outputDriver = outputDriver;
|
1,105 |
.filter(SourceOperatorFactory.class::isInstance)
.map(SourceOperatorFactory.class::cast)
.map(SourceOperatorFactory::getSourceId)
.collect(toImmutableList());
checkArgument(sourceIds.size() <= 1, "Expected at most one source operator in driver facotry, but found %s", sourceIds);
<BUG>this.sourceId = sourceIds.isEmpty() ? Optional.empty() : Optional.of(sourceIds.get(0));
}</BUG>
public boolean isInputDriver()
{
return inputDriver;
|
.filter(SourceOperatorFactory.class::isInstance)
.map(SourceOperatorFactory.class::cast)
.map(SourceOperatorFactory::getSourceId)
.collect(toImmutableList());
checkArgument(sourceIds.size() <= 1, "Expected at most one source operator in driver facotry, but found %s", sourceIds);
this.sourceId = sourceIds.isEmpty() ? Optional.empty() : Optional.of(sourceIds.get(0));
}
public int getPipelineId()
{
return pipelineId;
}
public boolean isInputDriver()
{
return inputDriver;
|
1,106 |
false,
Optional.empty(),
1_500_000,
1,
new PagesIndex.TestingFactory());
<BUG>DriverContext driverContext = taskContext.addPipelineContext(false, false).addDriverContext();
Driver driver = new DriverFactory(false, false, ImmutableList.of(ordersTableScan, hashBuilder), OptionalInt.empty()).createDriver(driverContext);
</BUG>
while (!hashBuilder.getLookupSourceFactory().createLookupSource().isDone()) {
|
false,
Optional.empty(),
1_500_000,
1,
new PagesIndex.TestingFactory());
DriverContext driverContext = taskContext.addPipelineContext(0, false, false).addDriverContext();
Driver driver = new DriverFactory(0, false, false, ImmutableList.of(ordersTableScan, hashBuilder), OptionalInt.empty()).createDriver(driverContext);
while (!hashBuilder.getLookupSourceFactory().createLookupSource().isDone()) {
|
1,107 |
lookupSourceFactory = hashBuilder.getLookupSourceFactory();
}
OperatorFactory lineItemTableScan = createTableScanOperator(0, new PlanNodeId("test"), "lineitem", "orderkey", "quantity");
OperatorFactory joinOperator = LOOKUP_JOIN_OPERATORS.innerJoin(1, new PlanNodeId("test"), lookupSourceFactory, lineItemTableScan.getTypes(), Ints.asList(0), Optional.empty(), Optional.empty());
NullOutputOperatorFactory output = new NullOutputOperatorFactory(2, new PlanNodeId("test"), joinOperator.getTypes());
<BUG>DriverFactory driverFactory = new DriverFactory(true, true, ImmutableList.of(lineItemTableScan, joinOperator, output), OptionalInt.empty());
DriverContext driverContext = taskContext.addPipelineContext(true, true).addDriverContext();
</BUG>
Driver driver = driverFactory.createDriver(driverContext);
|
lookupSourceFactory = hashBuilder.getLookupSourceFactory();
}
OperatorFactory lineItemTableScan = createTableScanOperator(0, new PlanNodeId("test"), "lineitem", "orderkey", "quantity");
OperatorFactory joinOperator = LOOKUP_JOIN_OPERATORS.innerJoin(1, new PlanNodeId("test"), lookupSourceFactory, lineItemTableScan.getTypes(), Ints.asList(0), Optional.empty(), Optional.empty());
NullOutputOperatorFactory output = new NullOutputOperatorFactory(2, new PlanNodeId("test"), joinOperator.getTypes());
DriverFactory driverFactory = new DriverFactory(1, true, true, ImmutableList.of(lineItemTableScan, joinOperator, output), OptionalInt.empty());
DriverContext driverContext = taskContext.addPipelineContext(1, true, true).addDriverContext();
Driver driver = driverFactory.createDriver(driverContext);
|
1,108 |
public OperatorStats getOperatorStats()
{
Supplier<OperatorInfo> infoSupplier = this.infoSupplier.get();
OperatorInfo info = Optional.ofNullable(infoSupplier).map(Supplier::get).orElse(null);
long inputPositionsCount = inputPositions.getTotalCount();
<BUG>return new OperatorStats(
operatorId,</BUG>
planNodeId,
operatorType,
1,
|
public OperatorStats getOperatorStats()
{
Supplier<OperatorInfo> infoSupplier = this.infoSupplier.get();
OperatorInfo info = Optional.ofNullable(infoSupplier).map(Supplier::get).orElse(null);
long inputPositionsCount = inputPositions.getTotalCount();
return new OperatorStats(
driverContext.getPipelineContext().getPipelineId(),
operatorId,
planNodeId,
operatorType,
1,
|
1,109 |
import static java.util.concurrent.TimeUnit.NANOSECONDS;
@ThreadSafe
public class PipelineContext
{
private final TaskContext taskContext;
<BUG>private final Executor executor;
private final boolean inputPipeline;</BUG>
private final boolean outputPipeline;
private final List<DriverContext> drivers = new CopyOnWriteArrayList<>();
private final AtomicInteger completedDrivers = new AtomicInteger();
|
import static java.util.concurrent.TimeUnit.NANOSECONDS;
@ThreadSafe
public class PipelineContext
{
private final TaskContext taskContext;
private final Executor executor;
private final int pipelineId;
private final boolean inputPipeline;
private final boolean outputPipeline;
private final List<DriverContext> drivers = new CopyOnWriteArrayList<>();
private final AtomicInteger completedDrivers = new AtomicInteger();
|
1,110 |
private final CounterStat processedInputDataSize = new CounterStat();
private final CounterStat processedInputPositions = new CounterStat();
private final CounterStat outputDataSize = new CounterStat();
private final CounterStat outputPositions = new CounterStat();
private final ConcurrentMap<Integer, OperatorStats> operatorSummaries = new ConcurrentHashMap<>();
<BUG>public PipelineContext(TaskContext taskContext, Executor executor, boolean inputPipeline, boolean outputPipeline)
{
this.inputPipeline = inputPipeline;</BUG>
this.outputPipeline = outputPipeline;
|
private final CounterStat processedInputDataSize = new CounterStat();
private final CounterStat processedInputPositions = new CounterStat();
private final CounterStat outputDataSize = new CounterStat();
private final CounterStat outputPositions = new CounterStat();
private final ConcurrentMap<Integer, OperatorStats> operatorSummaries = new ConcurrentHashMap<>();
public PipelineContext(int pipelineId, TaskContext taskContext, Executor executor, boolean inputPipeline, boolean outputPipeline)
{
this.pipelineId = pipelineId;
this.inputPipeline = inputPipeline;
this.outputPipeline = outputPipeline;
|
1,111 |
.flatMap(driver -> driver.getBlockedReasons().stream())
.collect(ImmutableCollectors.toImmutableSet());
boolean fullyBlocked = drivers.stream()
.filter(driver -> driver.getEndTime() == null && driver.getStartTime() != null)
.allMatch(DriverStats::isFullyBlocked);
<BUG>return new PipelineStats(
executionStartTime.get(),</BUG>
lastExecutionStartTime.get(),
lastExecutionEndTime.get(),
inputPipeline,
|
.flatMap(driver -> driver.getBlockedReasons().stream())
.collect(ImmutableCollectors.toImmutableSet());
boolean fullyBlocked = drivers.stream()
.filter(driver -> driver.getEndTime() == null && driver.getStartTime() != null)
.allMatch(DriverStats::isFullyBlocked);
return new PipelineStats(
pipelineId,
executionStartTime.get(),
lastExecutionStartTime.get(),
lastExecutionEndTime.get(),
inputPipeline,
|
1,112 |
false,
Optional.empty(),
1_500_000,
1,
new PagesIndex.TestingFactory());
<BUG>DriverFactory hashBuildDriverFactory = new DriverFactory(true, true, ImmutableList.of(ordersTableScan, hashBuilder), OptionalInt.empty());
Driver hashBuildDriver = hashBuildDriverFactory.createDriver(taskContext.addPipelineContext(true, true).addDriverContext());
</BUG>
hashBuildDriverFactory.close();
|
false,
Optional.empty(),
1_500_000,
1,
new PagesIndex.TestingFactory());
DriverFactory hashBuildDriverFactory = new DriverFactory(0, true, true, ImmutableList.of(ordersTableScan, hashBuilder), OptionalInt.empty());
Driver hashBuildDriver = hashBuildDriverFactory.createDriver(taskContext.addPipelineContext(0, true, true).addDriverContext());
hashBuildDriverFactory.close();
|
1,113 |
private final AtomicInteger pendingCreation = new AtomicInteger();
private final AtomicBoolean noMoreSplits = new AtomicBoolean();
private DriverSplitRunnerFactory(DriverFactory driverFactory)
{
this.driverFactory = driverFactory;
<BUG>this.pipelineContext = taskContext.addPipelineContext(driverFactory.isInputDriver(), driverFactory.isOutputDriver());
</BUG>
}
private DriverSplitRunner createDriverRunner(@Nullable ScheduledSplit partitionedSplit, boolean partitioned)
{
|
private final AtomicInteger pendingCreation = new AtomicInteger();
private final AtomicBoolean noMoreSplits = new AtomicBoolean();
private DriverSplitRunnerFactory(DriverFactory driverFactory)
{
this.driverFactory = driverFactory;
this.pipelineContext = taskContext.addPipelineContext(driverFactory.getPipelineId(), driverFactory.isInputDriver(), driverFactory.isOutputDriver());
}
private DriverSplitRunner createDriverRunner(@Nullable ScheduledSplit partitionedSplit, boolean partitioned)
{
|
1,114 |
driversBuilder.add(source);
hashChannel = Optional.of(2);
}
HashBuilderOperatorFactory hashBuilder = new HashBuilderOperatorFactory(2, new PlanNodeId("test"), source.getTypes(), ImmutableList.of(0, 1), ImmutableMap.of(), Ints.asList(0), hashChannel, false, Optional.empty(), 1_500_000, 1, new PagesIndex.TestingFactory());
driversBuilder.add(hashBuilder);
<BUG>DriverFactory hashBuildDriverFactory = new DriverFactory(true, false, driversBuilder.build(), OptionalInt.empty());
Driver hashBuildDriver = hashBuildDriverFactory.createDriver(taskContext.addPipelineContext(true, false).addDriverContext());
</BUG>
hashBuildDriverFactory.close();
|
driversBuilder.add(source);
hashChannel = Optional.of(2);
}
HashBuilderOperatorFactory hashBuilder = new HashBuilderOperatorFactory(2, new PlanNodeId("test"), source.getTypes(), ImmutableList.of(0, 1), ImmutableMap.of(), Ints.asList(0), hashChannel, false, Optional.empty(), 1_500_000, 1, new PagesIndex.TestingFactory());
driversBuilder.add(hashBuilder);
DriverFactory hashBuildDriverFactory = new DriverFactory(0, true, false, driversBuilder.build(), OptionalInt.empty());
Driver hashBuildDriver = hashBuildDriverFactory.createDriver(taskContext.addPipelineContext(0, true, false).addDriverContext());
hashBuildDriverFactory.close();
|
1,115 |
private synchronized void initializeStateIfNecessary()
{
if (pipelineContext == null) {
TaskContext taskContext = taskContextReference.get();
checkState(taskContext != null, "Task context must be set before index can be built");
<BUG>pipelineContext = taskContext.addPipelineContext(false, false);
}</BUG>
if (indexSnapshotLoader == null) {
indexSnapshotLoader = new IndexSnapshotLoader(
indexBuildDriverFactoryProvider,
|
private synchronized void initializeStateIfNecessary()
{
if (pipelineContext == null) {
TaskContext taskContext = taskContextReference.get();
checkState(taskContext != null, "Task context must be set before index can be built");
pipelineContext = taskContext.addPipelineContext(indexBuildDriverFactoryProvider.getPipelineId(), false, false);
}
if (indexSnapshotLoader == null) {
indexSnapshotLoader = new IndexSnapshotLoader(
indexBuildDriverFactoryProvider,
|
1,116 |
package com.facebook.presto.execution;
import com.facebook.presto.Session;
import com.facebook.presto.execution.StateMachine.StateChangeListener;
import com.facebook.presto.execution.scheduler.SplitSchedulerStats;
import com.facebook.presto.operator.BlockedReason;
<BUG>import com.facebook.presto.operator.OperatorStats;
import com.facebook.presto.operator.TaskStats;</BUG>
import com.facebook.presto.sql.planner.PlanFragment;
import com.facebook.presto.util.Failures;
import com.google.common.collect.ImmutableList;
|
package com.facebook.presto.execution;
import com.facebook.presto.Session;
import com.facebook.presto.execution.StateMachine.StateChangeListener;
import com.facebook.presto.execution.scheduler.SplitSchedulerStats;
import com.facebook.presto.operator.BlockedReason;
import com.facebook.presto.operator.OperatorStats;
import com.facebook.presto.operator.PipelineStats;
import com.facebook.presto.operator.TaskStats;
import com.facebook.presto.sql.planner.PlanFragment;
import com.facebook.presto.util.Failures;
import com.google.common.collect.ImmutableList;
|
1,117 |
long processedInputPositions = 0;
long outputDataSize = 0;
long outputPositions = 0;
boolean fullyBlocked = true;
Set<BlockedReason> blockedReasons = new HashSet<>();
<BUG>Map<Integer, OperatorStats> operatorToStats = new HashMap<>();
</BUG>
for (TaskInfo taskInfo : taskInfos) {
TaskState taskState = taskInfo.getTaskStatus().getState();
if (taskState.isDone()) {
|
long processedInputPositions = 0;
long outputDataSize = 0;
long outputPositions = 0;
boolean fullyBlocked = true;
Set<BlockedReason> blockedReasons = new HashSet<>();
Map<String, OperatorStats> operatorToStats = new HashMap<>();
for (TaskInfo taskInfo : taskInfos) {
TaskState taskState = taskInfo.getTaskStatus().getState();
if (taskState.isDone()) {
|
1,118 |
rawInputPositions += taskStats.getRawInputPositions();
processedInputDataSize += taskStats.getProcessedInputDataSize().toBytes();
processedInputPositions += taskStats.getProcessedInputPositions();
outputDataSize += taskStats.getOutputDataSize().toBytes();
outputPositions += taskStats.getOutputPositions();
<BUG>taskStats.getPipelines().forEach(pipeline -> pipeline.getOperatorSummaries()
.forEach(operatorStats -> operatorToStats.compute(operatorStats.getOperatorId(), (k, v) -> v == null ? operatorStats : v.add(operatorStats))));
}</BUG>
StageStats stageStats = new StageStats(
|
rawInputPositions += taskStats.getRawInputPositions();
processedInputDataSize += taskStats.getProcessedInputDataSize().toBytes();
processedInputPositions += taskStats.getProcessedInputPositions();
outputDataSize += taskStats.getOutputDataSize().toBytes();
outputPositions += taskStats.getOutputPositions();
for (PipelineStats pipeline : taskStats.getPipelines()) {
for (OperatorStats operatorStats : pipeline.getOperatorSummaries()) {
String id = pipeline.getPipelineId() + "." + operatorStats.getOperatorId();
operatorToStats.compute(id, (k, v) -> v == null ? operatorStats : v.add(operatorStats));
}
}
}
StageStats stageStats = new StageStats(
|
1,119 |
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
@Immutable
public class PipelineStats
<BUG>{
private final DateTime firstStartTime;</BUG>
private final DateTime lastStartTime;
private final DateTime lastEndTime;
private final boolean inputPipeline;
|
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
@Immutable
public class PipelineStats
{
private final int pipelineId;
private final DateTime firstStartTime;
private final DateTime lastStartTime;
private final DateTime lastEndTime;
private final boolean inputPipeline;
|
1,120 |
private final DataSize outputDataSize;
private final long outputPositions;
private final List<OperatorStats> operatorSummaries;
private final List<DriverStats> drivers;
@JsonCreator
<BUG>public PipelineStats(
@JsonProperty("firstStartTime") DateTime firstStartTime,</BUG>
@JsonProperty("lastStartTime") DateTime lastStartTime,
@JsonProperty("lastEndTime") DateTime lastEndTime,
@JsonProperty("inputPipeline") boolean inputPipeline,
|
private final DataSize outputDataSize;
private final long outputPositions;
private final List<OperatorStats> operatorSummaries;
private final List<DriverStats> drivers;
@JsonCreator
public PipelineStats(
@JsonProperty("pipelineId") int pipelineId,
@JsonProperty("firstStartTime") DateTime firstStartTime,
@JsonProperty("lastStartTime") DateTime lastStartTime,
@JsonProperty("lastEndTime") DateTime lastEndTime,
@JsonProperty("inputPipeline") boolean inputPipeline,
|
1,121 |
@JsonProperty("processedInputPositions") long processedInputPositions,
@JsonProperty("outputDataSize") DataSize outputDataSize,
@JsonProperty("outputPositions") long outputPositions,
@JsonProperty("operatorSummaries") List<OperatorStats> operatorSummaries,
@JsonProperty("drivers") List<DriverStats> drivers)
<BUG>{
this.firstStartTime = firstStartTime;</BUG>
this.lastStartTime = lastStartTime;
this.lastEndTime = lastEndTime;
this.inputPipeline = inputPipeline;
|
@JsonProperty("processedInputPositions") long processedInputPositions,
@JsonProperty("outputDataSize") DataSize outputDataSize,
@JsonProperty("outputPositions") long outputPositions,
@JsonProperty("operatorSummaries") List<OperatorStats> operatorSummaries,
@JsonProperty("drivers") List<DriverStats> drivers)
{
this.pipelineId = pipelineId;
this.firstStartTime = firstStartTime;
this.lastStartTime = lastStartTime;
this.lastEndTime = lastEndTime;
this.inputPipeline = inputPipeline;
|
1,122 |
import static com.facebook.presto.operator.index.PageBufferOperator.PageBufferOperatorFactory;
import static com.facebook.presto.operator.index.PagesIndexBuilderOperator.PagesIndexBuilderOperatorFactory;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
public class IndexBuildDriverFactoryProvider
<BUG>{
private final int outputOperatorId;</BUG>
private final PlanNodeId planNodeId;
private final boolean inputDriver;
private final List<OperatorFactory> coreOperatorFactories;
|
import static com.facebook.presto.operator.index.PageBufferOperator.PageBufferOperatorFactory;
import static com.facebook.presto.operator.index.PagesIndexBuilderOperator.PagesIndexBuilderOperatorFactory;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
public class IndexBuildDriverFactoryProvider
{
private final int pipelineId;
private final int outputOperatorId;
private final PlanNodeId planNodeId;
private final boolean inputDriver;
private final List<OperatorFactory> coreOperatorFactories;
|
1,123 |
private final PlanNodeId planNodeId;
private final boolean inputDriver;
private final List<OperatorFactory> coreOperatorFactories;
private final List<Type> outputTypes;
private final Optional<DynamicTupleFilterFactory> dynamicTupleFilterFactory;
<BUG>public IndexBuildDriverFactoryProvider(int outputOperatorId, PlanNodeId planNodeId, boolean inputDriver, List<OperatorFactory> coreOperatorFactories, Optional<DynamicTupleFilterFactory> dynamicTupleFilterFactory)
</BUG>
{
requireNonNull(planNodeId, "planNodeId is null");
requireNonNull(coreOperatorFactories, "coreOperatorFactories is null");
|
private final PlanNodeId planNodeId;
private final boolean inputDriver;
private final List<OperatorFactory> coreOperatorFactories;
private final List<Type> outputTypes;
private final Optional<DynamicTupleFilterFactory> dynamicTupleFilterFactory;
public IndexBuildDriverFactoryProvider(int pipelineId, int outputOperatorId, PlanNodeId planNodeId, boolean inputDriver, List<OperatorFactory> coreOperatorFactories, Optional<DynamicTupleFilterFactory> dynamicTupleFilterFactory)
{
requireNonNull(planNodeId, "planNodeId is null");
requireNonNull(coreOperatorFactories, "coreOperatorFactories is null");
|
1,124 |
Iterators.addAll(published, metrics);
}
@Override
protected Scheduler scheduler() {
return Scheduler.newFixedRateSchedule(5, 1, TimeUnit.SECONDS);
<BUG>}
@Override
protected boolean isPublishMetaMetrics() {
return false;</BUG>
}
|
Iterators.addAll(published, metrics);
}
@Override
protected Scheduler scheduler() {
return Scheduler.newFixedRateSchedule(5, 1, TimeUnit.SECONDS);
}
|
1,125 |
return result;
}
@Test
public void testSearchWithTags() throws Exception {
verifySearchResultWithTags("/v3/metrics/search?target=tag", getSearchResultExpected("namespace", "myspace",
<BUG>"namespace", "yourspace",
"namespace", "system"));</BUG>
verifySearchResultWithTags("/v3/metrics/search?target=tag&tag=namespace:myspace",
getSearchResultExpected("app", "WordCount1"));
|
return result;
}
@Test
public void testSearchWithTags() throws Exception {
verifySearchResultWithTags("/v3/metrics/search?target=tag", getSearchResultExpected("namespace", "myspace",
"namespace", "yourspace"));
verifySearchResultWithTags("/v3/metrics/search?target=tag&tag=namespace:myspace",
getSearchResultExpected("app", "WordCount1"));
|
1,126 |
import co.cask.cdap.api.metrics.MetricValues;
import co.cask.cdap.common.io.BinaryDecoder;</BUG>
import co.cask.cdap.internal.io.DatumReader;
import co.cask.common.io.ByteBufferInputStream;
<BUG>import com.google.common.base.Function;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
</BUG>
import org.apache.twill.kafka.client.FetchedMessage;
|
import co.cask.cdap.api.metrics.MetricValues;
import co.cask.cdap.common.conf.Constants;
import co.cask.cdap.common.io.BinaryDecoder;
import co.cask.cdap.internal.io.DatumReader;
import co.cask.common.io.ByteBufferInputStream;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.apache.twill.kafka.client.FetchedMessage;
|
1,127 |
import org.apache.twill.kafka.client.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Iterator;
<BUG>import java.util.List;
public final class MetricsMessageCallback implements KafkaConsumer.MessageCallback {
private static final Logger LOG = LoggerFactory.getLogger(MetricsMessageCallback.class);
private final DatumReader<MetricValues> recordReader;</BUG>
private final Schema recordSchema;
|
import org.apache.twill.kafka.client.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
public final class MetricsMessageCallback implements KafkaConsumer.MessageCallback {
private static final Logger LOG = LoggerFactory.getLogger(MetricsMessageCallback.class);
private static final ImmutableMap<String, String> METRICS_PROCESSOR_CONTEXT =
ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, Constants.SYSTEM_NAMESPACE,
Constants.Metrics.Tag.COMPONENT, "metrics.processor");
private final DatumReader<MetricValues> recordReader;
private final Schema recordSchema;
|
1,128 |
this.recordSchema = recordSchema;
this.metricStore = metricStore;
}
@Override
public void onReceived(Iterator<FetchedMessage> messages) {
<BUG>final ByteBufferInputStream is = new ByteBufferInputStream(null);
List<MetricValues> records = ImmutableList.copyOf(
Iterators.filter(Iterators.transform(messages, new Function<FetchedMessage, MetricValues>() {
@Override
public MetricValues apply(FetchedMessage input) {</BUG>
try {
|
this.recordSchema = recordSchema;
this.metricStore = metricStore;
}
@Override
public void onReceived(Iterator<FetchedMessage> messages) {
final ByteBufferInputStream is = new ByteBufferInputStream(null);
List<MetricValues> records = Lists.newArrayList();
while (messages.hasNext()) {
FetchedMessage input = messages.next();
|
1,129 |
entity.timeUntilPortal = entity.getPortalCooldown();
if(entity instanceof EntityPlayerMP)
((EntityPlayerMP) entity).addChatMessage(new TextComponentTranslation("message.transportalizer.destinationBlocked"));
return;
}
<BUG>if(location.dim != entity.dimension)</BUG>
Teleport.teleportEntity(entity, location.dim, null, destTransportalizer.pos.getX() + 0.5, destTransportalizer.pos.getY() + 0.6, destTransportalizer.pos.getZ() + 0.5);
<BUG>else
teleportTo(entity, transportalizers.get(this.destId));</BUG>
entity.timeUntilPortal = entity.getPortalCooldown();
|
entity.timeUntilPortal = entity.getPortalCooldown();
if(entity instanceof EntityPlayerMP)
((EntityPlayerMP) entity).addChatMessage(new TextComponentTranslation("message.transportalizer.destinationBlocked"));
return;
|
1,130 |
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.WorldServer;
import com.mraof.minestuck.block.BlockChessTile;
import com.mraof.minestuck.block.MinestuckBlocks;
<BUG>import com.mraof.minestuck.util.ITeleporter;</BUG>
import com.mraof.minestuck.util.Location;
import com.mraof.minestuck.util.Teleport;
<BUG>public class TileEntitySkaiaPortal extends TileEntity implements ITeleporter
</BUG>
{
|
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.WorldServer;
import com.mraof.minestuck.block.BlockChessTile;
import com.mraof.minestuck.block.MinestuckBlocks;
import com.mraof.minestuck.util.Location;
import com.mraof.minestuck.util.Teleport;
public class TileEntitySkaiaPortal extends TileEntity implements Teleport.ITeleporter
{
|
1,131 |
import net.minecraftforge.fml.common.FMLCommonHandler;
public class Teleport
{
public static boolean teleportEntity(Entity entity, int destinationDimension, ITeleporter teleporter, BlockPos dest)
{
<BUG>return teleportEntity(entity, destinationDimension, teleporter, dest.getX(), dest.getY(), dest.getZ());
</BUG>
}
public static boolean teleportEntity(Entity entity, int destinationDimension, ITeleporter teleporter)
{
|
import net.minecraftforge.fml.common.FMLCommonHandler;
public class Teleport
{
public static boolean teleportEntity(Entity entity, int destinationDimension, ITeleporter teleporter, BlockPos dest)
{
return teleportEntity(entity, destinationDimension, teleporter, dest.getX() + 0.5, dest.getY(), dest.getZ() + 0.5);
}
public static boolean teleportEntity(Entity entity, int destinationDimension, ITeleporter teleporter)
{
|
1,132 |
public static boolean teleportEntity(Entity entity, int destinationDimension, ITeleporter teleporter)
{
return teleportEntity(entity, destinationDimension, teleporter, entity.posX, entity.posY, entity.posZ);
}
public static boolean teleportEntity(Entity entity, int destinationDimension, ITeleporter teleporter, double x, double y, double z)
<BUG>{
if(!ForgeHooks.onTravelToDimension(entity, destinationDimension))</BUG>
return false;
MinecraftServer mcServer = entity.getServer();
int prevDimension = entity.dimension;
|
public static boolean teleportEntity(Entity entity, int destinationDimension, ITeleporter teleporter)
{
return teleportEntity(entity, destinationDimension, teleporter, entity.posX, entity.posY, entity.posZ);
}
public static boolean teleportEntity(Entity entity, int destinationDimension, ITeleporter teleporter, double x, double y, double z)
{
if(destinationDimension == entity.dimension)
return localTeleport(entity, teleporter, x, y, z);
if(entity.worldObj.isRemote)
throw new IllegalArgumentException("Shouldn't do teleporting with a clientside entity.");
if(!ForgeHooks.onTravelToDimension(entity, destinationDimension))
return false;
MinecraftServer mcServer = entity.getServer();
int prevDimension = entity.dimension;
|
1,133 |
player.setWorld(worldDest);
playerList.preparePlayer(player, worldFrom);
player.connection.setPlayerLocation(player.posX, player.posY, player.posZ, player.rotationYaw, player.rotationPitch);
player.interactionManager.setWorld(worldDest);
player.connection.sendPacket(new SPacketPlayerAbilities(player.capabilities));
<BUG>mcServer.getPlayerList().updateTimeAndWeatherForPlayer(player, worldDest);
mcServer.getPlayerList().syncPlayerInventory(player);
</BUG>
Iterator<?> iterator = player.getActivePotionEffects().iterator();
|
player.setWorld(worldDest);
playerList.preparePlayer(player, worldFrom);
player.connection.setPlayerLocation(player.posX, player.posY, player.posZ, player.rotationYaw, player.rotationPitch);
player.interactionManager.setWorld(worldDest);
player.connection.sendPacket(new SPacketPlayerAbilities(player.capabilities));
playerList.updateTimeAndWeatherForPlayer(player, worldDest);
playerList.syncPlayerInventory(player);
Iterator<?> iterator = player.getActivePotionEffects().iterator();
|
1,134 |
player.connection.sendPacket(new SPacketEntityEffect(player.getEntityId(), potioneffect));
}
FMLCommonHandler.instance().firePlayerChangedDimensionEvent(player, prevDimension, destinationDimension);
return true;
}
<BUG>else if (!entity.worldObj.isRemote && !entity.isDead)
{</BUG>
Entity newEntity = EntityList.createEntityByName(EntityList.getEntityString(entity), worldDest);
if(newEntity == null)
return false;
|
player.connection.sendPacket(new SPacketEntityEffect(player.getEntityId(), potioneffect));
}
FMLCommonHandler.instance().firePlayerChangedDimensionEvent(player, prevDimension, destinationDimension);
return true;
}
else if (!entity.isDead)
{
Entity newEntity = EntityList.createEntityByName(EntityList.getEntityString(entity), worldDest);
if(newEntity == null)
return false;
|
1,135 |
import org.eclipse.xtext.resource.XtextResource;
import org.eclipse.xtext.scoping.IScope;
import org.eclipse.xtext.scoping.IScopeProvider;
import org.eclipse.xtext.ui.XtextProjectHelper;
import org.eclipse.xtext.util.RuntimeIOException;
<BUG>import org.eclipse.xtext.xbase.formatting.FormattingPreferenceValues;
import org.eclipse.xtext.xbase.formatting.IBasicFormatter;
import org.eclipse.xtext.xbase.formatting.IFormattingPreferenceValuesProvider;
import org.eclipse.xtext.xbase.formatting.TextReplacement;</BUG>
import com.google.common.collect.Sets;
|
import org.eclipse.xtext.resource.XtextResource;
import org.eclipse.xtext.scoping.IScope;
import org.eclipse.xtext.scoping.IScopeProvider;
import org.eclipse.xtext.ui.XtextProjectHelper;
import org.eclipse.xtext.util.RuntimeIOException;
import com.google.common.collect.Sets;
|
1,136 |
if (parseResult != null)
{
IPreferenceValues configuration = preferencesProvider.getPreferenceValues(outputResource);
try
{
<BUG>List<TextReplacement> edits = formatter.format(outputResource, 0, parseResult.getRootNode().getLength(), new FormattingPreferenceValues(configuration));
BufferedInputStream bufferedInputStream = new BufferedInputStream(resourceSet.getURIConverter().createInputStream(outputResource.getURI()));</BUG>
byte[] input = new byte[bufferedInputStream.available()];
bufferedInputStream.read(input);
bufferedInputStream.close();
|
if (parseResult != null)
{
IPreferenceValues configuration = preferencesProvider.getPreferenceValues(outputResource);
try
{
FormatterRequest request = new FormatterRequest();
request.setTextRegionAccess(new NodeModelBasedRegionAccessBuilder().withResource(outputResource).create());
request.setPreferences(new TypedPreferenceValues(configuration));
request.addRegion(parseResult.getRootNode().getTotalTextRegion());
List<ITextReplacement> edits = formatter.format(request);
BufferedInputStream bufferedInputStream = new BufferedInputStream(resourceSet.getURIConverter().createInputStream(outputResource.getURI()));
byte[] input = new byte[bufferedInputStream.available()];
bufferedInputStream.read(input);
bufferedInputStream.close();
|
1,137 |
bufferedInputStream.close();
String text = new String(input, outputResource.getEncoding());
StringBuilder builder = new StringBuilder(text);
for (int i = edits.size(); --i >= 0; )
{
<BUG>TextReplacement replacement = edits.get(i);
builder.replace(replacement.getOffset(), replacement.getOffset() + replacement.getLength(), replacement.getText());
</BUG>
}
|
bufferedInputStream.close();
String text = new String(input, outputResource.getEncoding());
StringBuilder builder = new StringBuilder(text);
for (int i = edits.size(); --i >= 0; )
{
ITextReplacement replacement = edits.get(i);
int offset = replacement.getOffset();
builder.replace(offset, offset + replacement.getLength(), replacement.getReplacementText());
}
|
1,138 |
} else if (!outstandingMessageCount.tryAcquire(messages)) {
throw new MaxOutstandingMessagesReachedException(maxOutstandingMessages.get());
}
}
if (outstandingByteCount != null) {
<BUG>int permitsToDraw = maxOutstandingBytes.get() > bytes ? bytes : maxOutstandingBytes.get();
if (!failOnLimits) {</BUG>
outstandingByteCount.acquireUninterruptibly(permitsToDraw);
} else if (!outstandingByteCount.tryAcquire(permitsToDraw)) {
throw new MaxOutstandingBytesReachedException(maxOutstandingBytes.get());
|
} else if (!outstandingMessageCount.tryAcquire(messages)) {
throw new MaxOutstandingMessagesReachedException(maxOutstandingMessages.get());
}
}
if (outstandingByteCount != null) {
int permitsToDraw = Math.min(bytes, maxOutstandingBytes.get());
if (!failOnLimits) {
outstandingByteCount.acquireUninterruptibly(permitsToDraw);
} else if (!outstandingByteCount.tryAcquire(permitsToDraw)) {
throw new MaxOutstandingBytesReachedException(maxOutstandingBytes.get());
|
1,139 |
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
<BUG>import static org.apache.hadoop.fs.s3a.S3AConstants.*;
public class S3AFileSystem extends FileSystem {</BUG>
private URI uri;
private Path workingDir;
private AmazonS3Client s3;
|
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
import static org.apache.hadoop.fs.s3a.Constants.*;
public class S3AFileSystem extends FileSystem {
private URI uri;
private Path workingDir;
private AmazonS3Client s3;
|
1,140 |
public void initialize(URI name, Configuration conf) throws IOException {
super.initialize(name, conf);
uri = URI.create(name.getScheme() + "://" + name.getAuthority());
workingDir = new Path("/user", System.getProperty("user.name")).makeQualified(this.uri,
this.getWorkingDirectory());
<BUG>String accessKey = conf.get(ACCESS_KEY, null);
String secretKey = conf.get(SECRET_KEY, null);
</BUG>
String userInfo = name.getUserInfo();
|
public void initialize(URI name, Configuration conf) throws IOException {
super.initialize(name, conf);
uri = URI.create(name.getScheme() + "://" + name.getAuthority());
workingDir = new Path("/user", System.getProperty("user.name")).makeQualified(this.uri,
this.getWorkingDirectory());
String accessKey = conf.get(NEW_ACCESS_KEY, conf.get(OLD_ACCESS_KEY, null));
String secretKey = conf.get(NEW_SECRET_KEY, conf.get(OLD_SECRET_KEY, null));
String userInfo = name.getUserInfo();
|
1,141 |
} else {
accessKey = userInfo;
}
}
AWSCredentialsProviderChain credentials = new AWSCredentialsProviderChain(
<BUG>new S3ABasicAWSCredentialsProvider(accessKey, secretKey),
new InstanceProfileCredentialsProvider(),
new S3AAnonymousAWSCredentialsProvider()
);</BUG>
bucket = name.getHost();
|
} else {
accessKey = userInfo;
}
}
AWSCredentialsProviderChain credentials = new AWSCredentialsProviderChain(
new BasicAWSCredentialsProvider(accessKey, secretKey),
new InstanceProfileCredentialsProvider(),
new AnonymousAWSCredentialsProvider()
);
bucket = name.getHost();
|
1,142 |
awsConf.setSocketTimeout(conf.getInt(SOCKET_TIMEOUT, DEFAULT_SOCKET_TIMEOUT));
</BUG>
s3 = new AmazonS3Client(credentials, awsConf);
<BUG>maxKeys = conf.getInt(MAX_PAGING_KEYS, DEFAULT_MAX_PAGING_KEYS);
partSize = conf.getLong(MULTIPART_SIZE, DEFAULT_MULTIPART_SIZE);
partSizeThreshold = conf.getInt(MIN_MULTIPART_THRESHOLD, DEFAULT_MIN_MULTIPART_THRESHOLD);
</BUG>
if (partSize < 5 * 1024 * 1024) {
|
new InstanceProfileCredentialsProvider(),
new AnonymousAWSCredentialsProvider()
);
bucket = name.getHost();
ClientConfiguration awsConf = new ClientConfiguration();
awsConf.setMaxConnections(conf.getInt(NEW_MAXIMUM_CONNECTIONS, conf.getInt(OLD_MAXIMUM_CONNECTIONS, DEFAULT_MAXIMUM_CONNECTIONS)));
awsConf.setProtocol(conf.getBoolean(NEW_SECURE_CONNECTIONS, conf.getBoolean(OLD_SECURE_CONNECTIONS, DEFAULT_SECURE_CONNECTIONS)) ? Protocol.HTTPS : Protocol.HTTP);
awsConf.setMaxErrorRetry(conf.getInt(NEW_MAX_ERROR_RETRIES, conf.getInt(OLD_MAX_ERROR_RETRIES, DEFAULT_MAX_ERROR_RETRIES)));
awsConf.setSocketTimeout(conf.getInt(NEW_SOCKET_TIMEOUT, conf.getInt(OLD_SOCKET_TIMEOUT, DEFAULT_SOCKET_TIMEOUT)));
s3 = new AmazonS3Client(credentials, awsConf);
maxKeys = conf.getInt(NEW_MAX_PAGING_KEYS, conf.getInt(OLD_MAX_PAGING_KEYS, DEFAULT_MAX_PAGING_KEYS));
partSize = conf.getLong(NEW_MULTIPART_SIZE, conf.getLong(OLD_MULTIPART_SIZE, DEFAULT_MULTIPART_SIZE));
partSizeThreshold = conf.getInt(NEW_MIN_MULTIPART_THRESHOLD, conf.getInt(OLD_MIN_MULTIPART_THRESHOLD, DEFAULT_MIN_MULTIPART_THRESHOLD));
|
1,143 |
if (partSizeThreshold < 5 * 1024 * 1024) {
<BUG>LOG.error(MIN_MULTIPART_THRESHOLD + " must be at least 5 MB");
</BUG>
partSizeThreshold = 5 * 1024 * 1024;
}
<BUG>String cannedACLName = conf.get(CANNED_ACL, DEFAULT_CANNED_ACL);
</BUG>
if (!cannedACLName.isEmpty()) {
cannedACL = CannedAccessControlList.valueOf(cannedACLName);
} else {
|
if (partSizeThreshold < 5 * 1024 * 1024) {
LOG.error(NEW_MIN_MULTIPART_THRESHOLD + " must be at least 5 MB");
partSizeThreshold = 5 * 1024 * 1024;
}
String cannedACLName = conf.get(NEW_CANNED_ACL, conf.get(OLD_CANNED_ACL, DEFAULT_CANNED_ACL));
if (!cannedACLName.isEmpty()) {
cannedACL = CannedAccessControlList.valueOf(cannedACLName);
} else {
|
1,144 |
cannedACL = null;
}
if (!s3.doesBucketExist(bucket)) {
throw new IOException("Bucket " + bucket + " does not exist");
}
<BUG>boolean purgeExistingMultipart = conf.getBoolean(PURGE_EXISTING_MULTIPART, DEFAULT_PURGE_EXISTING_MULTIPART);
long purgeExistingMultipartAge = conf.getLong(PURGE_EXISTING_MULTIPART_AGE, DEFAULT_PURGE_EXISTING_MULTIPART_AGE);
</BUG>
if (purgeExistingMultipart) {
|
cannedACL = null;
}
if (!s3.doesBucketExist(bucket)) {
throw new IOException("Bucket " + bucket + " does not exist");
}
boolean purgeExistingMultipart = conf.getBoolean(NEW_PURGE_EXISTING_MULTIPART, conf.getBoolean(OLD_PURGE_EXISTING_MULTIPART, DEFAULT_PURGE_EXISTING_MULTIPART));
long purgeExistingMultipartAge = conf.getLong(NEW_PURGE_EXISTING_MULTIPART_AGE, conf.getLong(OLD_PURGE_EXISTING_MULTIPART_AGE, DEFAULT_PURGE_EXISTING_MULTIPART_AGE));
if (purgeExistingMultipart) {
|
1,145 |
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
<BUG>import static org.apache.hadoop.fs.s3a.S3AConstants.*;
public class S3AOutputStream extends OutputStream {</BUG>
private OutputStream backupStream;
private File backupFile;
private boolean closed;
|
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import static org.apache.hadoop.fs.s3a.Constants.*;
public class S3AOutputStream extends OutputStream {
private OutputStream backupStream;
private File backupFile;
private boolean closed;
|
1,146 |
this.client = client;
this.progress = progress;
this.fs = fs;
this.cannedACL = cannedACL;
this.statistics = statistics;
<BUG>partSize = conf.getLong(MULTIPART_SIZE, DEFAULT_MULTIPART_SIZE);
partSizeThreshold = conf.getInt(MIN_MULTIPART_THRESHOLD, DEFAULT_MIN_MULTIPART_THRESHOLD);
</BUG>
if (conf.get(BUFFER_DIR, null) != null) {
|
this.client = client;
this.progress = progress;
this.fs = fs;
this.cannedACL = cannedACL;
this.statistics = statistics;
partSize = conf.getLong(OLD_MULTIPART_SIZE, DEFAULT_MULTIPART_SIZE);
partSizeThreshold = conf.getInt(OLD_MIN_MULTIPART_THRESHOLD, DEFAULT_MIN_MULTIPART_THRESHOLD);
if (conf.get(BUFFER_DIR, null) != null) {
|
1,147 |
import java.sql.SQLException;
import java.sql.Statement;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
<BUG>import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;</BUG>
import java.util.concurrent.locks.Lock;
|
import java.sql.SQLException;
import java.sql.Statement;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.Lock;
|
1,148 |
deleteDestSequenceStmt.execute();
commit();
</BUG>
} catch (SQLException ex) {
conex = ex;
<BUG>abort();
</BUG>
throw new RMStoreException(ex);
<BUG>} finally {
updateConnectionState(conex);
</BUG>
}
|
oi.setValue(oidValue);
}
ProtocolVariation pv = decodeProtocolVersion(res.getString(5));
return new SourceSequence(sid, expiry, oi, cmn, lm, pv);
}
} catch (SQLException ex) {
conex = ex;
LOG.log(Level.WARNING, new Message("SELECT_SRC_SEQ_FAILED_MSG", LOG).toString(), ex);
} finally {
releaseResources(stmt, res);
updateConnectionState(con, conex);
|
1,149 |
deleteSrcSequenceStmt.execute();
commit();
</BUG>
} catch (SQLException ex) {
conex = ex;
<BUG>abort();
</BUG>
throw new RMStoreException(ex);
<BUG>} finally {
updateConnectionState(conex);
</BUG>
}
|
oi.setValue(oidValue);
}
ProtocolVariation pv = decodeProtocolVersion(res.getString(5));
return new SourceSequence(sid, expiry, oi, cmn, lm, pv);
}
} catch (SQLException ex) {
conex = ex;
LOG.log(Level.WARNING, new Message("SELECT_SRC_SEQ_FAILED_MSG", LOG).toString(), ex);
} finally {
releaseResources(stmt, res);
updateConnectionState(con, conex);
|
1,150 |
beginTransaction();
<BUG>updateDestinationSequence(seq);
</BUG>
if (msg != null && msg.getCachedOutputStream() != null) {
<BUG>storeMessage(seq.getIdentifier(), msg, false);
</BUG>
}
<BUG>commit();
</BUG>
} catch (SQLException ex) {
|
beginTransaction();
updateDestinationSequence(con, seq);
if (msg != null && msg.getCachedOutputStream() != null) {
storeMessage(con, seq.getIdentifier(), msg, false);
}
commit(con);
} catch (SQLException ex) {
|
1,151 |
</BUG>
}
}
public void persistOutgoing(SourceSequence seq, RMMessage msg) {
<BUG>verifyConnection();
SQLException conex = null;</BUG>
try {
beginTransaction();
<BUG>updateSourceSequence(seq);
</BUG>
if (msg != null && msg.getCachedOutputStream() != null) {
|
} catch (IOException ex) {
abort(con);
throw new RMStoreException(ex);
} finally {
updateConnectionState(con, conex);
}
}
public void persistOutgoing(SourceSequence seq, RMMessage msg) {
Connection con = verifyConnection();
SQLException conex = null;
try {
beginTransaction();
updateSourceSequence(con, seq);
|
1,152 |
beginTransaction();
<BUG>updateSourceSequence(seq);
</BUG>
if (msg != null && msg.getCachedOutputStream() != null) {
<BUG>storeMessage(seq.getIdentifier(), msg, true);
</BUG>
}
<BUG>commit();
</BUG>
} catch (SQLException ex) {
|
beginTransaction();
updateSourceSequence(con, seq);
if (msg != null && msg.getCachedOutputStream() != null) {
storeMessage(con, seq.getIdentifier(), msg, true);
}
commit(con);
} catch (SQLException ex) {
|
1,153 |
String to = msg.getTo();
if (LOG.isLoggable(Level.FINE)) {
LOG.log(Level.FINE, "Storing {0} message number {1} for sequence {2}, to = {3}",
new Object[] {outbound ? "outbound" : "inbound", nr, id, to});
}
<BUG>InputStream msgin = null;
try {</BUG>
msgin = msg.getInputStream();
<BUG>PreparedStatement stmt = outbound ? createOutboundMessageStmt : createInboundMessageStmt;
int i = 1;</BUG>
stmt.setString(i++, id);
|
String to = msg.getTo();
if (LOG.isLoggable(Level.FINE)) {
LOG.log(Level.FINE, "Storing {0} message number {1} for sequence {2}, to = {3}",
new Object[] {outbound ? "outbound" : "inbound", nr, id, to});
}
InputStream msgin = null;
PreparedStatement stmt = null;
try {
msgin = msg.getInputStream();
stmt = getStatement(con, outbound ? CREATE_OUTBOUND_MESSAGE_STMT_STR : CREATE_INBOUND_MESSAGE_STMT_STR);
int i = 1;
stmt.setString(i++, id);
|
1,154 |
if (msgin != null) {
try {
msgin.close();
} catch (IOException e) {
}
<BUG>}
}
}
protected void updateSourceSequence(SourceSequence seq)
</BUG>
throws SQLException {
|
if (msgin != null) {
try {
msgin.close();
} catch (IOException e) {
}
}
releaseResources(stmt, null);
}
}
protected void storeMessage(Identifier sid, RMMessage msg, boolean outbound)
throws IOException, SQLException {
storeMessage(connection, sid, msg, outbound);
}
protected void updateSourceSequence(Connection con, SourceSequence seq)
|
1,155 |
} catch (SQLException ex) {
if (!isTableExistsError(ex)) {
throw ex;
} else {
LOG.fine("Table CXF_RM_SRC_SEQUENCES already exists.");
<BUG>verifyTable(SRC_SEQUENCES_TABLE_NAME, SRC_SEQUENCES_TABLE_COLS);
</BUG>
}
} finally {
stmt.close();
|
} catch (SQLException ex) {
if (!isTableExistsError(ex)) {
throw ex;
} else {
LOG.fine("Table CXF_RM_SRC_SEQUENCES already exists.");
verifyTable(con, SRC_SEQUENCES_TABLE_NAME, SRC_SEQUENCES_TABLE_COLS);
}
} finally {
stmt.close();
|
1,156 |
} catch (SQLException ex) {
if (!isTableExistsError(ex)) {
throw ex;
} else {
LOG.fine("Table CXF_RM_DEST_SEQUENCES already exists.");
<BUG>verifyTable(DEST_SEQUENCES_TABLE_NAME, DEST_SEQUENCES_TABLE_COLS);
</BUG>
}
} finally {
stmt.close();
|
} catch (SQLException ex) {
if (!isTableExistsError(ex)) {
throw ex;
} else {
LOG.fine("Table CXF_RM_SRC_SEQUENCES already exists.");
verifyTable(con, SRC_SEQUENCES_TABLE_NAME, SRC_SEQUENCES_TABLE_COLS);
}
} finally {
stmt.close();
|
1,157 |
}
} finally {
stmt.close();
}
for (String tableName : new String[] {OUTBOUND_MSGS_TABLE_NAME, INBOUND_MSGS_TABLE_NAME}) {
<BUG>stmt = connection.createStatement();
try {</BUG>
stmt.executeUpdate(MessageFormat.format(CREATE_MESSAGES_TABLE_STMT, tableName));
} catch (SQLException ex) {
if (!isTableExistsError(ex)) {
|
}
} finally {
stmt.close();
}
stmt = con.createStatement();
try {
stmt.executeUpdate(CREATE_DEST_SEQUENCES_TABLE_STMT);
} catch (SQLException ex) {
if (!isTableExistsError(ex)) {
|
1,158 |
throw ex;
} else {
if (LOG.isLoggable(Level.FINE)) {
LOG.fine("Table " + tableName + " already exists.");
}
<BUG>verifyTable(tableName, MESSAGES_TABLE_COLS);
</BUG>
}
} finally {
stmt.close();
|
throw ex;
} else {
LOG.fine("Table CXF_RM_SRC_SEQUENCES already exists.");
verifyTable(con, SRC_SEQUENCES_TABLE_NAME, SRC_SEQUENCES_TABLE_COLS);
}
} finally {
stmt.close();
|
1,159 |
if (newCols.size() > 0) {
if (LOG.isLoggable(Level.FINE)) {
LOG.log(Level.FINE, "Table " + tableName + " needs additional columns");
}
for (String[] newCol : newCols) {
<BUG>Statement st = connection.createStatement();
try {</BUG>
st.executeUpdate(MessageFormat.format(ALTER_TABLE_STMT_STR,
tableName, newCol[0], newCol[1]));
if (LOG.isLoggable(Level.FINE)) {
|
if (newCols.size() > 0) {
if (LOG.isLoggable(Level.FINE)) {
LOG.log(Level.FINE, "Table " + tableName + " needs additional columns");
}
for (String[] newCol : newCols) {
Statement st = con.createStatement();
try {
st.executeUpdate(MessageFormat.format(ALTER_TABLE_STMT_STR,
tableName, newCol[0], newCol[1]));
if (LOG.isLoggable(Level.FINE)) {
|
1,160 |
if (nextReconnectAttempt < System.currentTimeMillis()) {
nextReconnectAttempt = System.currentTimeMillis() + reconnectDelay;
reconnectDelay = reconnectDelay * useExponentialBackOff;
}
}
<BUG>}
public static void deleteDatabaseFiles() {</BUG>
deleteDatabaseFiles(DEFAULT_DATABASE_NAME, true);
}
public static void deleteDatabaseFiles(String dbName, boolean now) {
|
if (nextReconnectAttempt < System.currentTimeMillis()) {
nextReconnectAttempt = System.currentTimeMillis() + reconnectDelay;
reconnectDelay = reconnectDelay * useExponentialBackOff;
}
}
}
}
}
public static void deleteDatabaseFiles() {
deleteDatabaseFiles(DEFAULT_DATABASE_NAME, true);
}
public static void deleteDatabaseFiles(String dbName, boolean now) {
|
1,161 |
import java.sql.Connection;
import java.sql.SQLException;
import com.jfixby.cmns.api.debug.Debug;
import com.jfixby.cmns.api.err.Err;
import com.jfixby.cmns.api.log.L;
<BUG>import com.mysql.jdbc.jdbc2.optional.MysqlDataSource;</BUG>
public class MySQLConnection {
private SQLException e;
<BUG>private final MysqlDataSource dataSource;
public MySQLConnection (final MysqlDataSource dataSource) {
this.dataSource = dataSource;
}</BUG>
public boolean open () {
|
import java.sql.Connection;
import java.sql.SQLException;
import com.jfixby.cmns.api.debug.Debug;
import com.jfixby.cmns.api.err.Err;
import com.jfixby.cmns.api.log.L;
public class MySQLConnection {
private SQLException e;
private final MySQL mySQL;
public MySQLConnection (final MySQL mySQL) {
this.mySQL = mySQL;
}
public boolean open () {
|
1,162 |
Debug.checkNull("mysql_connection", this.mysql_connection);
return this.mysql_connection;
}
@Override
public String toString () {
<BUG>return "MySQLConnection[" + this.dataSource.getUrl() + " : " + this.dataSource.getURL() + "]";
}</BUG>
@Override
protected void finalize () throws Throwable {
super.finalize();
|
Debug.checkNull("mysql_connection", this.mysql_connection);
return this.mysql_connection;
}
@Override
public String toString () {
return "MySQLConnection[" + this.mySQL.getUrl() + "]";
}
@Override
protected void finalize () throws Throwable {
super.finalize();
|
1,163 |
package com.jfixby.cmns.adopted.gdx.log;
import com.badlogic.gdx.Application;
import com.badlogic.gdx.Gdx;
import com.jfixby.cmns.api.collections.EditableCollection;
<BUG>import com.jfixby.cmns.api.collections.Map;
import com.jfixby.cmns.api.log.LoggerComponent;</BUG>
import com.jfixby.cmns.api.util.JUtils;
public class GdxLogger implements LoggerComponent {
public GdxLogger () {
|
package com.jfixby.cmns.adopted.gdx.log;
import com.badlogic.gdx.Application;
import com.badlogic.gdx.Gdx;
import com.jfixby.cmns.api.collections.EditableCollection;
import com.jfixby.cmns.api.collections.Map;
import com.jfixby.cmns.api.err.Err;
import com.jfixby.cmns.api.log.LoggerComponent;
import com.jfixby.cmns.api.util.JUtils;
public class GdxLogger implements LoggerComponent {
public GdxLogger () {
|
1,164 |
public static final String PackageName = "app.version.package_name";
}
public static final String VERSION_FILE_NAME = "version.json";
private static final long serialVersionUID = 6662721574596241247L;
public String packageName;
<BUG>public int major = -1;
public int minor = -1;
public VERSION_STAGE stage = null;
public int build = -1;
</BUG>
public int versionCode = -1;
|
public static final String PackageName = "app.version.package_name";
}
public static final String VERSION_FILE_NAME = "version.json";
private static final long serialVersionUID = 6662721574596241247L;
public String packageName;
public String major = "";
public String minor = "";
public String build = "";
public int versionCode = -1;
|
1,165 |
package com.jfixby.red.collections;
<BUG>import com.jfixby.cmns.api.java.IntValue;
import com.jfixby.cmns.api.math.FloatMath;</BUG>
public class RedHistogrammValue {
private RedHistogramm master;
public RedHistogrammValue (RedHistogramm redHistogramm) {
|
package com.jfixby.red.collections;
import com.jfixby.cmns.api.java.Int;
import com.jfixby.cmns.api.math.FloatMath;
public class RedHistogrammValue {
private RedHistogramm master;
public RedHistogrammValue (RedHistogramm redHistogramm) {
|
1,166 |
<BUG>package com.jfixby.cmns.db.mysql;
import com.jfixby.cmns.api.debug.Debug;</BUG>
import com.jfixby.cmns.api.log.L;
import com.jfixby.cmns.db.api.DBComponent;
import com.mysql.jdbc.jdbc2.optional.MysqlDataSource;
|
package com.jfixby.cmns.db.mysql;
import java.sql.Connection;
import java.sql.SQLException;
import com.jfixby.cmns.api.debug.Debug;
import com.jfixby.cmns.api.log.L;
import com.jfixby.cmns.db.api.DBComponent;
import com.mysql.jdbc.jdbc2.optional.MysqlDataSource;
|
1,167 |
}
public MySQLTable getTable (final String name) {
return new MySQLTable(this, name);
}
public MySQLConnection obtainConnection () {
<BUG>final MySQLConnection connection = new MySQLConnection(this.dataSource);
connection.open();</BUG>
return connection;
}
public void releaseConnection (final MySQLConnection connection) {
|
}
public String getDBName () {
return this.dbName;
}
public MySQLTable getTable (final String name) {
return new MySQLTable(this, name);
}
public MySQLConnection obtainConnection () {
final MySQLConnection connection = new MySQLConnection(this);
connection.open();
return connection;
}
public void releaseConnection (final MySQLConnection connection) {
|
1,168 |
package org.derive4j.exemple;
import org.derive4j.Data;
<BUG>@Data
public abstract class Event<T> {</BUG>
interface Cases<T, R> {
R itemAdded(String itemName);
R itemRemoved(T ref, String itemName);
|
package org.derive4j.exemple;
import org.derive4j.Data;
import org.derive4j.Flavour;
@Data(flavour = Flavour.Javaslang)
public abstract class Event<T> {
interface Cases<T, R> {
R itemAdded(String itemName);
R itemRemoved(T ref, String itemName);
|
1,169 |
import javax.lang.model.util.Elements;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.derive4j.processor.derivator.FlavourImpl.EitherType.eitherType;
<BUG>import static org.derive4j.processor.derivator.FlavourImpl.OptionType.optionTye;
</BUG>
public final class FlavourImpl {
public static TypeElement findF0(Flavour flavour, Elements elements) {
return elements.getTypeElement(
|
import javax.lang.model.util.Elements;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.derive4j.processor.derivator.FlavourImpl.EitherType.eitherType;
import static org.derive4j.processor.derivator.FlavourImpl.OptionType.optionType;
public final class FlavourImpl {
public static TypeElement findF0(Flavour flavour, Elements elements) {
return elements.getTypeElement(
|
1,170 |
return elements.getTypeElement(
Flavours.cases()
.Jdk(() -> Supplier.class.getName())
.Fj(() -> "fj.F0")
.Fugue(() -> Supplier.class.getName())
<BUG>.Fugue2(() -> "com.google.common.base.Supplier")
.apply(flavour)</BUG>
);
}
public static TypeElement findF(Flavour flavour, Elements elements) {
|
return elements.getTypeElement(
Flavours.cases()
.Jdk(() -> Supplier.class.getName())
.Fj(() -> "fj.F0")
.Fugue(() -> Supplier.class.getName())
.Fugue2(() -> "com.google.common.base.Supplier")
.Javaslang(() -> Supplier.class.getName())
.apply(flavour)
);
}
public static TypeElement findF(Flavour flavour, Elements elements) {
|
1,171 |
public static Optional<EitherType> findEitherType(Flavour flavour, Elements elements) {
return Flavours.cases()
.Jdk(() -> Optional.<EitherType>empty())
.Fj(() -> Optional.of(eitherType(elements.getTypeElement("fj.data.Either"), "left", "right")))
.Fugue(() -> Optional.of(eitherType(elements.getTypeElement("io.atlassian.fugue.Either"), "left", "right")))
<BUG>.Fugue2(() -> Optional.of(eitherType(elements.getTypeElement("com.atlassian.fugue.Either"), "left", "right")))
.apply(flavour);</BUG>
}
public static abstract class OptionType {
<BUG>public static OptionType optionTye(TypeElement typeElement, String noneConstructor, String someConstructor) {
</BUG>
return new OptionType() {
|
public static Optional<EitherType> findEitherType(Flavour flavour, Elements elements) {
return Flavours.cases()
.Jdk(() -> Optional.<EitherType>empty())
.Fj(() -> Optional.of(eitherType(elements.getTypeElement("fj.data.Either"), "left", "right")))
.Fugue(() -> Optional.of(eitherType(elements.getTypeElement("io.atlassian.fugue.Either"), "left", "right")))
.Fugue2(() -> Optional.of(eitherType(elements.getTypeElement("com.atlassian.fugue.Either"), "left", "right")))
.Javaslang(() -> Optional.of(eitherType(elements.getTypeElement("javaslang.control.Either"), "left", "right")))
.apply(flavour);
}
public static abstract class OptionType {
public static OptionType optionType(TypeElement typeElement, String noneConstructor, String someConstructor) {
return new OptionType() {
|
1,172 |
<BUG>package org.jgroups.protocols;
import org.jgroups.Address;</BUG>
import org.jgroups.Event;
import org.jgroups.Message;
import org.jgroups.PhysicalAddress;
|
package org.jgroups.protocols;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.jgroups.Address;
import org.jgroups.Event;
import org.jgroups.Message;
import org.jgroups.PhysicalAddress;
|
1,173 |
this.initial_hosts = initial_hosts ;
}
public List<InetSocketAddress> getInitialHosts() {
return initial_hosts;
}
<BUG>List<InetSocketAddress> initial_hosts=null; // (list of IpAddresses) hosts to be contacted for the initial membership
final List<RouterStub> stubs=new ArrayList<RouterStub>();
Future<?> reconnect_future=null;
Future<?> connection_checker=null;
protected volatile boolean running=true;</BUG>
public void init() throws Exception {
|
this.initial_hosts = initial_hosts ;
}
public List<InetSocketAddress> getInitialHosts() {
return initial_hosts;
}
List<InetSocketAddress> initial_hosts=null; // (list of IpAddresses) hosts to be contacted for the initial membership
private volatile RouterStubManager stubManager;
|
1,174 |
}
}
stopReconnector();</BUG>
}
<BUG>public void destroy() {
for (RouterStub stub : stubs) {
stub.destroy();
}</BUG>
super.destroy();
}
|
}
public void start() throws Exception {
super.start();
}
public void stop() {
super.stop();
stubManager.disconnectStubs();
}
public void destroy() {
stubManager.destroyStubs();
super.destroy();
}
|
1,175 |
return;
}
if (log.isTraceEnabled())
log.trace("fetching members from GossipRouter(s)");
final List<PingData> responses = new LinkedList<PingData>();
<BUG>for (RouterStub stub : stubs) {
</BUG>
try {
List<PingData> rsps = stub.getMembers(group_addr);
responses.addAll(rsps);
|
return;
}
if (log.isTraceEnabled())
log.trace("fetching members from GossipRouter(s)");
final List<PingData> responses = new LinkedList<PingData>();
for (RouterStub stub : stubManager.getStubs()) {
try {
List<PingData> rsps = stub.getMembers(group_addr);
responses.addAll(rsps);
|
1,176 |
String logical_name=org.jgroups.util.UUID.get(logical_addr);
PhysicalAddress physical_addr=(PhysicalAddress)down_prot.down(new Event(Event.GET_PHYSICAL_ADDRESS, local_addr));
List<PhysicalAddress> physical_addrs=physical_addr != null? new ArrayList<PhysicalAddress>() : null;
if(physical_addr != null)
physical_addrs.add(physical_addr);
<BUG>int num_faulty_conns=0;
for (RouterStub stub : stubs) {
</BUG>
try {
if(log.isTraceEnabled())
|
String logical_name=org.jgroups.util.UUID.get(logical_addr);
PhysicalAddress physical_addr=(PhysicalAddress)down_prot.down(new Event(Event.GET_PHYSICAL_ADDRESS, local_addr));
List<PhysicalAddress> physical_addrs=physical_addr != null? new ArrayList<PhysicalAddress>() : null;
if(physical_addr != null)
physical_addrs.add(physical_addr);
for (RouterStub stub : stubManager.getStubs()) {
try {
if(log.isTraceEnabled())
|
1,177 |
package com.easytoolsoft.easyreport.web.controller.common;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
@RequestMapping(value = "/error")
<BUG>public class ErrorController extends AbstractController {
@RequestMapping(value = {"/404"})</BUG>
public String error404() {
return "/error/404";
}
|
package com.easytoolsoft.easyreport.web.controller.common;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
@RequestMapping(value = "/error")
public class ErrorController {
@RequestMapping(value = {"/404"})
public String error404() {
return "/error/404";
}
|
1,178 |
package com.easytoolsoft.easyreport.web.controller.membership;
<BUG>import com.easytoolsoft.easyreport.web.controller.common.AbstractController;</BUG>
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
@RequestMapping(value = "/membership")
<BUG>public class LoginController extends AbstractController {
@RequestMapping(value = {"/login"})</BUG>
public String login() {
|
package com.easytoolsoft.easyreport.web.controller.membership;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
@RequestMapping(value = "/membership")
public class LoginController {
@RequestMapping(value = {"/login"})
public String login() {
|
1,179 |
package com.easytoolsoft.easyreport.metadata.service.impl;
import com.alibaba.fastjson.JSON;
<BUG>import com.easytoolsoft.easyreport.engine.query.QueryerFactory;</BUG>
import com.easytoolsoft.easyreport.data.common.helper.PageInfo;
import com.easytoolsoft.easyreport.data.common.helper.ParameterBuilder;
import com.easytoolsoft.easyreport.data.common.service.AbstractCrudService;
<BUG>import com.easytoolsoft.easyreport.engine.data.ReportDataSource;
import com.easytoolsoft.easyreport.engine.data.ReportMetaDataColumn;
import com.easytoolsoft.easyreport.engine.data.ReportQueryParamItem;</BUG>
import com.easytoolsoft.easyreport.data.metadata.dao.IReportDao;
|
package com.easytoolsoft.easyreport.metadata.service.impl;
import com.alibaba.fastjson.JSON;
import com.easytoolsoft.easyreport.data.common.helper.PageInfo;
import com.easytoolsoft.easyreport.data.common.helper.ParameterBuilder;
import com.easytoolsoft.easyreport.data.common.service.AbstractCrudService;
import com.easytoolsoft.easyreport.data.metadata.dao.IReportDao;
import com.easytoolsoft.easyreport.data.metadata.po.Category;
import com.easytoolsoft.easyreport.data.metadata.po.DataSource;
import com.easytoolsoft.easyreport.data.metadata.po.Report;
import com.easytoolsoft.easyreport.data.metadata.po.ReportOptions;
import com.easytoolsoft.easyreport.engine.data.ReportDataSource;
|
1,180 |
package com.easytoolsoft.easyreport.web.controller;
<BUG>import com.easytoolsoft.easyreport.web.controller.common.AbstractController;</BUG>
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
@RequestMapping(value = "/home")
<BUG>public class HomeController extends AbstractController {
@RequestMapping(value = {"", "/", "/index"})</BUG>
public String index() {
|
package com.easytoolsoft.easyreport.web.controller;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
@RequestMapping(value = "/home")
public class HomeController {
@RequestMapping(value = {"", "/", "/index"})
public String index() {
|
1,181 |
} else {
updateMemo();
callback.updateMemo();
}
dismiss();
<BUG>}else{
</BUG>
Toast.makeText(getActivity(), getString(R.string.toast_memo_empty), Toast.LENGTH_SHORT).show();
}
}
|
} else {
updateMemo();
callback.updateMemo();
}
dismiss();
} else {
Toast.makeText(getActivity(), getString(R.string.toast_memo_empty), Toast.LENGTH_SHORT).show();
}
}
|
1,182 |
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_memo);
<BUG>ChinaPhoneHelper.setStatusBar(this,true);
</BUG>
topicId = getIntent().getLongExtra("topicId", -1);
if (topicId == -1) {
finish();
|
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_memo);
ChinaPhoneHelper.setStatusBar(this, true);
topicId = getIntent().getLongExtra("topicId", -1);
if (topicId == -1) {
finish();
|
1,183 |
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.kiminonawa.mydiary.R;
import com.kiminonawa.mydiary.db.DBManager;
import com.kiminonawa.mydiary.shared.EditMode;
<BUG>import com.kiminonawa.mydiary.shared.ThemeManager;
import java.util.List;
public class MemoAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> implements EditMode {
</BUG>
private List<MemoEntity> memoList;
|
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.kiminonawa.mydiary.R;
import com.kiminonawa.mydiary.db.DBManager;
import com.kiminonawa.mydiary.shared.EditMode;
import com.kiminonawa.mydiary.shared.ThemeManager;
import com.marshalchen.ultimaterecyclerview.dragsortadapter.DragSortAdapter;
import java.util.List;
public class MemoAdapter extends DragSortAdapter<DragSortAdapter.ViewHolder> implements EditMode {
private List<MemoEntity> memoList;
|
1,184 |
private DBManager dbManager;
private boolean isEditMode = false;
private EditMemoDialogFragment.MemoCallback callback;
private static final int TYPE_HEADER = 0;
private static final int TYPE_ITEM = 1;
<BUG>public MemoAdapter(FragmentActivity activity, long topicId, List<MemoEntity> memoList, DBManager dbManager, EditMemoDialogFragment.MemoCallback callback) {
this.mActivity = activity;</BUG>
this.topicId = topicId;
this.memoList = memoList;
|
private DBManager dbManager;
private boolean isEditMode = false;
private EditMemoDialogFragment.MemoCallback callback;
private static final int TYPE_HEADER = 0;
private static final int TYPE_ITEM = 1;
public MemoAdapter(FragmentActivity activity, long topicId, List<MemoEntity> memoList, DBManager dbManager, EditMemoDialogFragment.MemoCallback callback, RecyclerView recyclerView) {
super(recyclerView);
this.mActivity = activity;
this.topicId = topicId;
this.memoList = memoList;
|
1,185 |
this.memoList = memoList;
this.dbManager = dbManager;
this.callback = callback;
}
@Override
<BUG>public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
</BUG>
View view;
if (isEditMode) {
if (viewType == TYPE_HEADER) {
|
this.memoList = memoList;
this.dbManager = dbManager;
this.callback = callback;
}
@Override
public DragSortAdapter.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view;
if (isEditMode) {
if (viewType == TYPE_HEADER) {
|
1,186 |
editMemoDialogFragment.show(mActivity.getSupportFragmentManager(), "editMemoDialogFragment");
}
});
}
}
<BUG>protected class MemoViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
private View rootView;
private TextView TV_memo_item_content;</BUG>
private ImageView IV_memo_item_delete;
|
editMemoDialogFragment.show(mActivity.getSupportFragmentManager(), "editMemoDialogFragment");
}
});
}
}
protected class MemoViewHolder extends DragSortAdapter.ViewHolder implements View.OnClickListener, View.OnLongClickListener {
private View rootView;
private ImageView IV_memo_item_dot;
private TextView TV_memo_item_content;
private ImageView IV_memo_item_delete;
|
1,187 |
MemoEntry.COLUMN_REF_TOPIC__ID + " = ?"
, new String[]{String.valueOf(topicId)});
}
public Cursor selectMemo(long topicId) {
Cursor c = db.query(MemoEntry.TABLE_NAME, null, MemoEntry.COLUMN_REF_TOPIC__ID + " = ?", new String[]{String.valueOf(topicId)}, null, null,
<BUG>MemoEntry._ID + " DESC", null);
</BUG>
if (c != null) {
c.moveToFirst();
}
|
MemoEntry.COLUMN_REF_TOPIC__ID + " = ?"
, new String[]{String.valueOf(topicId)});
}
public Cursor selectMemo(long topicId) {
Cursor c = db.query(MemoEntry.TABLE_NAME, null, MemoEntry.COLUMN_REF_TOPIC__ID + " = ?", new String[]{String.valueOf(topicId)}, null, null,
MemoEntry.COLUMN_ORDER + " ASC", null);
if (c != null) {
c.moveToFirst();
}
|
1,188 |
MemoEntry._ID + " = ?",
new String[]{String.valueOf(memoId)});
}
public long updateMemoContent(long memoId, String memoContent) {
ContentValues values = new ContentValues();
<BUG>values.put(MemoEntry.COLUMN_CONTENT, memoContent);
return db.update(</BUG>
MemoEntry.TABLE_NAME,
values,
MemoEntry._ID + " = ?",
|
MemoEntry._ID + " = ?",
new String[]{String.valueOf(memoId)});
}
public long updateMemoContent(long memoId, String memoContent) {
ContentValues values = new ContentValues();
values.put(MemoEntry.COLUMN_CONTENT, memoContent);
return db.update(
MemoEntry.TABLE_NAME,
values,
MemoEntry._ID + " = ?",
|
1,189 |
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
public class DependencyConvergenceReport
extends AbstractProjectInfoReport
<BUG>{
private List reactorProjects;
private static final int PERCENTAGE = 100;</BUG>
public String getOutputName()
{
|
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
public class DependencyConvergenceReport
extends AbstractProjectInfoReport
{
private static final int PERCENTAGE = 100;
private static final List SUPPORTED_FONT_FAMILY_NAMES = Arrays.asList( GraphicsEnvironment
.getLocalGraphicsEnvironment().getAvailableFontFamilyNames() );
private List reactorProjects;
public String getOutputName()
{
|
1,190 |
sink.section1();
sink.sectionTitle1();
sink.text( getI18nString( locale, "title" ) );
sink.sectionTitle1_();
Map dependencyMap = getDependencyMap();
<BUG>generateLegend( locale, sink );
generateStats( locale, sink, dependencyMap );
generateConvergence( locale, sink, dependencyMap );
sink.section1_();</BUG>
sink.body_();
|
sink.section1();
sink.sectionTitle1();
sink.text( getI18nString( locale, "title" ) );
sink.sectionTitle1_();
Map dependencyMap = getDependencyMap();
generateLegend( locale, sink );
sink.lineBreak();
generateStats( locale, sink, dependencyMap );
sink.section1_();
generateConvergence( locale, sink, dependencyMap );
sink.body_();
|
1,191 |
Iterator it = artifactMap.keySet().iterator();
while ( it.hasNext() )
{
String version = (String) it.next();
sink.tableRow();
<BUG>sink.tableCell();
sink.text( version );</BUG>
sink.tableCell_();
sink.tableCell();
generateVersionDetails( sink, artifactMap, version );
|
Iterator it = artifactMap.keySet().iterator();
while ( it.hasNext() )
{
String version = (String) it.next();
sink.tableRow();
sink.tableCell( String.valueOf( cellWidth ) + "px" );
sink.text( version );
sink.tableCell_();
sink.tableCell();
generateVersionDetails( sink, artifactMap, version );
|
1,192 |
return uniqueArtifactMap;
}
private void generateLegend( Locale locale, Sink sink )
{
sink.table();
<BUG>sink.tableCaption();
sink.text( getI18nString( locale, "legend" ) + ":" );
sink.tableCaption_();</BUG>
sink.tableRow();
<BUG>sink.tableCell();
iconSuccess( sink );</BUG>
sink.tableCell_();
|
return uniqueArtifactMap;
}
private void generateLegend( Locale locale, Sink sink )
{
sink.table();
sink.tableCaption();
sink.bold();
sink.text( getI18nString( locale, "legend" ) );
sink.bold_();
sink.tableCaption_();
sink.tableRow();
sink.tableCell( "15px" ); // according /images/icon_success_sml.gif
iconSuccess( sink );
sink.tableCell_();
|
1,193 |
sink.tableCell();
sink.text( getI18nString( locale, "legend.shared" ) );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
<BUG>sink.tableCell();
iconError( sink );</BUG>
sink.tableCell_();
sink.tableCell();
sink.text( getI18nString( locale, "legend.different" ) );
|
sink.tableCell();
sink.text( getI18nString( locale, "legend.shared" ) );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableCell( "15px" ); // according /images/icon_error_sml.gif
iconError( sink );
sink.tableCell_();
sink.tableCell();
sink.text( getI18nString( locale, "legend.different" ) );
|
1,194 |
sink.tableCaption();
sink.text( getI18nString( locale, "stats.caption" ) );
sink.tableCaption_();</BUG>
sink.tableRow();
<BUG>sink.tableHeaderCell();
sink.text( getI18nString( locale, "stats.subprojects" ) + ":" );
sink.tableHeaderCell_();</BUG>
sink.tableCell();
sink.text( String.valueOf( reactorProjects.size() ) );
sink.tableCell_();
|
sink.tableCaption();
sink.bold();
sink.text( getI18nString( locale, "stats.caption" ) );
sink.bold_();
sink.tableCaption_();
sink.tableRow();
sink.tableHeaderCell( headerCellWidth );
sink.text( getI18nString( locale, "stats.subprojects" ) );
sink.tableHeaderCell_();
sink.tableCell();
sink.text( String.valueOf( reactorProjects.size() ) );
sink.tableCell_();
|
1,195 |
sink.tableCell();
sink.text( String.valueOf( reactorProjects.size() ) );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
<BUG>sink.tableHeaderCell();
sink.text( getI18nString( locale, "stats.dependencies" ) + ":" );
sink.tableHeaderCell_();</BUG>
sink.tableCell();
sink.text( String.valueOf( depCount ) );
|
sink.tableCell();
sink.text( String.valueOf( reactorProjects.size() ) );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerCellWidth );
sink.text( getI18nString( locale, "stats.dependencies" ) );
sink.tableHeaderCell_();
sink.tableCell();
sink.text( String.valueOf( depCount ) );
|
1,196 |
sink.text( String.valueOf( convergence ) + "%" );
sink.bold_();
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
<BUG>sink.tableHeaderCell();
sink.text( getI18nString( locale, "stats.readyrelease" ) + ":" );
sink.tableHeaderCell_();</BUG>
sink.tableCell();
if ( convergence >= PERCENTAGE && snapshotCount <= 0 )
|
sink.text( String.valueOf( convergence ) + "%" );
sink.bold_();
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerCellWidth );
sink.text( getI18nString( locale, "stats.readyrelease" ) );
sink.tableHeaderCell_();
sink.tableCell();
if ( convergence >= PERCENTAGE && snapshotCount <= 0 )
|
1,197 |
{
ReverseDependencyLink p1 = (ReverseDependencyLink) o1;
ReverseDependencyLink p2 = (ReverseDependencyLink) o2;
return p1.getProject().getId().compareTo( p2.getProject().getId() );
}
<BUG>else
{</BUG>
return 0;
}
}
|
{
iconError( sink );
}
else
{
|
1,198 |
@Override
public void afterExecute(Bus handler, SingleResponse<Relationship> result) {
if (result.hasData()) {
handler.post(new FriendshipUpdatedEvent(accountKey, userKey, result.getData()));
} else if (result.hasException()) {
<BUG>if (BuildConfig.DEBUG) {
Log.w(LOGTAG, "Unable to update friendship", result.getException());
}</BUG>
}
|
@Override
public UserKey[] getAccountKeys() {
return DataStoreUtils.getActivatedAccountKeys(context);
|
1,199 |
MicroBlog microBlog = MicroBlogAPIFactory.getInstance(context, accountId);
if (!Utils.isOfficialCredentials(context, accountId)) continue;
try {
microBlog.setActivitiesAboutMeUnread(cursor);
} catch (MicroBlogException e) {
<BUG>if (BuildConfig.DEBUG) {
Log.w(LOGTAG, e);
}</BUG>
}
|
MicroBlog microBlog = MicroBlogAPIFactory.getInstance(context, accountId);
if (!Utils.isOfficialCredentials(context, accountId)) continue;
try {
microBlog.setActivitiesAboutMeUnread(cursor);
} catch (MicroBlogException e) {
DebugLog.w(LOGTAG, null, e);
|
1,200 |
for (Location location : twitter.getAvailableTrends()) {
map.put(location);
}
return map.pack();
} catch (final MicroBlogException e) {
<BUG>if (BuildConfig.DEBUG) {
Log.w(LOGTAG, e);
}</BUG>
}
|
for (Location location : twitter.getAvailableTrends()) {
map.put(location);
}
return map.pack();
} catch (final MicroBlogException e) {
DebugLog.w(LOGTAG, null, e);
}
|