id (int64, 22–34.9k) | comment_id (int64, 0–328) | comment (string, length 2–2.55k) | code (string, length 31–107k) | classification (string, 6 classes) | isFinished (bool, 1 class) | code_context_2 (string, length 21–27.3k) | code_context_10 (string, length 29–27.3k) | code_context_20 (string, length 29–27.3k)
---|---|---|---|---|---|---|---|---|
654 | 0 | /**
* Prompt to insert an Operator Card. Returns once the card is inserted.
*
* @param message Message to display to the user
*/ | public void promptForOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Insert Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card instead
framebuffer.pressEnter();
} | NONSATD | true | public void promptForOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Insert Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card instead
framebuffer.pressEnter();
} | public void promptForOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Insert Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card instead
framebuffer.pressEnter();
} | public void promptForOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Insert Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card instead
framebuffer.pressEnter();
} |
654 | 1 | // TODO: Wait on the Operator Card instead | public void promptForOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Insert Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card instead
framebuffer.pressEnter();
} | IMPLEMENTATION | true | baseScreenLayout(graphics,"Insert Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card instead
framebuffer.pressEnter();
} | public void promptForOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Insert Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card instead
framebuffer.pressEnter();
} | public void promptForOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Insert Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card instead
framebuffer.pressEnter();
} |
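
The two rows above (id 654) flag an IMPLEMENTATION debt: the prompt returns on an Enter key press rather than on actual card insertion. Below is a minimal sketch of how such a wait could look with the standard `javax.smartcardio` API; the class name, the choice of the first available reader, and the use of `javax.smartcardio` at all are assumptions, not the project's documented plan.

```java
import javax.smartcardio.CardException;
import javax.smartcardio.CardTerminal;
import javax.smartcardio.TerminalFactory;

// Hypothetical helper: block until a card is inserted into the first available reader.
final class OperatorCardWait {
    static void awaitCardPresent() throws CardException {
        // Take the first PC/SC terminal reported by the default factory (assumption).
        CardTerminal terminal = TerminalFactory.getDefault().terminals().list().get(0);
        // A timeout of 0 means "no timeout": block until a card is present.
        terminal.waitForCardPresent(0);
    }
}
```
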
653 | 0 | // TODO: don't count entering the first Five-Way | public PlayerPathData populateStats() {
this.playerEntity = strongholdPath.getPlayerEntity();
StrongholdGenerator.Start start = this.strongholdPath.getStart();
StrongholdTreeAccessor treeAccessor = (StrongholdTreeAccessor) start;
List<StrongholdPathEntry> history = this.strongholdPath.getHistory();
ArrayList<StructurePiece> solution = new ArrayList<>();
StrongholdGenerator.Piece current = this.strongholdPath.getHistory().get(strongholdPath.getHistory().size() - 1).getCurrentPiece();
while (current != null) {
solution.add(current);
current = (StrongholdGenerator.Piece) treeAccessor.getParents().get(current);
}
List<StrongholdPathEntry> validEntries = history.stream()
.filter(entry -> validateEntryForLoss(strongholdPath, strongholdPath.getNextEntry(entry)))
.filter(entry -> !solution.contains(strongholdPath.getNextEntry(entry).getCurrentPiece()) && solution.contains(entry.getCurrentPiece()))
.collect(Collectors.toList());
List<Pair<StrongholdPathEntry, Double>> losses = new ArrayList<>();
validEntries.forEach(strongholdPathEntry -> losses.add(new Pair<>(strongholdPathEntry, loss(strongholdPath, strongholdPath.getNextEntry(strongholdPathEntry), solution))));
this.inaccuracies = losses.stream().filter(pair -> pair.getRight() >= INACCURACY_THRESHOLD).map(Pair::getLeft).map(StrongholdPathEntry::getCurrentPiece).collect(Collectors.toList());
this.mistakes = losses.stream().filter(pair -> pair.getRight() >= MISTAKE_THRESHOLD).map(Pair::getLeft).map(StrongholdPathEntry::getCurrentPiece).collect(Collectors.toList());
this.blunders = losses.stream().filter(pair -> pair.getRight() >= BLUNDER_THRESHOLD).map(Pair::getLeft).map(StrongholdPathEntry::getCurrentPiece).collect(Collectors.toList());
inaccuracies.removeAll(this.mistakes);
mistakes.removeAll(this.blunders);
ArrayList<Pair<StrongholdGenerator.Piece, Integer>> rooms = new ArrayList<>();
history.forEach(pathEntry -> {
Pair<StrongholdGenerator.Piece, Integer> pair = new Pair<>(pathEntry.getCurrentPiece(), pathEntry.getTicksSpentInPiece().get());
rooms.add(pair);
});
return new PlayerPathData(
rooms,
strongholdPath.getTotalTime(),
computeDifficulty(solution),
history.stream()
.filter(pathEntry -> !solution.contains(pathEntry.getCurrentPiece()))
.map(StrongholdPathEntry::getTicksSpentInPiece)
.mapToInt(AtomicInteger::get)
.sum(),
// TODO: don't count entering the first Five-Way
(int) history.stream()
.map(strongholdPathEntry -> strongholdPath.getNextEntry(strongholdPathEntry))
.filter(Objects::nonNull)
.map(StrongholdPathEntry::getCurrentPiece)
.filter(solution::contains)
.count(),
this.inaccuracies.size(),
this.mistakes.size(),
this.blunders.size(),
(int) history.stream()
.filter(entry -> !(entry.getCurrentPiece() instanceof StrongholdGenerator.PortalRoom))
.filter(entry -> !areAdjacent(entry.getCurrentPiece(), strongholdPath.getNextEntry(entry).getCurrentPiece(), treeAccessor))
.count(),
history.size() - 1,
history.stream()
.filter(entry -> FEINBERG_AVG_ROOM_TIMES.containsKey(entry.getCurrentPiece().getClass()))
.mapToInt(value -> value.getTicksSpentInPiece().get() - FEINBERG_AVG_ROOM_TIMES.get(value.getCurrentPiece().getClass()))
.sum()
);
} | IMPLEMENTATION | true | .mapToInt(AtomicInteger::get)
.sum(),
// TODO: don't count entering the first Five-Way
(int) history.stream()
.map(strongholdPathEntry -> strongholdPath.getNextEntry(strongholdPathEntry)) | });
return new PlayerPathData(
rooms,
strongholdPath.getTotalTime(),
computeDifficulty(solution),
history.stream()
.filter(pathEntry -> !solution.contains(pathEntry.getCurrentPiece()))
.map(StrongholdPathEntry::getTicksSpentInPiece)
.mapToInt(AtomicInteger::get)
.sum(),
// TODO: don't count entering the first Five-Way
(int) history.stream()
.map(strongholdPathEntry -> strongholdPath.getNextEntry(strongholdPathEntry))
.filter(Objects::nonNull)
.map(StrongholdPathEntry::getCurrentPiece)
.filter(solution::contains)
.count(),
this.inaccuracies.size(),
this.mistakes.size(),
this.blunders.size(),
(int) history.stream() | validEntries.forEach(strongholdPathEntry -> losses.add(new Pair<>(strongholdPathEntry, loss(strongholdPath, strongholdPath.getNextEntry(strongholdPathEntry), solution))));
this.inaccuracies = losses.stream().filter(pair -> pair.getRight() >= INACCURACY_THRESHOLD).map(Pair::getLeft).map(StrongholdPathEntry::getCurrentPiece).collect(Collectors.toList());
this.mistakes = losses.stream().filter(pair -> pair.getRight() >= MISTAKE_THRESHOLD).map(Pair::getLeft).map(StrongholdPathEntry::getCurrentPiece).collect(Collectors.toList());
this.blunders = losses.stream().filter(pair -> pair.getRight() >= BLUNDER_THRESHOLD).map(Pair::getLeft).map(StrongholdPathEntry::getCurrentPiece).collect(Collectors.toList());
inaccuracies.removeAll(this.mistakes);
mistakes.removeAll(this.blunders);
ArrayList<Pair<StrongholdGenerator.Piece, Integer>> rooms = new ArrayList<>();
history.forEach(pathEntry -> {
Pair<StrongholdGenerator.Piece, Integer> pair = new Pair<>(pathEntry.getCurrentPiece(), pathEntry.getTicksSpentInPiece().get());
rooms.add(pair);
});
return new PlayerPathData(
rooms,
strongholdPath.getTotalTime(),
computeDifficulty(solution),
history.stream()
.filter(pathEntry -> !solution.contains(pathEntry.getCurrentPiece()))
.map(StrongholdPathEntry::getTicksSpentInPiece)
.mapToInt(AtomicInteger::get)
.sum(),
// TODO: don't count entering the first Five-Way
(int) history.stream()
.map(strongholdPathEntry -> strongholdPath.getNextEntry(strongholdPathEntry))
.filter(Objects::nonNull)
.map(StrongholdPathEntry::getCurrentPiece)
.filter(solution::contains)
.count(),
this.inaccuracies.size(),
this.mistakes.size(),
this.blunders.size(),
(int) history.stream()
.filter(entry -> !(entry.getCurrentPiece() instanceof StrongholdGenerator.PortalRoom))
.filter(entry -> !areAdjacent(entry.getCurrentPiece(), strongholdPath.getNextEntry(entry).getCurrentPiece(), treeAccessor))
.count(),
history.size() - 1,
history.stream()
.filter(entry -> FEINBERG_AVG_ROOM_TIMES.containsKey(entry.getCurrentPiece().getClass()))
.mapToInt(value -> value.getTicksSpentInPiece().get() - FEINBERG_AVG_ROOM_TIMES.get(value.getCurrentPiece().getClass()))
.sum()
);
} |
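
The row above (id 653) buckets each candidate move by comparing its loss against INACCURACY_THRESHOLD, MISTAKE_THRESHOLD, and BLUNDER_THRESHOLD, then removes the overlap between buckets. A stripped-down sketch of that pattern is shown below; the threshold values are placeholders, not the constants used in the project.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

// Generic sketch of threshold bucketing; all numeric values are placeholders.
final class LossBuckets {
    static final double INACCURACY_THRESHOLD = 0.5; // placeholder
    static final double MISTAKE_THRESHOLD = 1.5;    // placeholder
    static final double BLUNDER_THRESHOLD = 3.0;    // placeholder

    // Keep every loss at or above the given threshold, mirroring the stream filters above.
    static List<Double> atLeast(List<Double> losses, double threshold) {
        return losses.stream()
                .filter(l -> l >= threshold)
                .collect(Collectors.toCollection(ArrayList::new));
    }

    public static void main(String[] args) {
        List<Double> losses = List.of(0.2, 0.7, 2.0, 4.0);
        List<Double> inaccuracies = atLeast(losses, INACCURACY_THRESHOLD);
        List<Double> mistakes = atLeast(losses, MISTAKE_THRESHOLD);
        List<Double> blunders = atLeast(losses, BLUNDER_THRESHOLD);
        // As in the row above, drop the overlap so each loss lands in exactly one bucket.
        inaccuracies.removeAll(mistakes);
        mistakes.removeAll(blunders);
        System.out.println(inaccuracies + " " + mistakes + " " + blunders);
    }
}
```
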
656 | 0 | /**
* Prompt to remove an Operator Card. Returns once the card is removed.
*
* @param message Message to display to the user
*/ | public void removeOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Remove Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card to be removed
framebuffer.pressEnter();
renderLoading();
} | NONSATD | true | public void removeOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Remove Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card to be removed
framebuffer.pressEnter();
renderLoading();
} | public void removeOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Remove Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card to be removed
framebuffer.pressEnter();
renderLoading();
} | public void removeOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Remove Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card to be removed
framebuffer.pressEnter();
renderLoading();
} |
656 | 1 | // TODO: Wait on the Operator Card to be removed | public void removeOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Remove Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card to be removed
framebuffer.pressEnter();
renderLoading();
} | IMPLEMENTATION | true | baseScreenLayout(graphics,"Remove Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card to be removed
framebuffer.pressEnter();
renderLoading(); | public void removeOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Remove Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card to be removed
framebuffer.pressEnter();
renderLoading();
} | public void removeOperatorCard(String message) throws IOException {
framebuffer.draw((Graphics2D graphics) ->
baseScreenLayout(graphics,"Remove Operator Card", message, Color.white)
);
// TODO: Wait on the Operator Card to be removed
framebuffer.pressEnter();
renderLoading();
} |
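
The rows for id 656 above carry the matching removal TODO. Under the same assumptions as the insertion sketch earlier, `javax.smartcardio` also exposes a blocking wait for removal:

```java
import javax.smartcardio.CardException;
import javax.smartcardio.CardTerminal;
import javax.smartcardio.TerminalFactory;

// Hypothetical counterpart to the insertion sketch: block until the card is removed.
final class OperatorCardRemovalWait {
    static void awaitCardAbsent() throws CardException {
        CardTerminal terminal = TerminalFactory.getDefault().terminals().list().get(0);
        // A timeout of 0 means "no timeout": block until no card is present in the reader.
        terminal.waitForCardAbsent(0);
    }
}
```
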
8,849 | 0 | /**
* Perform's size 2 calculations.
*/ | private static boolean performCalculationS2(int srcX, int srcY, RouteStrategy strategy) {
return performCalculationSX(srcX, srcY, 2, strategy); // TODO optimized algorhytm's.
} | NONSATD | true | private static boolean performCalculationS2(int srcX, int srcY, RouteStrategy strategy) {
return performCalculationSX(srcX, srcY, 2, strategy); // TODO optimized algorhytm's.
} | private static boolean performCalculationS2(int srcX, int srcY, RouteStrategy strategy) {
return performCalculationSX(srcX, srcY, 2, strategy); // TODO optimized algorhytm's.
} | private static boolean performCalculationS2(int srcX, int srcY, RouteStrategy strategy) {
return performCalculationSX(srcX, srcY, 2, strategy); // TODO optimized algorhytm's.
} |
8,849 | 1 | // TODO optimized algorhytm's. | private static boolean performCalculationS2(int srcX, int srcY, RouteStrategy strategy) {
return performCalculationSX(srcX, srcY, 2, strategy); // TODO optimized algorhytm's.
} | DESIGN | true | private static boolean performCalculationS2(int srcX, int srcY, RouteStrategy strategy) {
return performCalculationSX(srcX, srcY, 2, strategy); // TODO optimized algorhytm's.
} | private static boolean performCalculationS2(int srcX, int srcY, RouteStrategy strategy) {
return performCalculationSX(srcX, srcY, 2, strategy); // TODO optimized algorhytm's.
} | private static boolean performCalculationS2(int srcX, int srcY, RouteStrategy strategy) {
return performCalculationSX(srcX, srcY, 2, strategy); // TODO optimized algorhytm's.
} |
25,235 | 0 | // @Issue("SECURITY-580") | @Test
public void positionalConstructors() throws Exception {
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "['true'] as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "['true'] as Boolean");
String cc = "staticMethod org.kohsuke.groovy.sandbox.impl.Checker checkedCast java.lang.Class java.lang.Object boolean boolean boolean";
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "Boolean x = ['true']; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} | NONSATD | true | @Test
public void positionalConstructors() throws Exception {
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "['true'] as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "['true'] as Boolean");
String cc = "staticMethod org.kohsuke.groovy.sandbox.impl.Checker checkedCast java.lang.Class java.lang.Object boolean boolean boolean";
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "Boolean x = ['true']; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} | @Test
public void positionalConstructors() throws Exception {
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "['true'] as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "['true'] as Boolean");
String cc = "staticMethod org.kohsuke.groovy.sandbox.impl.Checker checkedCast java.lang.Class java.lang.Object boolean boolean boolean";
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "Boolean x = ['true']; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} | @Test
public void positionalConstructors() throws Exception {
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "['true'] as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "['true'] as Boolean");
String cc = "staticMethod org.kohsuke.groovy.sandbox.impl.Checker checkedCast java.lang.Class java.lang.Object boolean boolean boolean";
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "Boolean x = ['true']; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} |
25,235 | 1 | // These go through a different code path: | @Test
public void positionalConstructors() throws Exception {
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "['true'] as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "['true'] as Boolean");
String cc = "staticMethod org.kohsuke.groovy.sandbox.impl.Checker checkedCast java.lang.Class java.lang.Object boolean boolean boolean";
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "Boolean x = ['true']; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} | NONSATD | true | assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList"); | public void positionalConstructors() throws Exception {
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "['true'] as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "['true'] as Boolean");
String cc = "staticMethod org.kohsuke.groovy.sandbox.impl.Checker checkedCast java.lang.Class java.lang.Object boolean boolean boolean";
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "Boolean x = ['true']; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x"); | @Test
public void positionalConstructors() throws Exception {
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "['true'] as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "['true'] as Boolean");
String cc = "staticMethod org.kohsuke.groovy.sandbox.impl.Checker checkedCast java.lang.Class java.lang.Object boolean boolean boolean";
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "Boolean x = ['true']; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} |
25,235 | 2 | /* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/ | @Test
public void positionalConstructors() throws Exception {
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "['true'] as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "['true'] as Boolean");
String cc = "staticMethod org.kohsuke.groovy.sandbox.impl.Checker checkedCast java.lang.Class java.lang.Object boolean boolean boolean";
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "Boolean x = ['true']; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} | DESIGN | true | assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x"); | assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} | @Test
public void positionalConstructors() throws Exception {
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "['true'] as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "['true'] as Boolean");
String cc = "staticMethod org.kohsuke.groovy.sandbox.impl.Checker checkedCast java.lang.Class java.lang.Object boolean boolean boolean";
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "Boolean x = ['true']; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} |
25,235 | 3 | /* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/ | @Test
public void positionalConstructors() throws Exception {
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "['true'] as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "['true'] as Boolean");
String cc = "staticMethod org.kohsuke.groovy.sandbox.impl.Checker checkedCast java.lang.Class java.lang.Object boolean boolean boolean";
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "Boolean x = ['true']; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} | IMPLEMENTATION | true | assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} | assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} | assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "Boolean x = ['true']; x");
assertRejected(new ProxyWhitelist(), "new java.util.TreeMap java.util.Map", "[k: 1] as TreeMap");
assertEvaluate(new StaticWhitelist("new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "[k: 1] as TreeMap");
assertRejected(new StaticWhitelist(cc), "new java.util.TreeMap java.util.Map", "TreeMap x = [k: 1]; x");
assertEvaluate(new StaticWhitelist(cc, "new java.util.TreeMap java.util.Map"), Collections.singletonMap("k", 1), "TreeMap x = [k: 1]; x");
// These go through a different code path:
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "[1] as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "['v'] as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList(1), "LinkedList x = [1]; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "LinkedList x = ['v']; x");
assertEvaluate(new ProxyWhitelist(), Arrays.asList(1), "int[] a = [1]; a as LinkedList");
assertEvaluate(new ProxyWhitelist(), Arrays.asList("v"), "String[] a = ['v']; a as LinkedList");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
assertEvaluate(new StaticWhitelist(cc), Arrays.asList("v"), "String[] a = ['v']; LinkedList x = a; x");
/* TODO casting arrays is not yet supported:
assertRejected(new StaticWhitelist(cc), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; Boolean x = a; x");
assertEvaluate(new StaticWhitelist(cc, "new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; Boolean x = a; x");
assertRejected(new ProxyWhitelist(), "new java.lang.Boolean java.lang.String", "String[] a = ['true']; a as Boolean");
assertEvaluate(new StaticWhitelist("new java.lang.Boolean java.lang.String"), true, "String[] a = ['true']; a as Boolean");
*/
/* TODO tuple assignment is not yet supported:
assertRejected(new ProxyWhitelist(), "new java.util.LinkedList java.util.Collection", "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
assertEvaluate(new StaticWhitelist("new java.util.LinkedList java.util.Collection"), Arrays.asList("v"), "String[] a = ['v']; def (LinkedList x, int y) = [a, 1]; x");
*/
} |
8,858 | 0 | // arbitrary for short text | private static void addWatermarkText(PDDocument doc, PDPage page, PDFont font, String text)
throws IOException
{
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0);
cs.beginText();
cs.newLineAtOffset(x, y);
cs.showText(text);
cs.endText();
}
} | NONSATD | true | = new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight(); | private static void addWatermarkText(PDDocument doc, PDPage page, PDFont font, String text)
throws IOException
{
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect | private static void addWatermarkText(PDDocument doc, PDPage page, PDFont font, String text)
throws IOException
{
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0); |
8,858 | 1 | // "horizontal" position in rotated world | private static void addWatermarkText(PDDocument doc, PDPage page, PDFont font, String text)
throws IOException
{
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0);
cs.beginText();
cs.newLineAtOffset(x, y);
cs.showText(text);
cs.endText();
}
} | NONSATD | true | float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0)); | {
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs); | private static void addWatermarkText(PDDocument doc, PDPage page, PDFont font, String text)
throws IOException
{
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0);
cs.beginText();
cs.newLineAtOffset(x, y);
cs.showText(text);
cs.endText();
}
} |
8,858 | 2 | // 4 is a trial-and-error thing, this lowers the text a bit | private static void addWatermarkText(PDDocument doc, PDPage page, PDFont font, String text)
throws IOException
{
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0);
cs.beginText();
cs.newLineAtOffset(x, y);
cs.showText(text);
cs.endText();
}
} | NONSATD | true | float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight); | try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255. | private static void addWatermarkText(PDDocument doc, PDPage page, PDFont font, String text)
throws IOException
{
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0);
cs.beginText();
cs.newLineAtOffset(x, y);
cs.showText(text);
cs.endText();
}
} |
8,858 | 3 | // cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect | private static void addWatermarkText(PDDocument doc, PDPage page, PDFont font, String text)
throws IOException
{
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0);
cs.beginText();
cs.newLineAtOffset(x, y);
cs.showText(text);
cs.endText();
}
} | NONSATD | true | cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f); | float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0); | private static void addWatermarkText(PDDocument doc, PDPage page, PDFont font, String text)
throws IOException
{
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0);
cs.beginText();
cs.newLineAtOffset(x, y);
cs.showText(text);
cs.endText();
}
} |
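The commented-out cs.setRenderingMode(RenderingMode.STROKE) line preserved in the addWatermarkText rows above hints at an outlined-text variant of the same watermark; a small sketch of that variant (assuming PDFBox 2.x) might be:

import java.io.IOException;

import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.graphics.state.RenderingMode;

class HollowTextSketch {
    // Draws outlined ("hollow") text by switching the text rendering mode to STROKE,
    // instead of filling the glyphs as the archived helper does.
    static void drawHollowText(PDDocument doc, PDPage page, PDFont font, String text)
            throws IOException {
        try (PDPageContentStream cs = new PDPageContentStream(
                doc, page, PDPageContentStream.AppendMode.APPEND, true, true)) {
            cs.setFont(font, 72);
            cs.setRenderingMode(RenderingMode.STROKE); // outline glyphs rather than filling them
            cs.setLineWidth(1.5f);                     // outline thickness
            cs.setStrokingColor(200, 0, 0);            // int overload: components in 0..255
            cs.beginText();
            cs.newLineAtOffset(72, 72);                // one inch in from the bottom-left corner
            cs.showText(text);
            cs.endText();
        }
    }
}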
8,858 | 4 | // some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f | private static void addWatermarkText(PDDocument doc, PDPage page, PDFont font, String text)
throws IOException
{
try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0);
cs.beginText();
cs.newLineAtOffset(x, y);
cs.showText(text);
cs.endText();
}
} | DEFECT | true | gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0); | float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0);
cs.beginText();
cs.newLineAtOffset(x, y);
cs.showText(text);
cs.endText();
}
} | try (PDPageContentStream cs
= new PDPageContentStream(doc, page, PDPageContentStream.AppendMode.APPEND, true, true))
{
float fontHeight = 100; // arbitrary for short text
float width = page.getMediaBox().getWidth();
float height = page.getMediaBox().getHeight();
float stringWidth = font.getStringWidth(text) / 1000 * fontHeight;
float diagonalLength = (float) Math.sqrt(width * width + height * height);
float angle = (float) Math.atan2(height, width);
float x = (diagonalLength - stringWidth) / 2; // "horizontal" position in rotated world
float y = -fontHeight / 4; // 4 is a trial-and-error thing, this lowers the text a bit
cs.transform(Matrix.getRotateInstance(angle, 0, 0));
cs.setFont(font, fontHeight);
// cs.setRenderingMode(RenderingMode.STROKE) // for "hollow" effect
PDExtendedGraphicsState gs = new PDExtendedGraphicsState();
gs.setNonStrokingAlphaConstant(0.2f);
gs.setStrokingAlphaConstant(0.2f);
gs.setBlendMode(BlendMode.MULTIPLY);
gs.setLineWidth(3f);
cs.setGraphicsStateParameters(gs);
// some API weirdness here. When int, range is 0..255.
// when float, this would be 0..1f
cs.setNonStrokingColor(255, 0, 0);
cs.setStrokingColor(255, 0, 0);
cs.beginText();
cs.newLineAtOffset(x, y);
cs.showText(text);
cs.endText();
}
} |
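The "API weirdness" comment in the rows above is about value ranges: the integer colour overloads of PDPageContentStream expect components in 0..255, while float-based PDF colour components are normalised to 0..1. A compilable illustration of that distinction (assuming PDFBox 2.x, using PDColor for the float form) is:

import java.io.IOException;

import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.graphics.color.PDColor;
import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceRGB;

final class ColorRangeSketch {
    // Both calls below paint "full red"; only the numeric scale differs.
    static void paintRed(PDPageContentStream cs) throws IOException {
        cs.setNonStrokingColor(255, 0, 0);   // int overload: each channel ranges 0..255
        PDColor red = new PDColor(new float[] { 1f, 0f, 0f }, PDDeviceRGB.INSTANCE);
        cs.setNonStrokingColor(red);         // float components: each channel ranges 0..1
    }
}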
33,435 | 0 | // "_all" : {"enabled" : true} | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | NONSATD | true | String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false)); | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model) | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) { |
33,435 | 1 | // Map each domain properties in supported format, or object for complex type | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | NONSATD | true | Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping? | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) { | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) { |
33,435 | 2 | // Does it have custom mapping? | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | DESIGN | true | // Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName(); | public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
} | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) { |
33,435 | 3 | // Add the custom mapping (searchable static property in domain model) | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | NONSATD | true | String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) { | // "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) { | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date"; |
33,435 | 4 | // Handle embedded persistent collections, ie List<String> listOfThings | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | NONSATD | true | }
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) { | String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
} | String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double"; |
33,435 | 5 | // Handle arrays | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | NONSATD | true | propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH); | LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) { | }
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) { |
33,435 | 6 | // Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries. | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | NONSATD | true | propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) { | } else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) { | if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) { |
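[Editorial aside — not part of the dataset rows above or below.] Row 6's comment describes a fallback rule in the mapping code: a property backed by a custom converter is indexed as a plain ES "string", and array-valued fields need no special handling because ElasticSearch resolves arrays itself. The following minimal Java sketch only illustrates that branch order; the EsType enum, the hasConverter/geoPoint flags, and the example classes are invented here and are not the plugin's actual API.

// Illustrative sketch of the type-fallback idea discussed in row 6; not the plugin's real code.
final class TypeResolutionSketch {
    enum EsType { STRING, DOUBLE, DATE, GEO_POINT, OBJECT }

    static EsType resolve(Class<?> propertyType, boolean hasConverter, boolean geoPoint) {
        if (geoPoint) return EsType.GEO_POINT;                                    // geo points are special-cased first
        if (java.util.Date.class.isAssignableFrom(propertyType)) return EsType.DATE;
        if (propertyType.isEnum()) return EsType.STRING;                          // enums index as strings
        if (hasConverter) return EsType.STRING;                                   // converter-backed value -> string
        if (java.math.BigDecimal.class.isAssignableFrom(propertyType)) return EsType.DOUBLE;
        return EsType.OBJECT;                                                     // anything else becomes an object
    }

    public static void main(String[] args) {
        System.out.println(resolve(java.util.Currency.class, true, false));      // STRING
        System.out.println(resolve(java.math.BigDecimal.class, false, false));   // DOUBLE
    }
}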
33,435 | 7 | // todo should this be string?? | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | DESIGN | true | propType = "double";
} else {
// todo should this be string??
propType = "object";
} | propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>) | if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
} |
33,435 | 8 | // fixme: think about composite ids. | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | DESIGN | true | }
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping. | // Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer. | } else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
} |
33,435 | 9 | // Proceed with nested mapping.
// todo limit depth to avoid endless recursion? | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | DESIGN | true | propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked | propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) { | if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm); |
33,435 | 10 | //noinspection unchecked | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | TEST | true | // todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next())); | } else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props); | } else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
} |
33,435 | 11 | // Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer. | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | NONSATD | true | (getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties"); | if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
} | } else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/ |
33,435 | 12 | // See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/ | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | NONSATD | true | }
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all? | }
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) { | // ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties); |
33,435 | 13 | // does it make sense to include objects into _all? | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | DESIGN | true | // See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false); | parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
} | Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping; |
33,435 | 14 | // todo only enable this through configuration... | @SuppressWarnings("unchecked")
public static Map<String, Object> getElasticMapping(SearchableClassMapping scm) {
Map<String, Object> elasticTypeMappingProperties = new LinkedHashMap<String, Object>();
String parentType = null;
if (!scm.isAll()) {
// "_all" : {"enabled" : true}
elasticTypeMappingProperties.put("_all",
Collections.singletonMap("enabled", false));
}
// Map each domain properties in supported format, or object for complex type
for(SearchableClassPropertyMapping scpm : scm.getPropertiesMapping()) {
// Does it have custom mapping?
GrailsDomainClassProperty property = scpm.getGrailsProperty();
String propType = property.getTypePropertyName();
Map<String, Object> propOptions = new LinkedHashMap<String, Object>();
// Add the custom mapping (searchable static property in domain model)
propOptions.putAll(scpm.getAttributes());
if (!(SUPPORTED_FORMAT.contains(propType))) {
LOG.debug("propType not supported: " + propType + " name: " + property.getName());
if (scpm.isGeoPoint()) {
propType = "geo_point";
}
else if (property.isBasicCollectionType()) {
// Handle embedded persistent collections, ie List<String> listOfThings
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
// Handle arrays
} else if (property.getReferencedPropertyType().isArray()) {
String basicType = ClassUtils.getShortName(property.getReferencedPropertyType().getComponentType()).toLowerCase(Locale.ENGLISH);
if (SUPPORTED_FORMAT.contains(basicType)) {
propType = basicType;
}
} else if (isDateType(property.getReferencedPropertyType())) {
propType = "date";
} else if (GrailsClassUtils.isJdk5Enum(property.getReferencedPropertyType())) {
propType = "string";
} else if (scpm.getConverter() != null) {
// Use 'string' type for properties with custom converter.
// Arrays are automatically resolved by ElasticSearch, so no worries.
propType = "string";
} else if (java.math.BigDecimal.class.isAssignableFrom(property.getReferencedPropertyType())) {
propType = "double";
} else {
// todo should this be string??
propType = "object";
}
if (scpm.getReference() != null) {
propType = "object"; // fixme: think about composite ids.
} else if (scpm.isComponent()) {
// Proceed with nested mapping.
// todo limit depth to avoid endless recursion?
propType = "object";
//noinspection unchecked
propOptions.putAll((Map<String, Object>)
(getElasticMapping(scpm.getComponentPropertyMapping()).values().iterator().next()));
}
// Once it is an object, we need to add id & class mappings, otherwise
// ES will fail with NullPointer.
if (scpm.isComponent() || scpm.getReference() != null) {
Map<String, Object> props = (Map<String, Object>) propOptions.get("properties");
if (props == null) {
props = new LinkedHashMap<String, Object>();
propOptions.put("properties", props);
}
props.put("id", defaultDescriptor("long", "not_analyzed", true));
props.put("class", defaultDescriptor("string", "no", true));
props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} | IMPLEMENTATION | true | }
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets"); | propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
} | props.put("ref", defaultDescriptor("string", "no", true));
}
if (scpm.isParentKey()) {
parentType = property.getTypePropertyName();
scm.setParent(scpm);
}
}
else if (scpm.isGeoPoint()) {
propType = "geo_point";
}
propOptions.put("type", propType);
// See http://www.elasticsearch.com/docs/elasticsearch/mapping/all_field/
if (!propType.equals("object") && scm.isAll()) {
// does it make sense to include objects into _all?
if (scpm.shouldExcludeFromAll()) {
propOptions.put("include_in_all", false);
} else {
propOptions.put("include_in_all", true);
}
}
// todo only enable this through configuration...
if (propType.equals("string") && scpm.isAnalyzed()) {
propOptions.put("term_vector", "with_positions_offsets");
}
elasticTypeMappingProperties.put(scpm.getPropertyName(), propOptions);
}
Map<String, Object> mapping = new LinkedHashMap<String, Object>();
Map<String, Object> objectMapping = new LinkedHashMap<String, Object>();
if (parentType != null) {
objectMapping.put("_parent", Collections.singletonMap("type", parentType));
}
objectMapping.put("properties", elasticTypeMappingProperties);
mapping.put(scm.getElasticTypeName(), objectMapping);
return mapping;
} |
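The Grails-Elasticsearch rows above assemble the type mapping as nested Maps before serialising them to JSON. A minimal stand-alone Java sketch of that shape; the property names "title" and "author" are invented for illustration, and the real builder is the Groovy closure shown in the rows:

import java.util.LinkedHashMap;
import java.util.Map;

public class MappingSketch {
    // Builds {"tweet": {"properties": {"title": {...}, "author": {...}}}} as plain Maps.
    public static void main(String[] args) {
        Map<String, Object> title = new LinkedHashMap<>();
        title.put("type", "string");
        title.put("term_vector", "with_positions_offsets"); // only set for analyzed string fields
        title.put("include_in_all", true);

        Map<String, Object> authorId = new LinkedHashMap<>();
        authorId.put("type", "long");
        authorId.put("index", "not_analyzed");

        Map<String, Object> author = new LinkedHashMap<>();
        author.put("type", "object"); // nested component, as in the scpm.isComponent() branch
        author.put("properties", Map.of("id", authorId));

        Map<String, Object> properties = new LinkedHashMap<>();
        properties.put("title", title);
        properties.put("author", author);

        Map<String, Object> mapping = Map.of("tweet", Map.of("properties", properties));
        System.out.println(mapping);
    }
}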
17,055 | 0 | /**
* send an email to given email address
* @param userId1
* @param userId2
* @param asgmtName
* @param score
* @param recipientMail
* @param reportLink
* @throws Exception
*/ | public void sendEmail(String userId1, String userId2, String asgmtName, double score, String recipientMail,
String reportLink) throws Exception {
MimeMessage message = sender.createMimeMessage();
MimeMessageHelper helper = new MimeMessageHelper(message);
String stud1 = userService.findById(userId1).getfName();
String stud2 = userService.findById(userId2).getfName();
helper.setTo("[email protected]");
helper.setText("Codesniffer found plagiarised submission with similarity score" + score
+ "Click the below link to view the full report+\n" + "https://s3.amazonaws.com/codesniffer-reports/"
+ reportLink+ "/match0.html");
// to do add link in email
helper.setSubject("Plag detected in " + asgmtName + " between " + stud1 + " and " + stud2);
sender.send(message);
} | NONSATD | true | public void sendEmail(String userId1, String userId2, String asgmtName, double score, String recipientMail,
String reportLink) throws Exception {
MimeMessage message = sender.createMimeMessage();
MimeMessageHelper helper = new MimeMessageHelper(message);
String stud1 = userService.findById(userId1).getfName();
String stud2 = userService.findById(userId2).getfName();
helper.setTo("[email protected]");
helper.setText("Codesniffer found plagiarised submission with similarity score" + score
+ "Click the below link to view the full report+\n" + "https://s3.amazonaws.com/codesniffer-reports/"
+ reportLink+ "/match0.html");
// to do add link in email
helper.setSubject("Plag detected in " + asgmtName + " between " + stud1 + " and " + stud2);
sender.send(message);
} | public void sendEmail(String userId1, String userId2, String asgmtName, double score, String recipientMail,
String reportLink) throws Exception {
MimeMessage message = sender.createMimeMessage();
MimeMessageHelper helper = new MimeMessageHelper(message);
String stud1 = userService.findById(userId1).getfName();
String stud2 = userService.findById(userId2).getfName();
helper.setTo("[email protected]");
helper.setText("Codesniffer found plagiarised submission with similarity score" + score
+ "Click the below link to view the full report+\n" + "https://s3.amazonaws.com/codesniffer-reports/"
+ reportLink+ "/match0.html");
// to do add link in email
helper.setSubject("Plag detected in " + asgmtName + " between " + stud1 + " and " + stud2);
sender.send(message);
} | public void sendEmail(String userId1, String userId2, String asgmtName, double score, String recipientMail,
String reportLink) throws Exception {
MimeMessage message = sender.createMimeMessage();
MimeMessageHelper helper = new MimeMessageHelper(message);
String stud1 = userService.findById(userId1).getfName();
String stud2 = userService.findById(userId2).getfName();
helper.setTo("[email protected]");
helper.setText("Codesniffer found plagiarised submission with similarity score" + score
+ "Click the below link to view the full report+\n" + "https://s3.amazonaws.com/codesniffer-reports/"
+ reportLink+ "/match0.html");
// to do add link in email
helper.setSubject("Plag detected in " + asgmtName + " between " + stud1 + " and " + stud2);
sender.send(message);
} |
17,055 | 1 | // to do add link in email | public void sendEmail(String userId1, String userId2, String asgmtName, double score, String recipientMail,
String reportLink) throws Exception {
MimeMessage message = sender.createMimeMessage();
MimeMessageHelper helper = new MimeMessageHelper(message);
String stud1 = userService.findById(userId1).getfName();
String stud2 = userService.findById(userId2).getfName();
helper.setTo("[email protected]");
helper.setText("Codesniffer found plagiarised submission with similarity score" + score
+ "Click the below link to view the full report+\n" + "https://s3.amazonaws.com/codesniffer-reports/"
+ reportLink+ "/match0.html");
// to do add link in email
helper.setSubject("Plag detected in " + asgmtName + " between " + stud1 + " and " + stud2);
sender.send(message);
} | IMPLEMENTATION | true | + "Click the below link to view the full report+\n" + "https://s3.amazonaws.com/codesniffer-reports/"
+ reportLink+ "/match0.html");
// to do add link in email
helper.setSubject("Plag detected in " + asgmtName + " between " + stud1 + " and " + stud2);
sender.send(message); | public void sendEmail(String userId1, String userId2, String asgmtName, double score, String recipientMail,
String reportLink) throws Exception {
MimeMessage message = sender.createMimeMessage();
MimeMessageHelper helper = new MimeMessageHelper(message);
String stud1 = userService.findById(userId1).getfName();
String stud2 = userService.findById(userId2).getfName();
helper.setTo("[email protected]");
helper.setText("Codesniffer found plagiarised submission with similarity score" + score
+ "Click the below link to view the full report+\n" + "https://s3.amazonaws.com/codesniffer-reports/"
+ reportLink+ "/match0.html");
// to do add link in email
helper.setSubject("Plag detected in " + asgmtName + " between " + stud1 + " and " + stud2);
sender.send(message);
} | public void sendEmail(String userId1, String userId2, String asgmtName, double score, String recipientMail,
String reportLink) throws Exception {
MimeMessage message = sender.createMimeMessage();
MimeMessageHelper helper = new MimeMessageHelper(message);
String stud1 = userService.findById(userId1).getfName();
String stud2 = userService.findById(userId2).getfName();
helper.setTo("[email protected]");
helper.setText("Codesniffer found plagiarised submission with similarity score" + score
+ "Click the below link to view the full report+\n" + "https://s3.amazonaws.com/codesniffer-reports/"
+ reportLink+ "/match0.html");
// to do add link in email
helper.setSubject("Plag detected in " + asgmtName + " between " + stud1 + " and " + stud2);
sender.send(message);
} |
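The 17,055 rows above exercise Spring's JavaMail support. A hedged, minimal sketch of the same pattern, assuming the javax.mail-era Spring artifacts the original code uses; the class name and parameters here are invented:

import javax.mail.internet.MimeMessage;

import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.mail.javamail.MimeMessageHelper;

public class PlagiarismMailer {

    private final JavaMailSender sender;

    public PlagiarismMailer(JavaMailSender sender) {
        this.sender = sender;
    }

    // Sends a plain-text notification to the given recipient.
    public void notify(String recipient, String assignment, double score, String reportLink) throws Exception {
        MimeMessage message = sender.createMimeMessage();
        MimeMessageHelper helper = new MimeMessageHelper(message);
        helper.setTo(recipient); // use the supplied address rather than a hard-coded one
        helper.setSubject("Plagiarism detected in " + assignment);
        helper.setText("Similarity score: " + score + "\nFull report: " + reportLink);
        sender.send(message);
    }
}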
25,248 | 0 | /**
* Build a chart from the the *first* dimension of the specified data.
* Ranges are detected automatically.
*
* @param data
* @param numBuckets
* @return
*/ | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} | NONSATD | true | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} |
25,248 | 1 | // Do a pass to compute range | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} | NONSATD | true | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList(); | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy); | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} |
25,248 | 2 | // TODO: Check min and max are sensible? | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} | IMPLEMENTATION | true | max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified | int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} |
25,248 | 3 | // TODO: Allow the num of buckets to be specified | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} | IMPLEMENTATION | true | // TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets)); | double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} | public static HistogramChart fromData(Iterable<? extends Vector> data,
int numBuckets) {
// Do a pass to compute range
double min = Double.MAX_VALUE, max = Double.MIN_VALUE;
ArrayList<Double> copy = new ArrayList();
for (Vector datum : data) {
double x = datum.get(0);
copy.add(x);
min = Math.min(min, x);
max = Math.max(max, x);
}
// TODO: Check min and max are sensible?
double[] array = MathUtils.toArray(copy);
// TODO: Allow the num of buckets to be specified
HistogramData dist = new HistogramData(new GridInfo(min, max,
numBuckets));
for (int i = 0; i < array.length; i++) {
dist.count(array[i]);
}
return new HistogramChart(dist);
} |
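The four 25,248 rows annotate one histogram builder. A dependency-free sketch of its two-pass range-then-bucket logic, with plain arrays standing in for the project's HistogramData and GridInfo types:

import java.util.Arrays;

public class HistogramSketch {

    // Counts values into numBuckets equal-width buckets spanning [min, max].
    static int[] histogram(double[] values, int numBuckets) {
        double min = Double.POSITIVE_INFINITY;
        double max = Double.NEGATIVE_INFINITY;
        for (double v : values) { // first pass: detect the range
            min = Math.min(min, v);
            max = Math.max(max, v);
        }
        int[] counts = new int[numBuckets];
        double width = (max - min) / numBuckets;
        for (double v : values) { // second pass: assign each value to a bucket
            int idx = (width == 0) ? 0 : (int) ((v - min) / width);
            if (idx == numBuckets) {
                idx--; // the maximum value belongs to the last bucket
            }
            counts[idx]++;
        }
        return counts;
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(histogram(new double[] {1, 2, 2, 3, 9}, 4)));
    }
}

Seeding the running maximum with Double.NEGATIVE_INFINITY sidesteps a common pitfall: Double.MIN_VALUE is the smallest positive double, not the most negative one.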
17,057 | 0 | /**
* Runs the current data through the deflater. As much as possible this will be buffered in the current output
* stream.
*
* @throws IOException
*/ | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try {
PooledByteBuffer pooled = this.currentBuffer;
final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH);
Connectors.updateResponseBytesSent(exchange, count);
if (count != 0) {
int remaining = outputBuffer.remaining();
if (remaining > count) {
outputBuffer.put(buffer, 0, count);
} else {
if (remaining == count) {
outputBuffer.put(buffer, 0, count);
} else {
outputBuffer.put(buffer, 0, remaining);
additionalBuffer = ByteBuffer.wrap(buffer, remaining, count - remaining);
}
outputBuffer.flip();
this.state |= FLUSHING_BUFFER;
if (next == null) {
nextCreated = true;
this.next = createNextChannel();
}
if (!performFlushIfRequired()) {
return;
}
}
} else {
force = false;
}
}
} finally {
if (nextCreated) {
if (anyAreSet(state, WRITES_RESUMED)) {
next.resumeWrites();
}
}
}
} | NONSATD | true | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try {
PooledByteBuffer pooled = this.currentBuffer;
final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH);
Connectors.updateResponseBytesSent(exchange, count);
if (count != 0) {
int remaining = outputBuffer.remaining();
if (remaining > count) {
outputBuffer.put(buffer, 0, count);
} else {
if (remaining == count) {
outputBuffer.put(buffer, 0, count);
} else {
outputBuffer.put(buffer, 0, remaining);
additionalBuffer = ByteBuffer.wrap(buffer, remaining, count - remaining);
}
outputBuffer.flip();
this.state |= FLUSHING_BUFFER;
if (next == null) {
nextCreated = true;
this.next = createNextChannel();
}
if (!performFlushIfRequired()) {
return;
}
}
} else {
force = false;
}
}
} finally {
if (nextCreated) {
if (anyAreSet(state, WRITES_RESUMED)) {
next.resumeWrites();
}
}
}
} | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try {
PooledByteBuffer pooled = this.currentBuffer;
final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH);
Connectors.updateResponseBytesSent(exchange, count);
if (count != 0) {
int remaining = outputBuffer.remaining();
if (remaining > count) {
outputBuffer.put(buffer, 0, count);
} else {
if (remaining == count) {
outputBuffer.put(buffer, 0, count);
} else {
outputBuffer.put(buffer, 0, remaining);
additionalBuffer = ByteBuffer.wrap(buffer, remaining, count - remaining);
}
outputBuffer.flip();
this.state |= FLUSHING_BUFFER;
if (next == null) {
nextCreated = true;
this.next = createNextChannel();
}
if (!performFlushIfRequired()) {
return;
}
}
} else {
force = false;
}
}
} finally {
if (nextCreated) {
if (anyAreSet(state, WRITES_RESUMED)) {
next.resumeWrites();
}
}
}
} | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try {
PooledByteBuffer pooled = this.currentBuffer;
final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH);
Connectors.updateResponseBytesSent(exchange, count);
if (count != 0) {
int remaining = outputBuffer.remaining();
if (remaining > count) {
outputBuffer.put(buffer, 0, count);
} else {
if (remaining == count) {
outputBuffer.put(buffer, 0, count);
} else {
outputBuffer.put(buffer, 0, remaining);
additionalBuffer = ByteBuffer.wrap(buffer, remaining, count - remaining);
}
outputBuffer.flip();
this.state |= FLUSHING_BUFFER;
if (next == null) {
nextCreated = true;
this.next = createNextChannel();
}
if (!performFlushIfRequired()) {
return;
}
}
} else {
force = false;
}
}
} finally {
if (nextCreated) {
if (anyAreSet(state, WRITES_RESUMED)) {
next.resumeWrites();
}
}
}
} |
17,057 | 1 | //we don't need to flush here, as this should have been called already by the time we get to
//this point | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try {
PooledByteBuffer pooled = this.currentBuffer;
final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH);
Connectors.updateResponseBytesSent(exchange, count);
if (count != 0) {
int remaining = outputBuffer.remaining();
if (remaining > count) {
outputBuffer.put(buffer, 0, count);
} else {
if (remaining == count) {
outputBuffer.put(buffer, 0, count);
} else {
outputBuffer.put(buffer, 0, remaining);
additionalBuffer = ByteBuffer.wrap(buffer, remaining, count - remaining);
}
outputBuffer.flip();
this.state |= FLUSHING_BUFFER;
if (next == null) {
nextCreated = true;
this.next = createNextChannel();
}
if (!performFlushIfRequired()) {
return;
}
}
} else {
force = false;
}
}
} finally {
if (nextCreated) {
if (anyAreSet(state, WRITES_RESUMED)) {
next.resumeWrites();
}
}
}
} | NONSATD | true | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try { | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try {
PooledByteBuffer pooled = this.currentBuffer;
final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH);
Connectors.updateResponseBytesSent(exchange, count);
if (count != 0) { | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try {
PooledByteBuffer pooled = this.currentBuffer;
final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH);
Connectors.updateResponseBytesSent(exchange, count);
if (count != 0) {
int remaining = outputBuffer.remaining();
if (remaining > count) {
outputBuffer.put(buffer, 0, count);
} else {
if (remaining == count) {
outputBuffer.put(buffer, 0, count);
} else {
outputBuffer.put(buffer, 0, remaining);
additionalBuffer = ByteBuffer.wrap(buffer, remaining, count - remaining);
} |
17,057 | 2 | //TODO: we should pool this and make it configurable or something | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try {
PooledByteBuffer pooled = this.currentBuffer;
final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH);
Connectors.updateResponseBytesSent(exchange, count);
if (count != 0) {
int remaining = outputBuffer.remaining();
if (remaining > count) {
outputBuffer.put(buffer, 0, count);
} else {
if (remaining == count) {
outputBuffer.put(buffer, 0, count);
} else {
outputBuffer.put(buffer, 0, remaining);
additionalBuffer = ByteBuffer.wrap(buffer, remaining, count - remaining);
}
outputBuffer.flip();
this.state |= FLUSHING_BUFFER;
if (next == null) {
nextCreated = true;
this.next = createNextChannel();
}
if (!performFlushIfRequired()) {
return;
}
}
} else {
force = false;
}
}
} finally {
if (nextCreated) {
if (anyAreSet(state, WRITES_RESUMED)) {
next.resumeWrites();
}
}
}
} | DESIGN | true | final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH); | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try {
PooledByteBuffer pooled = this.currentBuffer;
final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH);
Connectors.updateResponseBytesSent(exchange, count);
if (count != 0) {
int remaining = outputBuffer.remaining();
if (remaining > count) {
outputBuffer.put(buffer, 0, count);
} else {
if (remaining == count) {
outputBuffer.put(buffer, 0, count); | private void deflateData(boolean force) throws IOException {
//we don't need to flush here, as this should have been called already by the time we get to
//this point
boolean nextCreated = false;
try {
PooledByteBuffer pooled = this.currentBuffer;
final ByteBuffer outputBuffer = pooled.getBuffer();
final boolean shutdown = anyAreSet(state, SHUTDOWN);
byte[] buffer = new byte[1024]; //TODO: we should pool this and make it configurable or something
while (force || !deflater.needsInput() || (shutdown && !deflater.finished())) {
int count = deflater.deflate(buffer, 0, buffer.length, force ? Deflater.SYNC_FLUSH: Deflater.NO_FLUSH);
Connectors.updateResponseBytesSent(exchange, count);
if (count != 0) {
int remaining = outputBuffer.remaining();
if (remaining > count) {
outputBuffer.put(buffer, 0, count);
} else {
if (remaining == count) {
outputBuffer.put(buffer, 0, count);
} else {
outputBuffer.put(buffer, 0, remaining);
additionalBuffer = ByteBuffer.wrap(buffer, remaining, count - remaining);
}
outputBuffer.flip();
this.state |= FLUSHING_BUFFER;
if (next == null) {
nextCreated = true;
this.next = createNextChannel();
} |
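The 17,057 rows wrap java.util.zip.Deflater in Undertow's buffered conduit. With the channel state stripped away, the core drain loop reduces to this sketch; the 1024-byte scratch buffer mirrors the one the TODO wants pooled:

import java.io.ByteArrayOutputStream;
import java.util.zip.Deflater;

public class DeflateSketch {

    // Compresses the whole input, draining the deflater into a growable buffer.
    static byte[] deflate(byte[] input) {
        Deflater deflater = new Deflater();
        deflater.setInput(input);
        deflater.finish();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024]; // scratch buffer, analogous to the pooled one above
        while (!deflater.finished()) {
            int count = deflater.deflate(buffer, 0, buffer.length, Deflater.NO_FLUSH);
            out.write(buffer, 0, count);
        }
        deflater.end();
        return out.toByteArray();
    }

    public static void main(String[] args) {
        System.out.println(deflate("hello hello hello".getBytes()).length + " compressed bytes");
    }
}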
675 | 0 | //TODO figure out how to act with this stuff | @Override
public int describeContents() {
return 0;//TODO figure out how to act with this stuff
} | DESIGN | true | @Override
public int describeContents() {
return 0;//TODO figure out how to act with this stuff
} | @Override
public int describeContents() {
return 0;//TODO figure out how to act with this stuff
} | @Override
public int describeContents() {
return 0;//TODO figure out how to act with this stuff
} |
17,067 | 0 | //TODO support specified height/width/alignment | protected void closeLayout( )
{
//TODO support specified height/width/alignment
if ( root != null )
{
root.setContentHeight( childHeight );
IStyle areaStyle = root.getStyle( );
int width = getCurrentIP( )
+ getOffsetX( )
+ getDimensionValue( areaStyle
.getProperty( StyleConstants.STYLE_PADDING_RIGHT ) )
+ getDimensionValue( areaStyle
.getProperty( StyleConstants.STYLE_BORDER_RIGHT_WIDTH ) );
root.setWidth( width );
int height = 0;
Iterator iter = root.getChildren( );
while(iter.hasNext())
{
AbstractArea child = (AbstractArea)iter.next( );
height = Math.max( height, child.getAllocatedHeight( ));
}
root.setContentHeight( height );
}
//FIXME verticalAlign may effect the root height.
verticalAlign();
} | IMPLEMENTATION | true | protected void closeLayout( )
{
//TODO support specified height/width/alignment
if ( root != null )
{ | protected void closeLayout( )
{
//TODO support specified height/width/alignment
if ( root != null )
{
root.setContentHeight( childHeight );
IStyle areaStyle = root.getStyle( );
int width = getCurrentIP( )
+ getOffsetX( )
+ getDimensionValue( areaStyle
.getProperty( StyleConstants.STYLE_PADDING_RIGHT ) )
+ getDimensionValue( areaStyle
.getProperty( StyleConstants.STYLE_BORDER_RIGHT_WIDTH ) ); | protected void closeLayout( )
{
//TODO support specified height/width/alignment
if ( root != null )
{
root.setContentHeight( childHeight );
IStyle areaStyle = root.getStyle( );
int width = getCurrentIP( )
+ getOffsetX( )
+ getDimensionValue( areaStyle
.getProperty( StyleConstants.STYLE_PADDING_RIGHT ) )
+ getDimensionValue( areaStyle
.getProperty( StyleConstants.STYLE_BORDER_RIGHT_WIDTH ) );
root.setWidth( width );
int height = 0;
Iterator iter = root.getChildren( );
while(iter.hasNext())
{
AbstractArea child = (AbstractArea)iter.next( );
height = Math.max( height, child.getAllocatedHeight( ));
}
root.setContentHeight( height );
} |
17,067 | 1 | //FIXME verticalAlign may effect the root height. | protected void closeLayout( )
{
//TODO support specified height/width/alignment
if ( root != null )
{
root.setContentHeight( childHeight );
IStyle areaStyle = root.getStyle( );
int width = getCurrentIP( )
+ getOffsetX( )
+ getDimensionValue( areaStyle
.getProperty( StyleConstants.STYLE_PADDING_RIGHT ) )
+ getDimensionValue( areaStyle
.getProperty( StyleConstants.STYLE_BORDER_RIGHT_WIDTH ) );
root.setWidth( width );
int height = 0;
Iterator iter = root.getChildren( );
while(iter.hasNext())
{
AbstractArea child = (AbstractArea)iter.next( );
height = Math.max( height, child.getAllocatedHeight( ));
}
root.setContentHeight( height );
}
//FIXME verticalAlign may effect the root height.
verticalAlign();
} | DEFECT | true | root.setContentHeight( height );
}
//FIXME verticalAlign may effect the root height.
verticalAlign();
} | root.setWidth( width );
int height = 0;
Iterator iter = root.getChildren( );
while(iter.hasNext())
{
AbstractArea child = (AbstractArea)iter.next( );
height = Math.max( height, child.getAllocatedHeight( ));
}
root.setContentHeight( height );
}
//FIXME verticalAlign may effect the root height.
verticalAlign();
} | if ( root != null )
{
root.setContentHeight( childHeight );
IStyle areaStyle = root.getStyle( );
int width = getCurrentIP( )
+ getOffsetX( )
+ getDimensionValue( areaStyle
.getProperty( StyleConstants.STYLE_PADDING_RIGHT ) )
+ getDimensionValue( areaStyle
.getProperty( StyleConstants.STYLE_BORDER_RIGHT_WIDTH ) );
root.setWidth( width );
int height = 0;
Iterator iter = root.getChildren( );
while(iter.hasNext())
{
AbstractArea child = (AbstractArea)iter.next( );
height = Math.max( height, child.getAllocatedHeight( ));
}
root.setContentHeight( height );
}
//FIXME verticalAlign may effect the root height.
verticalAlign();
} |
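The 17,067 rows size a BIRT line area: width comes from the current inline position plus the right padding and border, height from the tallest child. A framework-free sketch of that arithmetic, with plain ints in place of the area and style objects:

import java.util.List;

public class ContainerSizer {

    // Returns {width, height} for a container given its children's allocated heights.
    static int[] size(int currentIP, int offsetX, int paddingRight, int borderRight,
            List<Integer> childHeights) {
        int width = currentIP + offsetX + paddingRight + borderRight;
        int height = 0;
        for (int h : childHeights) {
            height = Math.max(height, h); // tallest child wins
        }
        return new int[] {width, height};
    }

    public static void main(String[] args) {
        int[] wh = size(120, 10, 4, 2, List.of(30, 55, 42));
        System.out.println("width=" + wh[0] + " height=" + wh[1]);
    }
}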
685 | 0 | // TODO show resulting document | public static final int testElementParsing (
final PrintStream out, final BufferedReader in, final String ... args)
{
final String ans=getval(out, in, "use owner document y/[n]/q");
if (isQuit(ans))
return 0;
Document owner=null;
if ((ans != null) && (ans.length() > 0) && ('y' == Character.toLowerCase(ans.charAt(0))))
{
try
{
owner = DOMUtils.createDefaultDocument();
}
catch(ParserConfigurationException e)
{
System.err.println(e.getClass().getName() + ": " + e.getMessage());
}
}
final int nErr=testElementParsing(out, in, owner, args);
if (owner != null)
{
// TODO show resulting document
}
return nErr;
} | IMPLEMENTATION | true | if (owner != null)
{
// TODO show resulting document
}
return nErr; | owner = DOMUtils.createDefaultDocument();
}
catch(ParserConfigurationException e)
{
System.err.println(e.getClass().getName() + ": " + e.getMessage());
}
}
final int nErr=testElementParsing(out, in, owner, args);
if (owner != null)
{
// TODO show resulting document
}
return nErr;
} | final PrintStream out, final BufferedReader in, final String ... args)
{
final String ans=getval(out, in, "use owner document y/[n]/q");
if (isQuit(ans))
return 0;
Document owner=null;
if ((ans != null) && (ans.length() > 0) && ('y' == Character.toLowerCase(ans.charAt(0))))
{
try
{
owner = DOMUtils.createDefaultDocument();
}
catch(ParserConfigurationException e)
{
System.err.println(e.getClass().getName() + ": " + e.getMessage());
}
}
final int nErr=testElementParsing(out, in, owner, args);
if (owner != null)
{
// TODO show resulting document
}
return nErr;
} |
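Row 685 optionally creates an owner DOM Document before parsing elements into it. A minimal JAXP sketch of that step; the row's DOMUtils.createDefaultDocument() helper presumably wraps something similar, but that is an assumption:

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class OwnerDocumentSketch {

    public static void main(String[] args) throws ParserConfigurationException {
        // An empty document that can own newly created elements.
        Document owner = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
        Element root = owner.createElement("parsed");
        owner.appendChild(root);
        System.out.println("root=" + owner.getDocumentElement().getTagName());
    }
}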
17,078 | 0 | /**
* Returns the maximum value that this calendar field could have,
* taking into consideration the given time value and the current
* values of the
* {@link Calendar#getFirstDayOfWeek() getFirstDayOfWeek},
* {@link Calendar#getMinimalDaysInFirstWeek() getMinimalDaysInFirstWeek},
* and
* {@link Calendar#getTimeZone() getTimeZone} methods.
* For example, if the date of this instance is Heisei 16February 1,
* the actual maximum value of the <code>DAY_OF_MONTH</code> field
* is 29 because Heisei 16 is a leap year, and if the date of this
* instance is Heisei 17 February 1, it's 28.
*
* @param field the calendar field
* @return the maximum of the given field for the time value of
* this <code>JapaneseImperialCalendar</code>
* @see #getMinimum(int)
* @see #getMaximum(int)
* @see #getGreatestMinimum(int)
* @see #getLeastMaximum(int)
* @see #getActualMinimum(int)
*/ | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | NONSATD | true | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} |
17,078 | 1 | // TODO: there may be multiple transitions in a year. | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | IMPLEMENTATION | true | value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) { | }
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone(); | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
} |
17,078 | 2 | // Handle transition year.
// TODO: there may be multiple transitions in a year. | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | IMPLEMENTATION | true | {
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) { | }
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) { | LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d); |
17,078 | 3 | // shift 400 years to avoid underflow | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | NONSATD | true | } else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d); | {
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1); | jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value |
17,078 | 4 | // Get the day of week of January 1 of the year | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | NONSATD | true | value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1); | long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1; | long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
} |
17,078 | 5 | // Normalize the day of week with the firstDayOfWeek value | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | NONSATD | true | d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) { | int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
} | // shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) { |
17,078 | 6 | // # of days in the first week | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | NONSATD | true | dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) { | getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
} | value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{ |
17,078 | 7 | // Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow. | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | NONSATD | true | d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400); | }
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value); | ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
} |
17,078 | 8 | // Use the same year as d.getYear() to be
// consistent with leap and common years. | public int getActualMaximum(int field) {
final int fieldsForFixedMax = ERA_MASK|DAY_OF_WEEK_MASK|HOUR_MASK|AM_PM_MASK|
HOUR_OF_DAY_MASK|MINUTE_MASK|SECOND_MASK|MILLISECOND_MASK|
ZONE_OFFSET_MASK|DST_OFFSET_MASK;
if ((fieldsForFixedMax & (1<<field)) != 0) {
return getMaximum(field);
}
JapaneseImperialCalendar jc = getNormalizedCalendar();
LocalGregorianCalendar.Date date = jc.jdate;
int normalizedYear = date.getNormalizedYear();
int value = -1;
switch (field) {
case MONTH:
{
value = DECEMBER;
if (isTransitionYear(date.getNormalizedYear())) {
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
if (fd < transition) {
LocalGregorianCalendar.Date ldate
= (LocalGregorianCalendar.Date) date.clone();
jcal.getCalendarDateFromFixedDate(ldate, transition - 1);
value = ldate.getMonth() - 1;
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
value = d.getMonth() - 1;
}
}
}
break;
case DAY_OF_MONTH:
value = jcal.getMonthLength(date);
break;
case DAY_OF_YEAR:
{
if (isTransitionYear(date.getNormalizedYear())) {
// Handle transition year.
// TODO: there may be multiple transitions in a year.
int eraIndex = getEraIndex(date);
if (date.getYear() != 1) {
eraIndex++;
assert eraIndex < eras.length;
}
long transition = sinceFixedDates[eraIndex];
long fd = jc.cachedFixedDate;
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
if (fd < transition) {
value = (int)(transition - gcal.getFixedDate(d));
} else {
d.addYear(+1);
value = (int)(gcal.getFixedDate(d) - transition);
}
} else {
LocalGregorianCalendar.Date d = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == d.getEra() && date.getYear() == d.getYear()) {
long fd = jcal.getFixedDate(d);
long jan1 = getFixedDateJan1(d, fd);
value = (int)(fd - jan1) + 1;
} else if (date.getYear() == getMinimum(YEAR)) {
CalendarDate d1 = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
long fd1 = jcal.getFixedDate(d1);
d1.addYear(1);
d1.setMonth(BaseCalendar.JANUARY).setDayOfMonth(1);
jcal.normalize(d1);
long fd2 = jcal.getFixedDate(d1);
value = (int)(fd2 - fd1);
} else {
value = jcal.getYearLength(date);
}
}
}
break;
case WEEK_OF_YEAR:
{
if (!isTransitionYear(date.getNormalizedYear())) {
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (date.getEra() == jd.getEra() && date.getYear() == jd.getYear()) {
long fd = jcal.getFixedDate(jd);
long jan1 = getFixedDateJan1(jd, fd);
value = getWeekNumber(jan1, fd);
} else if (date.getEra() == null && date.getYear() == getMinimum(YEAR)) {
CalendarDate d = jcal.getCalendarDate(Long.MIN_VALUE, getZone());
// shift 400 years to avoid underflow
d.addYear(+400);
jcal.normalize(d);
jd.setEra(d.getEra());
jd.setDate(d.getYear() + 1, BaseCalendar.JANUARY, 1);
jcal.normalize(jd);
long jan1 = jcal.getFixedDate(d);
long nextJan1 = jcal.getFixedDate(jd);
long nextJan1st = LocalGregorianCalendar.getDayOfWeekDateOnOrBefore(nextJan1 + 6,
getFirstDayOfWeek());
int ndays = (int)(nextJan1st - nextJan1);
if (ndays >= getMinimalDaysInFirstWeek()) {
nextJan1st -= 7;
}
value = getWeekNumber(jan1, nextJan1st);
} else {
// Get the day of week of January 1 of the year
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1);
int dayOfWeek = gcal.getDayOfWeek(d);
// Normalize the day of week with the firstDayOfWeek value
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
value = 52;
int magic = dayOfWeek + getMinimalDaysInFirstWeek() - 1;
if ((magic == 6) ||
(date.isLeapYear() && (magic == 5 || magic == 12))) {
value++;
}
}
break;
}
if (jc == this) {
jc = (JapaneseImperialCalendar) jc.clone();
}
int max = getActualMaximum(DAY_OF_YEAR);
jc.set(DAY_OF_YEAR, max);
value = jc.get(WEEK_OF_YEAR);
if (value == 1 && max > 7) {
jc.add(WEEK_OF_YEAR, -1);
value = jc.get(WEEK_OF_YEAR);
}
}
break;
case WEEK_OF_MONTH:
{
LocalGregorianCalendar.Date jd = jcal.getCalendarDate(Long.MAX_VALUE,
getZone());
if (!(date.getEra() == jd.getEra() && date.getYear() == jd.getYear())) {
CalendarDate d = gcal.newCalendarDate(TimeZone.NO_TIMEZONE);
d.setDate(date.getNormalizedYear(), date.getMonth(), 1);
int dayOfWeek = gcal.getDayOfWeek(d);
int monthLength = gcal.getMonthLength(d);
dayOfWeek -= getFirstDayOfWeek();
if (dayOfWeek < 0) {
dayOfWeek += 7;
}
int nDaysFirstWeek = 7 - dayOfWeek; // # of days in the first week
value = 3;
if (nDaysFirstWeek >= getMinimalDaysInFirstWeek()) {
value++;
}
monthLength -= nDaysFirstWeek + 7 * 3;
if (monthLength > 0) {
value++;
if (monthLength > 7) {
value++;
}
}
} else {
long fd = jcal.getFixedDate(jd);
long month1 = fd - jd.getDayOfMonth() + 1;
value = getWeekNumber(month1, fd);
}
}
break;
case DAY_OF_WEEK_IN_MONTH:
{
int ndays, dow1;
int dow = date.getDayOfWeek();
BaseCalendar.Date d = (BaseCalendar.Date) date.clone();
ndays = jcal.getMonthLength(d);
d.setDayOfMonth(1);
jcal.normalize(d);
dow1 = d.getDayOfWeek();
int x = dow - dow1;
if (x < 0) {
x += 7;
}
ndays -= x;
value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} | NONSATD | true | getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
} | value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field); | value = (ndays + 6) / 7;
}
break;
case YEAR:
{
CalendarDate jd = jcal.getCalendarDate(jc.getTimeInMillis(), getZone());
CalendarDate d;
int eraIndex = getEraIndex(date);
if (eraIndex == eras.length - 1) {
d = jcal.getCalendarDate(Long.MAX_VALUE, getZone());
value = d.getYear();
// Use an equivalent year for the
// getYearOffsetInMillis call to avoid overflow.
if (value > 400) {
jd.setYear(value - 400);
}
} else {
d = jcal.getCalendarDate(eras[eraIndex + 1].getSince(getZone()) - 1,
getZone());
value = d.getYear();
// Use the same year as d.getYear() to be
// consistent with leap and common years.
jd.setYear(value);
}
jcal.normalize(jd);
if (getYearOffsetInMillis(jd) > getYearOffsetInMillis(d)) {
value--;
}
}
break;
default:
throw new ArrayIndexOutOfBoundsException(field);
}
return value;
} |
25,271 | 0 | /**
* Add a Qualification non diplemantes.
*/ | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} | NONSATD | true | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} |
25,271 | 1 | // on transforme le commentaire pour corriger les caractères spéciaux | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} | NONSATD | true | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !! | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
} | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} |
25,271 | 2 | //TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment())); | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} | DEFECT | true | // on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} |
25,271 | 3 | //ajout en base | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} | NONSATD | true | // .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} |
25,271 | 4 | //Ajout dans l'individu courant | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} | NONSATD | true | //ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
} | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} | public void addQualifNoDip() throws IOException {
if (isValidIndCursus(QualifNonDiplomante.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
pojoQualif.getCursus().setComment(pojoQualif.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(pojoQualif.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), pojoQualif.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(pojoQualif.getCursus());
}
pojoQualif.addCursus();
Collections.sort(pojoQualif.getCursusList(), new ComparatorString(IndCursus.class));
}
} |
25,270 | 0 | /**
* Add a CursusPro.
*/ | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} | NONSATD | true | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} |
25,270 | 1 | // on transforme le commentaire pour corriger les caractères spéciaux | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} | NONSATD | true | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !! | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
} | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} |
25,270 | 2 | //TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment())); | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} | DEFECT | true | // on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} |
25,270 | 3 | //ajout en base | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} | NONSATD | true | // .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} |
25,270 | 4 | //Ajout dans l'individu courant | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} | NONSATD | true | //ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
} | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} | public void addCursusPro() throws IOException {
if (isValidIndCursus(CursusPro.class, true)) {
// on transforme le commentaire pour corriger les caractères spéciaux
indCursusPojo.getCursus().setComment(indCursusPojo.getCursus().getComment());
//TODO: Fix this !!
// org.esupportail.commons.utils.strings.StringUtils
// .htmlToText(indCursusPojo.getCursus().getComment()));
if (actionEnum.getWhatAction().equals(ActionEnum.UPDATE_ACTION)) {
//ajout en base
addOneCursus(getCurrentInd().getIndividu(), indCursusPojo.getCursus());
//Ajout dans l'individu courant
getCurrentInd().getIndividu().getCursus().add(indCursusPojo.getCursus());
}
indCursusPojo.addCursus();
Collections.sort(indCursusPojo.getCursusList(), new ComparatorString(IndCursus.class));
}
} |
8,889 | 0 | // Don't enable the screen until all existing windows have been drawn. | private void performEnableScreen() {
synchronized (mGlobalLock) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: mDisplayEnabled=" + mDisplayEnabled
+ " mForceDisplayEnabled=" + mForceDisplayEnabled
+ " mShowingBootMessages=" + mShowingBootMessages
+ " mSystemBooted=" + mSystemBooted
+ " mOnlyCore=" + mOnlyCore,
new RuntimeException("here").fillInStackTrace());
if (mDisplayEnabled) {
return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} | NONSATD | true | return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays? | new RuntimeException("here").fillInStackTrace());
if (mDisplayEnabled) {
return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation. | private void performEnableScreen() {
synchronized (mGlobalLock) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: mDisplayEnabled=" + mDisplayEnabled
+ " mForceDisplayEnabled=" + mForceDisplayEnabled
+ " mShowingBootMessages=" + mShowingBootMessages
+ " mSystemBooted=" + mSystemBooted
+ " mOnlyCore=" + mOnlyCore,
new RuntimeException("here").fillInStackTrace());
if (mDisplayEnabled) {
return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) { |
8,889 | 1 | // TODO(multidisplay): Expand to all displays? | private void performEnableScreen() {
synchronized (mGlobalLock) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: mDisplayEnabled=" + mDisplayEnabled
+ " mForceDisplayEnabled=" + mForceDisplayEnabled
+ " mShowingBootMessages=" + mShowingBootMessages
+ " mSystemBooted=" + mSystemBooted
+ " mOnlyCore=" + mOnlyCore,
new RuntimeException("here").fillInStackTrace());
if (mDisplayEnabled) {
return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} | DESIGN | true | // Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return; | return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true; | private void performEnableScreen() {
synchronized (mGlobalLock) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: mDisplayEnabled=" + mDisplayEnabled
+ " mForceDisplayEnabled=" + mForceDisplayEnabled
+ " mShowingBootMessages=" + mShowingBootMessages
+ " mSystemBooted=" + mSystemBooted
+ " mOnlyCore=" + mOnlyCore,
new RuntimeException("here").fillInStackTrace());
if (mDisplayEnabled) {
return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain(); |
8,889 | 2 | // stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation. | private void performEnableScreen() {
synchronized (mGlobalLock) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: mDisplayEnabled=" + mDisplayEnabled
+ " mForceDisplayEnabled=" + mForceDisplayEnabled
+ " mShowingBootMessages=" + mShowingBootMessages
+ " mSystemBooted=" + mSystemBooted
+ " mOnlyCore=" + mOnlyCore,
new RuntimeException("here").fillInStackTrace());
if (mDisplayEnabled) {
return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} | NONSATD | true | if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true; | return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) { | + " mSystemBooted=" + mSystemBooted
+ " mOnlyCore=" + mOnlyCore,
new RuntimeException("here").fillInStackTrace());
if (mDisplayEnabled) {
return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
} |
8,889 | 3 | // BOOT_FINISHED | private void performEnableScreen() {
synchronized (mGlobalLock) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: mDisplayEnabled=" + mDisplayEnabled
+ " mForceDisplayEnabled=" + mForceDisplayEnabled
+ " mShowingBootMessages=" + mShowingBootMessages
+ " mSystemBooted=" + mSystemBooted
+ " mOnlyCore=" + mOnlyCore,
new RuntimeException("here").fillInStackTrace());
if (mDisplayEnabled) {
return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} | NONSATD | true | Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle(); | if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!"); | return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false); |
8,889 | 4 | // Enable input dispatch. | private void performEnableScreen() {
synchronized (mGlobalLock) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: mDisplayEnabled=" + mDisplayEnabled
+ " mForceDisplayEnabled=" + mForceDisplayEnabled
+ " mShowingBootMessages=" + mShowingBootMessages
+ " mSystemBooted=" + mSystemBooted
+ " mOnlyCore=" + mOnlyCore,
new RuntimeException("here").fillInStackTrace());
if (mDisplayEnabled) {
return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} | NONSATD | true | mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
} | data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} | if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} |
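Note on the BOOT_FINISHED transaction in the rows above: a Parcel taken from the pool is normally recycled in a finally block so it is returned even when transact() throws. A minimal sketch of that variant, reusing the classes and constants from the snippet above (ServiceManager, Parcel, IBinder.FIRST_CALL_TRANSACTION, Slog, TAG_WM); this is an illustration, not the AOSP code itself.

IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
    Parcel data = Parcel.obtain();
    try {
        data.writeInterfaceToken("android.ui.ISurfaceComposer");
        // FIRST_CALL_TRANSACTION is used as the BOOT_FINISHED code, as in the snippet above.
        surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, data, null, 0);
    } catch (RemoteException ex) {
        Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
    } finally {
        data.recycle(); // returned to the pool on every path
    }
}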
8,889 | 5 | // Make sure the last requested orientation has been applied. | private void performEnableScreen() {
synchronized (mGlobalLock) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: mDisplayEnabled=" + mDisplayEnabled
+ " mForceDisplayEnabled=" + mForceDisplayEnabled
+ " mShowingBootMessages=" + mShowingBootMessages
+ " mSystemBooted=" + mSystemBooted
+ " mOnlyCore=" + mOnlyCore,
new RuntimeException("here").fillInStackTrace());
if (mDisplayEnabled) {
return;
}
if (!mSystemBooted && !mShowingBootMessages) {
return;
}
if (!mShowingBootMessages && !mPolicy.canDismissBootAnimation()) {
return;
}
// Don't enable the screen until all existing windows have been drawn.
if (!mForceDisplayEnabled
// TODO(multidisplay): Expand to all displays?
&& getDefaultDisplayContentLocked().checkWaitingForWindows()) {
return;
}
if (!mBootAnimationStopped) {
Trace.asyncTraceBegin(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
// stop boot animation
// formerly we would just kill the process, but we now ask it to exit so it
// can choose where to stop the animation.
SystemProperties.set("service.bootanim.exit", "1");
mBootAnimationStopped = true;
}
if (!mForceDisplayEnabled && !checkBootAnimationCompleteLocked()) {
if (DEBUG_BOOT) Slog.i(TAG_WM, "performEnableScreen: Waiting for anim complete");
return;
}
try {
IBinder surfaceFlinger = ServiceManager.getService("SurfaceFlinger");
if (surfaceFlinger != null) {
Slog.i(TAG_WM, "******* TELLING SURFACE FLINGER WE ARE BOOTED!");
Parcel data = Parcel.obtain();
data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} | NONSATD | true | }
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} | mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} | data.writeInterfaceToken("android.ui.ISurfaceComposer");
surfaceFlinger.transact(IBinder.FIRST_CALL_TRANSACTION, // BOOT_FINISHED
data, null, 0);
data.recycle();
}
} catch (RemoteException ex) {
Slog.e(TAG_WM, "Boot completed: SurfaceFlinger is dead!");
}
EventLog.writeEvent(EventLogTags.WM_BOOT_ANIMATION_DONE, SystemClock.uptimeMillis());
Trace.asyncTraceEnd(TRACE_TAG_WINDOW_MANAGER, "Stop bootanim", 0);
mDisplayEnabled = true;
if (DEBUG_SCREEN_ON || DEBUG_BOOT) Slog.i(TAG_WM, "******************** ENABLING SCREEN!");
// Enable input dispatch.
mInputManagerCallback.setEventDispatchingLw(mEventDispatchingEnabled);
}
try {
mActivityManager.bootAnimationComplete();
} catch (RemoteException e) {
}
mPolicy.enableScreenAfterBoot();
// Make sure the last requested orientation has been applied.
updateRotationUnchecked(false, false);
} |
25,273 | 0 | //TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse);
} | IMPLEMENTATION | true | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse);
} | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse);
} | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse);
} |
25,274 | 0 | //TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse, ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse, fileCreator );
} | IMPLEMENTATION | true | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse, ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse, fileCreator );
} | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse, ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse, fileCreator );
} | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse, ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse, fileCreator );
} |
25,275 | 0 | //TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse, ICodeReaderFactory fileCreator, IParserConfiguration configuration) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse, fileCreator, configuration );
} | IMPLEMENTATION | true | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse, ICodeReaderFactory fileCreator, IParserConfiguration configuration) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse, fileCreator, configuration );
} | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse, ICodeReaderFactory fileCreator, IParserConfiguration configuration) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse, fileCreator, configuration );
} | @Override
public IASTTranslationUnit getTranslationUnit(IFile fileToParse, ICodeReaderFactory fileCreator, IParserConfiguration configuration) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getTranslationUnit(fileToParse, fileCreator, configuration );
} |
25,276 | 0 | //TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided | @Override
public IASTCompletionNode getCompletionNode(IFile fileToParse, int offset,
ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getCompletionNode(fileToParse, offset, fileCreator);
} | IMPLEMENTATION | true | public IASTCompletionNode getCompletionNode(IFile fileToParse, int offset,
ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getCompletionNode(fileToParse, offset, fileCreator);
} | @Override
public IASTCompletionNode getCompletionNode(IFile fileToParse, int offset,
ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getCompletionNode(fileToParse, offset, fileCreator);
} | @Override
public IASTCompletionNode getCompletionNode(IFile fileToParse, int offset,
ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getCompletionNode(fileToParse, offset, fileCreator);
} |
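A sketch of the "delegate based upon context" idea raised in the TODOs above: look up a delegate by a key derived from the file instead of always calling defaultService. Everything here except IFile, IASTTranslationUnit and UnsupportedDialectException is a placeholder, not CDT API; ILanguageService, dialectKeyOf() and the delegates map are assumptions.

private final Map<String, ILanguageService> delegates = new HashMap<>(); // ILanguageService is hypothetical

private ILanguageService serviceFor(IFile file) {
    // dialectKeyOf() is an assumed helper that derives a dialect/context key from the file
    return delegates.getOrDefault(dialectKeyOf(file), defaultService);
}

public IASTTranslationUnit getTranslationUnit(IFile fileToParse) throws UnsupportedDialectException {
    return serviceFor(fileToParse).getTranslationUnit(fileToParse);
}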
8,894 | 0 | // TODO(multidisplay): Extend to multiple displays.
/**
* Returns the focused window in the following format:
* windowHashCodeInHexadecimal windowName
*
* @param client The remote client to send the listing to.
* @return False if an error occurred, true otherwise.
*/ | boolean viewServerGetFocusedWindow(Socket client) {
if (isSystemSecure()) {
return false;
}
boolean result = true;
WindowState focusedWindow = getFocusedWindow();
BufferedWriter out = null;
// Any uncaught exception will crash the system process
try {
OutputStream clientStream = client.getOutputStream();
out = new BufferedWriter(new OutputStreamWriter(clientStream), 8 * 1024);
if(focusedWindow != null) {
out.write(Integer.toHexString(System.identityHashCode(focusedWindow)));
out.write(' ');
out.append(focusedWindow.mAttrs.getTitle());
}
out.write('\n');
out.flush();
} catch (Exception e) {
result = false;
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
result = false;
}
}
}
return result;
} | IMPLEMENTATION | true | boolean viewServerGetFocusedWindow(Socket client) {
if (isSystemSecure()) {
return false;
}
boolean result = true;
WindowState focusedWindow = getFocusedWindow();
BufferedWriter out = null;
// Any uncaught exception will crash the system process
try {
OutputStream clientStream = client.getOutputStream();
out = new BufferedWriter(new OutputStreamWriter(clientStream), 8 * 1024);
if(focusedWindow != null) {
out.write(Integer.toHexString(System.identityHashCode(focusedWindow)));
out.write(' ');
out.append(focusedWindow.mAttrs.getTitle());
}
out.write('\n');
out.flush();
} catch (Exception e) {
result = false;
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
result = false;
}
}
}
return result;
} | boolean viewServerGetFocusedWindow(Socket client) {
if (isSystemSecure()) {
return false;
}
boolean result = true;
WindowState focusedWindow = getFocusedWindow();
BufferedWriter out = null;
// Any uncaught exception will crash the system process
try {
OutputStream clientStream = client.getOutputStream();
out = new BufferedWriter(new OutputStreamWriter(clientStream), 8 * 1024);
if(focusedWindow != null) {
out.write(Integer.toHexString(System.identityHashCode(focusedWindow)));
out.write(' ');
out.append(focusedWindow.mAttrs.getTitle());
}
out.write('\n');
out.flush();
} catch (Exception e) {
result = false;
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
result = false;
}
}
}
return result;
} | boolean viewServerGetFocusedWindow(Socket client) {
if (isSystemSecure()) {
return false;
}
boolean result = true;
WindowState focusedWindow = getFocusedWindow();
BufferedWriter out = null;
// Any uncaught exception will crash the system process
try {
OutputStream clientStream = client.getOutputStream();
out = new BufferedWriter(new OutputStreamWriter(clientStream), 8 * 1024);
if(focusedWindow != null) {
out.write(Integer.toHexString(System.identityHashCode(focusedWindow)));
out.write(' ');
out.append(focusedWindow.mAttrs.getTitle());
}
out.write('\n');
out.flush();
} catch (Exception e) {
result = false;
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
result = false;
}
}
}
return result;
} |
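The writer bookkeeping in viewServerGetFocusedWindow above (null check plus explicit close in finally) can also be written with try-with-resources, which closes the writer on every exit path while the single catch still sets the error flag. A sketch using the same variable names; the behavior is intended to be equivalent.

boolean result = true;
try (BufferedWriter out = new BufferedWriter(
        new OutputStreamWriter(client.getOutputStream()), 8 * 1024)) {
    if (focusedWindow != null) {
        out.write(Integer.toHexString(System.identityHashCode(focusedWindow)));
        out.write(' ');
        out.append(focusedWindow.mAttrs.getTitle());
    }
    out.write('\n');
    out.flush();
} catch (Exception e) {
    result = false; // also covers an exception thrown by the implicit close()
}
return result;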
8,894 | 1 | // Any uncaught exception will crash the system process | boolean viewServerGetFocusedWindow(Socket client) {
if (isSystemSecure()) {
return false;
}
boolean result = true;
WindowState focusedWindow = getFocusedWindow();
BufferedWriter out = null;
// Any uncaught exception will crash the system process
try {
OutputStream clientStream = client.getOutputStream();
out = new BufferedWriter(new OutputStreamWriter(clientStream), 8 * 1024);
if(focusedWindow != null) {
out.write(Integer.toHexString(System.identityHashCode(focusedWindow)));
out.write(' ');
out.append(focusedWindow.mAttrs.getTitle());
}
out.write('\n');
out.flush();
} catch (Exception e) {
result = false;
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
result = false;
}
}
}
return result;
} | NONSATD | true | WindowState focusedWindow = getFocusedWindow();
BufferedWriter out = null;
// Any uncaught exception will crash the system process
try {
OutputStream clientStream = client.getOutputStream(); | boolean viewServerGetFocusedWindow(Socket client) {
if (isSystemSecure()) {
return false;
}
boolean result = true;
WindowState focusedWindow = getFocusedWindow();
BufferedWriter out = null;
// Any uncaught exception will crash the system process
try {
OutputStream clientStream = client.getOutputStream();
out = new BufferedWriter(new OutputStreamWriter(clientStream), 8 * 1024);
if(focusedWindow != null) {
out.write(Integer.toHexString(System.identityHashCode(focusedWindow)));
out.write(' ');
out.append(focusedWindow.mAttrs.getTitle());
}
out.write('\n');
out.flush(); | boolean viewServerGetFocusedWindow(Socket client) {
if (isSystemSecure()) {
return false;
}
boolean result = true;
WindowState focusedWindow = getFocusedWindow();
BufferedWriter out = null;
// Any uncaught exception will crash the system process
try {
OutputStream clientStream = client.getOutputStream();
out = new BufferedWriter(new OutputStreamWriter(clientStream), 8 * 1024);
if(focusedWindow != null) {
out.write(Integer.toHexString(System.identityHashCode(focusedWindow)));
out.write(' ');
out.append(focusedWindow.mAttrs.getTitle());
}
out.write('\n');
out.flush();
} catch (Exception e) {
result = false;
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
result = false;
}
} |
25,277 | 0 | //TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided | @Override
public IASTCompletionNode getCompletionNode(IStorage fileToParse, IProject project, int offset,
ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getCompletionNode(fileToParse, project, offset, fileCreator);
} | IMPLEMENTATION | true | public IASTCompletionNode getCompletionNode(IStorage fileToParse, IProject project, int offset,
ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getCompletionNode(fileToParse, project, offset, fileCreator);
} | @Override
public IASTCompletionNode getCompletionNode(IStorage fileToParse, IProject project, int offset,
ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getCompletionNode(fileToParse, project, offset, fileCreator);
} | @Override
public IASTCompletionNode getCompletionNode(IStorage fileToParse, IProject project, int offset,
ICodeReaderFactory fileCreator) throws UnsupportedDialectException {
//TODO - At this time, we purely delegate blindly
//In the future, we may need to delegate based upon context provided
return defaultService.getCompletionNode(fileToParse, project, offset, fileCreator);
} |
17,081 | 0 | // TODO: do we need to call logout() on the LoginContext? | @Override
public void refresh()
throws LoginException, GSSException
{
// TODO: do we need to call logout() on the LoginContext?
loginContext = new LoginContext("", null, null, new Configuration()
{
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name)
{
ImmutableMap.Builder<String, String> options = ImmutableMap.builder();
options.put("refreshKrb5Config", "true");
options.put("doNotPrompt", "true");
options.put("useKeyTab", "true");
if (getBoolean("trino.client.debugKerberos")) {
options.put("debug", "true");
}
keytab.ifPresent(file -> options.put("keyTab", file.getAbsolutePath()));
credentialCache.ifPresent(file -> {
options.put("ticketCache", file.getAbsolutePath());
options.put("renewTGT", "true");
});
if (!keytab.isPresent() || credentialCache.isPresent()) {
options.put("useTicketCache", "true");
}
principal.ifPresent(value -> options.put("principal", value));
return new AppConfigurationEntry[] {
new AppConfigurationEntry(Krb5LoginModule.class.getName(), REQUIRED, options.buildOrThrow())
};
}
});
loginContext.login();
} | DESIGN | true | throws LoginException, GSSException
{
// TODO: do we need to call logout() on the LoginContext?
loginContext = new LoginContext("", null, null, new Configuration()
{ | @Override
public void refresh()
throws LoginException, GSSException
{
// TODO: do we need to call logout() on the LoginContext?
loginContext = new LoginContext("", null, null, new Configuration()
{
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name)
{
ImmutableMap.Builder<String, String> options = ImmutableMap.builder();
options.put("refreshKrb5Config", "true");
options.put("doNotPrompt", "true");
options.put("useKeyTab", "true");
if (getBoolean("trino.client.debugKerberos")) { | @Override
public void refresh()
throws LoginException, GSSException
{
// TODO: do we need to call logout() on the LoginContext?
loginContext = new LoginContext("", null, null, new Configuration()
{
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name)
{
ImmutableMap.Builder<String, String> options = ImmutableMap.builder();
options.put("refreshKrb5Config", "true");
options.put("doNotPrompt", "true");
options.put("useKeyTab", "true");
if (getBoolean("trino.client.debugKerberos")) {
options.put("debug", "true");
}
keytab.ifPresent(file -> options.put("keyTab", file.getAbsolutePath()));
credentialCache.ifPresent(file -> {
options.put("ticketCache", file.getAbsolutePath());
options.put("renewTGT", "true");
});
if (!keytab.isPresent() || credentialCache.isPresent()) {
options.put("useTicketCache", "true");
} |
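For the logout question in the TODO above, the JAAS call itself is LoginContext.logout(), which clears the credentials obtained by the previous login(). Whether calling it here is appropriate is exactly what the comment leaves open; the sketch below only shows the mechanics, with `configuration` standing in for the anonymous Configuration built in the method above.

if (loginContext != null) {
    try {
        loginContext.logout(); // drop credentials from the previous login
    } catch (LoginException ignored) {
        // a failed logout should not block the refresh
    }
}
loginContext = new LoginContext("", null, null, configuration);
loginContext.login();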
17,089 | 0 | // We only expect two values, a DEF and a reference, but there might be more. | @Override
public void reduce(BytesWritable key, Iterable<BytesWritable> values, Context context)
throws IOException, InterruptedException {
int defCount = 0;
refs.clear();
// We only expect two values, a DEF and a reference, but there might be more.
for (BytesWritable type : values) {
if (type.getLength() == DEF.getLength()) {
defCount++;
} else {
byte[] bytes = new byte[type.getLength()];
System.arraycopy(type.getBytes(), 0, bytes, 0, type.getLength());
refs.add(bytes);
}
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString()));
}
context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced
context.getCounter(Counts.REFERENCED).increment(1);
}
} | DESIGN | true | int defCount = 0;
refs.clear();
// We only expect two values, a DEF and a reference, but there might be more.
for (BytesWritable type : values) {
if (type.getLength() == DEF.getLength()) { | @Override
public void reduce(BytesWritable key, Iterable<BytesWritable> values, Context context)
throws IOException, InterruptedException {
int defCount = 0;
refs.clear();
// We only expect two values, a DEF and a reference, but there might be more.
for (BytesWritable type : values) {
if (type.getLength() == DEF.getLength()) {
defCount++;
} else {
byte[] bytes = new byte[type.getLength()];
System.arraycopy(type.getBytes(), 0, bytes, 0, type.getLength());
refs.add(bytes);
}
}
// TODO check for more than one def, should not happen | @Override
public void reduce(BytesWritable key, Iterable<BytesWritable> values, Context context)
throws IOException, InterruptedException {
int defCount = 0;
refs.clear();
// We only expect two values, a DEF and a reference, but there might be more.
for (BytesWritable type : values) {
if (type.getLength() == DEF.getLength()) {
defCount++;
} else {
byte[] bytes = new byte[type.getLength()];
System.arraycopy(type.getBytes(), 0, bytes, 0, type.getLength());
refs.add(bytes);
}
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
} |
17,089 | 1 | // TODO check for more than one def, should not happen | @Override
public void reduce(BytesWritable key, Iterable<BytesWritable> values, Context context)
throws IOException, InterruptedException {
int defCount = 0;
refs.clear();
// We only expect two values, a DEF and a reference, but there might be more.
for (BytesWritable type : values) {
if (type.getLength() == DEF.getLength()) {
defCount++;
} else {
byte[] bytes = new byte[type.getLength()];
System.arraycopy(type.getBytes(), 0, bytes, 0, type.getLength());
refs.add(bytes);
}
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString()));
}
context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced
context.getCounter(Counts.REFERENCED).increment(1);
}
} | IMPLEMENTATION | true | }
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null; | // We only expect two values, a DEF and a reference, but there might be more.
for (BytesWritable type : values) {
if (type.getLength() == DEF.getLength()) {
defCount++;
} else {
byte[] bytes = new byte[type.getLength()];
System.arraycopy(type.getBytes(), 0, bytes, 0, type.getLength());
refs.add(bytes);
}
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
} | @Override
public void reduce(BytesWritable key, Iterable<BytesWritable> values, Context context)
throws IOException, InterruptedException {
int defCount = 0;
refs.clear();
// We only expect two values, a DEF and a reference, but there might be more.
for (BytesWritable type : values) {
if (type.getLength() == DEF.getLength()) {
defCount++;
} else {
byte[] bytes = new byte[type.getLength()];
System.arraycopy(type.getBytes(), 0, bytes, 0, type.getLength());
refs.add(bytes);
}
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else { |
17,089 | 2 | // this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes. | @Override
public void reduce(BytesWritable key, Iterable<BytesWritable> values, Context context)
throws IOException, InterruptedException {
int defCount = 0;
refs.clear();
// We only expect two values, a DEF and a reference, but there might be more.
for (BytesWritable type : values) {
if (type.getLength() == DEF.getLength()) {
defCount++;
} else {
byte[] bytes = new byte[type.getLength()];
System.arraycopy(type.getBytes(), 0, bytes, 0, type.getLength());
refs.add(bytes);
}
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString()));
}
context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced
context.getCounter(Counts.REFERENCED).increment(1);
}
} | DESIGN | true | }
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1); | String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString())); | if (type.getLength() == DEF.getLength()) {
defCount++;
} else {
byte[] bytes = new byte[type.getLength()];
System.arraycopy(type.getBytes(), 0, bytes, 0, type.getLength());
refs.add(bytes);
}
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString()));
}
context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced
context.getCounter(Counts.REFERENCED).increment(1);
}
} |
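For the "check for more than one def" TODO in the reducer above, one hedged option is to surface the anomaly with its own counter and log line rather than silently folding it into the defined-and-referenced case. The counter group and name below are placeholders, not counters the original job defines.

if (defCount > 1) {
    context.getCounter("Verify", "MULTIPLE_DEFS").increment(1); // placeholder counter
    LOG.error("Linked List error: key "
        + COMMA_JOINER.join(Bytes.getLong(key.getBytes()), Bytes.getLong(key.getBytes(), 8))
        + " was defined " + defCount + " times");
}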
17,089 | 3 | // node is defined but not referenced | @Override
public void reduce(BytesWritable key, Iterable<BytesWritable> values, Context context)
throws IOException, InterruptedException {
int defCount = 0;
refs.clear();
// We only expect two values, a DEF and a reference, but there might be more.
for (BytesWritable type : values) {
if (type.getLength() == DEF.getLength()) {
defCount++;
} else {
byte[] bytes = new byte[type.getLength()];
System.arraycopy(type.getBytes(), 0, bytes, 0, type.getLength());
refs.add(bytes);
}
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString()));
}
context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced
context.getCounter(Counts.REFERENCED).increment(1);
}
} | NONSATD | true | context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1); | keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString()));
}
context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced | refs.add(bytes);
}
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString()));
}
context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced
context.getCounter(Counts.REFERENCED).increment(1);
}
} |
17,089 | 4 | // node is defined and referenced | @Override
public void reduce(BytesWritable key, Iterable<BytesWritable> values, Context context)
throws IOException, InterruptedException {
int defCount = 0;
refs.clear();
// We only expect two values, a DEF and a reference, but there might be more.
for (BytesWritable type : values) {
if (type.getLength() == DEF.getLength()) {
defCount++;
} else {
byte[] bytes = new byte[type.getLength()];
System.arraycopy(type.getBytes(), 0, bytes, 0, type.getLength());
refs.add(bytes);
}
}
// TODO check for more than one def, should not happen
List<String> refsList = new ArrayList<>(refs.size());
String keyString = null;
if (defCount == 0 || refs.size() != 1) {
for (byte[] ref : refs) {
refsList.add(COMMA_JOINER.join(Bytes.getLong(ref), Bytes.getLong(ref, 8)));
}
keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString()));
}
context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced
context.getCounter(Counts.REFERENCED).increment(1);
}
} | NONSATD | true | context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced
context.getCounter(Counts.REFERENCED).increment(1);
} | // node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString()));
}
context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced
context.getCounter(Counts.REFERENCED).increment(1);
}
} | keyString = COMMA_JOINER.join(Bytes.getLong(key.getBytes()),
Bytes.getLong(key.getBytes(), 8));
LOG.error("Linked List error: Key = " + keyString + " References = " + refsList);
}
if (defCount == 0 && refs.size() > 0) {
// this is bad, found a node that is referenced but not defined. It must have been
// lost, emit some info about this node for debugging purposes.
context.write(new Text(keyString), new Text(refsList.toString()));
context.getCounter(Counts.UNDEFINED).increment(1);
} else if (defCount > 0 && refs.size() == 0) {
// node is defined but not referenced
context.write(new Text(keyString), new Text("none"));
context.getCounter(Counts.UNREFERENCED).increment(1);
} else {
if (refs.size() > 1) {
if (refsList != null) {
context.write(new Text(keyString), new Text(refsList.toString()));
}
context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
}
// node is defined and referenced
context.getCounter(Counts.REFERENCED).increment(1);
}
} |
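The keys and references above are 16-byte node ids rendered as two comma-joined longs via Bytes.getLong(b) and Bytes.getLong(b, 8). With only the JDK, the same decoding is two big-endian reads from a ByteBuffer; formatNodeId below is just an illustrative helper name.

import java.nio.ByteBuffer;

static String formatNodeId(byte[] id16) {
    ByteBuffer buf = ByteBuffer.wrap(id16); // big-endian by default, like Bytes.getLong
    long hi = buf.getLong();                // bytes 0..7
    long lo = buf.getLong();                // bytes 8..15
    return hi + "," + lo;
}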
17,091 | 0 | // TODO(ulfjack): Consider acquiring local resources here before trying to write the file. | @Override
public List<SpawnResult> writeOutputToFile(
AbstractAction action,
ActionExecutionContext actionExecutionContext,
DeterministicWriter deterministicWriter,
boolean makeExecutable, boolean isRemotable)
throws ExecException {
Path outputPath =
actionExecutionContext.getInputPath(Iterables.getOnlyElement(action.getOutputs()));
// TODO(ulfjack): Consider acquiring local resources here before trying to write the file.
try (AutoProfiler p =
AutoProfiler.logged(
"running write for action " + action.prettyPrint(),
logger,
/*minTimeForLoggingInMilliseconds=*/ 100)) {
try {
try (OutputStream out = new BufferedOutputStream(outputPath.getOutputStream())) {
deterministicWriter.writeOutputFile(out);
}
if (makeExecutable) {
outputPath.setExecutable(true);
}
} catch (IOException e) {
throw new EnvironmentalExecException("IOException during file write", e);
}
}
return ImmutableList.of();
} | IMPLEMENTATION | true | Path outputPath =
actionExecutionContext.getInputPath(Iterables.getOnlyElement(action.getOutputs()));
// TODO(ulfjack): Consider acquiring local resources here before trying to write the file.
try (AutoProfiler p =
AutoProfiler.logged( | @Override
public List<SpawnResult> writeOutputToFile(
AbstractAction action,
ActionExecutionContext actionExecutionContext,
DeterministicWriter deterministicWriter,
boolean makeExecutable, boolean isRemotable)
throws ExecException {
Path outputPath =
actionExecutionContext.getInputPath(Iterables.getOnlyElement(action.getOutputs()));
// TODO(ulfjack): Consider acquiring local resources here before trying to write the file.
try (AutoProfiler p =
AutoProfiler.logged(
"running write for action " + action.prettyPrint(),
logger,
/*minTimeForLoggingInMilliseconds=*/ 100)) {
try {
try (OutputStream out = new BufferedOutputStream(outputPath.getOutputStream())) {
deterministicWriter.writeOutputFile(out);
}
if (makeExecutable) { | @Override
public List<SpawnResult> writeOutputToFile(
AbstractAction action,
ActionExecutionContext actionExecutionContext,
DeterministicWriter deterministicWriter,
boolean makeExecutable, boolean isRemotable)
throws ExecException {
Path outputPath =
actionExecutionContext.getInputPath(Iterables.getOnlyElement(action.getOutputs()));
// TODO(ulfjack): Consider acquiring local resources here before trying to write the file.
try (AutoProfiler p =
AutoProfiler.logged(
"running write for action " + action.prettyPrint(),
logger,
/*minTimeForLoggingInMilliseconds=*/ 100)) {
try {
try (OutputStream out = new BufferedOutputStream(outputPath.getOutputStream())) {
deterministicWriter.writeOutputFile(out);
}
if (makeExecutable) {
outputPath.setExecutable(true);
}
} catch (IOException e) {
throw new EnvironmentalExecException("IOException during file write", e);
}
}
return ImmutableList.of();
} |
17,091 | 1 | /*minTimeForLoggingInMilliseconds=*/ | @Override
public List<SpawnResult> writeOutputToFile(
AbstractAction action,
ActionExecutionContext actionExecutionContext,
DeterministicWriter deterministicWriter,
boolean makeExecutable, boolean isRemotable)
throws ExecException {
Path outputPath =
actionExecutionContext.getInputPath(Iterables.getOnlyElement(action.getOutputs()));
// TODO(ulfjack): Consider acquiring local resources here before trying to write the file.
try (AutoProfiler p =
AutoProfiler.logged(
"running write for action " + action.prettyPrint(),
logger,
/*minTimeForLoggingInMilliseconds=*/ 100)) {
try {
try (OutputStream out = new BufferedOutputStream(outputPath.getOutputStream())) {
deterministicWriter.writeOutputFile(out);
}
if (makeExecutable) {
outputPath.setExecutable(true);
}
} catch (IOException e) {
throw new EnvironmentalExecException("IOException during file write", e);
}
}
return ImmutableList.of();
} | NONSATD | true | "running write for action " + action.prettyPrint(),
logger,
/*minTimeForLoggingInMilliseconds=*/ 100)) {
try {
try (OutputStream out = new BufferedOutputStream(outputPath.getOutputStream())) { | DeterministicWriter deterministicWriter,
boolean makeExecutable, boolean isRemotable)
throws ExecException {
Path outputPath =
actionExecutionContext.getInputPath(Iterables.getOnlyElement(action.getOutputs()));
// TODO(ulfjack): Consider acquiring local resources here before trying to write the file.
try (AutoProfiler p =
AutoProfiler.logged(
"running write for action " + action.prettyPrint(),
logger,
/*minTimeForLoggingInMilliseconds=*/ 100)) {
try {
try (OutputStream out = new BufferedOutputStream(outputPath.getOutputStream())) {
deterministicWriter.writeOutputFile(out);
}
if (makeExecutable) {
outputPath.setExecutable(true);
}
} catch (IOException e) {
throw new EnvironmentalExecException("IOException during file write", e);
} | @Override
public List<SpawnResult> writeOutputToFile(
AbstractAction action,
ActionExecutionContext actionExecutionContext,
DeterministicWriter deterministicWriter,
boolean makeExecutable, boolean isRemotable)
throws ExecException {
Path outputPath =
actionExecutionContext.getInputPath(Iterables.getOnlyElement(action.getOutputs()));
// TODO(ulfjack): Consider acquiring local resources here before trying to write the file.
try (AutoProfiler p =
AutoProfiler.logged(
"running write for action " + action.prettyPrint(),
logger,
/*minTimeForLoggingInMilliseconds=*/ 100)) {
try {
try (OutputStream out = new BufferedOutputStream(outputPath.getOutputStream())) {
deterministicWriter.writeOutputFile(out);
}
if (makeExecutable) {
outputPath.setExecutable(true);
}
} catch (IOException e) {
throw new EnvironmentalExecException("IOException during file write", e);
}
}
return ImmutableList.of();
} |
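The TODO above is about acquiring local resources before the write; a related hardening often used for this kind of output writer is writing to a sibling temporary file and renaming it into place, so readers never observe a half-written file. A generic JDK sketch, not Bazel code; note that ATOMIC_MOVE can throw on filesystems that do not support it.

import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

static void writeAtomically(Path target, byte[] content) throws IOException {
    Path tmp = Files.createTempFile(target.getParent(), target.getFileName().toString(), ".tmp");
    try (OutputStream out = new BufferedOutputStream(Files.newOutputStream(tmp))) {
        out.write(content);
    }
    Files.move(tmp, target, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
}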
8,907 | 0 | // TODO: document me | public void setWs(boolean b) {
// TODO: document me
this.tr.setSkipws(b);
} | DOCUMENTATION | true | public void setWs(boolean b) {
// TODO: document me
this.tr.setSkipws(b);
} | public void setWs(boolean b) {
// TODO: document me
this.tr.setSkipws(b);
} | public void setWs(boolean b) {
// TODO: document me
this.tr.setSkipws(b);
} |
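For the "document me" TODO above, a Javadoc sketch; the description is inferred from the setSkipws name and would need to be checked against the reader's actual contract.

/**
 * Controls whether the underlying reader skips whitespace.
 *
 * @param b {@code true} to skip whitespace, {@code false} to preserve it
 */
public void setWs(boolean b) {
    this.tr.setSkipws(b);
}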
8,908 | 0 | // TODO: document me | public void setCl(boolean b) {
// TODO: document me
this.tr.setResolveContLines(b);
} | DOCUMENTATION | true | public void setCl(boolean b) {
// TODO: document me
this.tr.setResolveContLines(b);
} | public void setCl(boolean b) {
// TODO: document me
this.tr.setResolveContLines(b);
} | public void setCl(boolean b) {
// TODO: document me
this.tr.setResolveContLines(b);
} |
33,483 | 0 | //TODO: Think about using System.arrayCopy here(what is faster?) | public void copyData(DataWrapper body, byte[] data, int offset) {
for(int i = 0; i < body.getReadableSize(); i++) {
//TODO: Think about using System.arrayCopy here(what is faster?)
data[offset + i] = body.readByteAt(i);
}
} | DESIGN | true | public void copyData(DataWrapper body, byte[] data, int offset) {
for(int i = 0; i < body.getReadableSize(); i++) {
//TODO: Think about using System.arrayCopy here(what is faster?)
data[offset + i] = body.readByteAt(i);
} | public void copyData(DataWrapper body, byte[] data, int offset) {
for(int i = 0; i < body.getReadableSize(); i++) {
//TODO: Think about using System.arrayCopy here(what is faster?)
data[offset + i] = body.readByteAt(i);
}
} | public void copyData(DataWrapper body, byte[] data, int offset) {
for(int i = 0; i < body.getReadableSize(); i++) {
//TODO: Think about using System.arrayCopy here(what is faster?)
data[offset + i] = body.readByteAt(i);
}
} |
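On the System.arraycopy question in the comment above: the bulk copy usually wins because it avoids a per-byte call and bounds check, but it only applies once the bytes live in a contiguous array. The accessor below (createByteArray()) is an assumption about DataWrapper, not a confirmed API; if the wrapper is backed by several chunks, a copy-out step would be needed first.

byte[] src = body.createByteArray();                 // assumed bulk accessor on DataWrapper
System.arraycopy(src, 0, data, offset, src.length);  // one bulk copy instead of N readByteAt() calls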
33,494 | 0 | /**
* Checks if expiration process needs to be performed for the given Iceberg table
* by comparing stored last expiration time.
* If difference between last expiration time and current time is more or equal to
* expiration period, launches expiration process.
* If expiration period is zero or negative, no expiration process will be launched.
*
* @param table Iceberg table instance
* @return true if expiration process was launched, false otherwise
*/ | public boolean expire(Table table) {
if (expirationPeriod <= 0) {
return false;
}
long current = System.currentTimeMillis();
Long last = expirationStatus.putIfAbsent(table.location(), current);
if (last != null && current - last >= expirationPeriod) {
expirationStatus.put(table.location(), current);
ExecutorService executorService = executorService();
executorService.submit(() -> {
logger.debug("Expiring Iceberg table [{}] metadata", table.location());
table.expireSnapshots()
.expireOlderThan(current)
.commit();
// TODO: Replace with table metadata expiration through Iceberg API
// when https://github.com/apache/incubator-iceberg/issues/181 is resolved
// table.expireTableMetadata().expireOlderThan(current).commit();
expireTableMetadata(table);
});
return true;
}
return false;
} | NONSATD | true | public boolean expire(Table table) {
if (expirationPeriod <= 0) {
return false;
}
long current = System.currentTimeMillis();
Long last = expirationStatus.putIfAbsent(table.location(), current);
if (last != null && current - last >= expirationPeriod) {
expirationStatus.put(table.location(), current);
ExecutorService executorService = executorService();
executorService.submit(() -> {
logger.debug("Expiring Iceberg table [{}] metadata", table.location());
table.expireSnapshots()
.expireOlderThan(current)
.commit();
// TODO: Replace with table metadata expiration through Iceberg API
// when https://github.com/apache/incubator-iceberg/issues/181 is resolved
// table.expireTableMetadata().expireOlderThan(current).commit();
expireTableMetadata(table);
});
return true;
}
return false;
} | public boolean expire(Table table) {
if (expirationPeriod <= 0) {
return false;
}
long current = System.currentTimeMillis();
Long last = expirationStatus.putIfAbsent(table.location(), current);
if (last != null && current - last >= expirationPeriod) {
expirationStatus.put(table.location(), current);
ExecutorService executorService = executorService();
executorService.submit(() -> {
logger.debug("Expiring Iceberg table [{}] metadata", table.location());
table.expireSnapshots()
.expireOlderThan(current)
.commit();
// TODO: Replace with table metadata expiration through Iceberg API
// when https://github.com/apache/incubator-iceberg/issues/181 is resolved
// table.expireTableMetadata().expireOlderThan(current).commit();
expireTableMetadata(table);
});
return true;
}
return false;
} | public boolean expire(Table table) {
if (expirationPeriod <= 0) {
return false;
}
long current = System.currentTimeMillis();
Long last = expirationStatus.putIfAbsent(table.location(), current);
if (last != null && current - last >= expirationPeriod) {
expirationStatus.put(table.location(), current);
ExecutorService executorService = executorService();
executorService.submit(() -> {
logger.debug("Expiring Iceberg table [{}] metadata", table.location());
table.expireSnapshots()
.expireOlderThan(current)
.commit();
// TODO: Replace with table metadata expiration through Iceberg API
// when https://github.com/apache/incubator-iceberg/issues/181 is resolved
// table.expireTableMetadata().expireOlderThan(current).commit();
expireTableMetadata(table);
});
return true;
}
return false;
} |
33,494 | 1 | // TODO: Replace with table metadata expiration through Iceberg API
// when https://github.com/apache/incubator-iceberg/issues/181 is resolved
// table.expireTableMetadata().expireOlderThan(current).commit(); | public boolean expire(Table table) {
if (expirationPeriod <= 0) {
return false;
}
long current = System.currentTimeMillis();
Long last = expirationStatus.putIfAbsent(table.location(), current);
if (last != null && current - last >= expirationPeriod) {
expirationStatus.put(table.location(), current);
ExecutorService executorService = executorService();
executorService.submit(() -> {
logger.debug("Expiring Iceberg table [{}] metadata", table.location());
table.expireSnapshots()
.expireOlderThan(current)
.commit();
// TODO: Replace with table metadata expiration through Iceberg API
// when https://github.com/apache/incubator-iceberg/issues/181 is resolved
// table.expireTableMetadata().expireOlderThan(current).commit();
expireTableMetadata(table);
});
return true;
}
return false;
} | DESIGN | true | .expireOlderThan(current)
.commit();
// TODO: Replace with table metadata expiration through Iceberg API
// when https://github.com/apache/incubator-iceberg/issues/181 is resolved
// table.expireTableMetadata().expireOlderThan(current).commit();
expireTableMetadata(table);
}); | long current = System.currentTimeMillis();
Long last = expirationStatus.putIfAbsent(table.location(), current);
if (last != null && current - last >= expirationPeriod) {
expirationStatus.put(table.location(), current);
ExecutorService executorService = executorService();
executorService.submit(() -> {
logger.debug("Expiring Iceberg table [{}] metadata", table.location());
table.expireSnapshots()
.expireOlderThan(current)
.commit();
// TODO: Replace with table metadata expiration through Iceberg API
// when https://github.com/apache/incubator-iceberg/issues/181 is resolved
// table.expireTableMetadata().expireOlderThan(current).commit();
expireTableMetadata(table);
});
return true;
}
return false;
} | public boolean expire(Table table) {
if (expirationPeriod <= 0) {
return false;
}
long current = System.currentTimeMillis();
Long last = expirationStatus.putIfAbsent(table.location(), current);
if (last != null && current - last >= expirationPeriod) {
expirationStatus.put(table.location(), current);
ExecutorService executorService = executorService();
executorService.submit(() -> {
logger.debug("Expiring Iceberg table [{}] metadata", table.location());
table.expireSnapshots()
.expireOlderThan(current)
.commit();
// TODO: Replace with table metadata expiration through Iceberg API
// when https://github.com/apache/incubator-iceberg/issues/181 is resolved
// table.expireTableMetadata().expireOlderThan(current).commit();
expireTableMetadata(table);
});
return true;
}
return false;
} |
728 | 0 | // TODO(crbug.com/948518): This is a band-aid fix for not crashing when undo the last closed
// tab, should remove later.
/**
* @return Whether filter should notify observers about the SetIndex call.
*/ | protected boolean shouldNotifyObserversOnSetIndex() {
return true;
} | DEFECT | true | protected boolean shouldNotifyObserversOnSetIndex() {
return true;
} | protected boolean shouldNotifyObserversOnSetIndex() {
return true;
} | protected boolean shouldNotifyObserversOnSetIndex() {
return true;
} |
33,498 | 0 | // special handling for escaped lines | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) {
case '-':
return token(CppTokenId.MINUSMINUS);
case '>':
if (read(true) == '*') {
return token(CppTokenId.ARROWMBR);
}
backup(1);
return token(CppTokenId.ARROW);
case '=':
return token(CppTokenId.MINUSEQ);
}
backup(1);
return token(CppTokenId.MINUS);
case '*':
switch (read(true)) {
case '/': // invalid comment end - */ or int*/* */
if (read(true) == '*') {
backup(2);
return token(CppTokenId.STAR);
}
backup(1);
return token(CppTokenId.INVALID_COMMENT_END);
case '=':
return token(CppTokenId.STAREQ);
}
backup(1);
return token(CppTokenId.STAR);
case '|':
switch (read(true)) {
case '|':
return token(CppTokenId.BARBAR);
case '=':
return token(CppTokenId.BAREQ);
}
backup(1);
return token(CppTokenId.BAR);
case '&':
switch (read(true)) {
case '&':
return token(CppTokenId.AMPAMP);
case '=':
return token(CppTokenId.AMPEQ);
}
backup(1);
return token(CppTokenId.AMP);
case '%': {
Token<CppTokenId> out = finishPercent();
assert out != null : "not handled %";
return out;
}
case '^':
if (read(true) == '=') {
return token(CppTokenId.CARETEQ);
}
backup(1);
return token(CppTokenId.CARET);
case '!':
if (read(true) == '=') {
return token(CppTokenId.NOTEQ);
}
backup(1);
return token(CppTokenId.NOT);
case '.':
if ((c = read(true)) == '.') {
if (read(true) == '.') { // ellipsis ...
return token(CppTokenId.ELLIPSIS);
} else {
input.backup(2);
}
} else if ('0' <= c && c <= '9') { // float literal
return finishNumberLiteral(read(true), true);
} else if (c == '*') {
return token(CppTokenId.DOTMBR);
} else {
backup(1);
}
return token(CppTokenId.DOT);
case ':':
if (read(true) == ':') {
return token(CppTokenId.SCOPE);
}
backup(1);
return token(CppTokenId.COLON);
case '~':
return token(CppTokenId.TILDE);
case ',':
return token(CppTokenId.COMMA);
case ';':
return token(CppTokenId.SEMICOLON);
case '?':
return token(CppTokenId.QUESTION);
case '(':
return token(CppTokenId.LPAREN);
case ')':
return token(CppTokenId.RPAREN);
case '[':
return token(CppTokenId.LBRACKET);
case ']':
return token(CppTokenId.RBRACKET);
case '{':
return token(CppTokenId.LBRACE);
case '}':
return token(CppTokenId.RBRACE);
case '`':
return token(CppTokenId.GRAVE_ACCENT);
case '@':
return token(CppTokenId.AT);
case '0': // in a number literal
c = read(true);
if (c == 'x' || c == 'X' || // in hexadecimal (possibly floating-point) literal
c == 'b' || c == 'B' ) { // in bianry literal
boolean inFraction = false;
while (true) {
switch (read(true)) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
break;
case '.': // hex float literal
if (!inFraction) {
inFraction = true;
} else { // two dots in the float literal
return token(CppTokenId.FLOAT_LITERAL_INVALID);
}
break;
case 'l':
case 'L': // 0x1234l or 0x1234L
return finishLongLiteral(read(true));
case 'p':
case 'P': // binary exponent
return finishFloatExponent();
case 'u':
case 'U':
return finishUnsignedLiteral(read(true));
default:
backup(1);
// if float then before mandatory binary exponent => invalid
return token(inFraction ? CppTokenId.FLOAT_LITERAL_INVALID
: CppTokenId.INT_LITERAL);
}
} // end of while(true)
}
return finishNumberLiteral(c, false);
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return finishNumberLiteral(read(true), false);
case '\\':
return token(CppTokenId.BACK_SLASH);
case '\r':
consumeNewline();
return token(CppTokenId.NEW_LINE);
case '\n':
return token(CppTokenId.NEW_LINE, "\n", PartType.COMPLETE); // NOI18N
// All Character.isWhitespace(c) below 0x80 follow
// ['\t' - '\f'] and [0x1c - ' ']
case '\t':
case 0x0b:
case '\f':
case 0x1c:
case 0x1d:
case 0x1e:
case 0x1f:
return finishWhitespace();
case ' ':
c = read(true);
if (c == EOF || !Character.isWhitespace(c) || c == '\n' || c == '\r') { // Return single space as flyweight token
backup(1);
return token(CppTokenId.WHITESPACE, " ", PartType.COMPLETE); // NOI18N
}
return finishWhitespace();
case EOF:
if (isTokenSplittedByEscapedLine()) {
backup(1);
assert lastTokenEndedByEscapedLine > 0 : "lastTokenEndedByEscapedLine is " + lastTokenEndedByEscapedLine;
break;
}
return null;
case '$':
// dollar is extension in gcc and msvc $ is a valid start of identifiers
// return token(CppTokenId.DOLLAR);
default:
c = translateSurrogates(c);
if (CndLexerUtilities.isCppIdentifierStart(c)) {
if (c == 'L' || c == 'U' || c == 'u' || c == 'R') {
int next = read(true);
boolean raw_string = (c == 'R');
if (next == 'R' && (c == 'u' || c == 'U' || c == 'L')) {
// uR, UR or LR
raw_string = true;
next = read(true);
} else if (next == '8' && c == 'u') {
// u8
next = read(true);
if (next == 'R') {
// u8R
raw_string = true;
next = read(true);
}
}
if (next == '"') {
// string with L/U/u/R prefixes
Token<CppTokenId> out = raw_string ? finishRawString() : finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
} else if (next == '\'' && !raw_string) {
// char with L or U/u prefix
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
} else {
backup(1);
}
}
if (c == 'E') {
if(isExecSQL(c)) {
Token<CppTokenId> out = finishExecSQL();
assert out != null : "not handled exec sql";
return out;
}
}
return keywordOrIdentifier(c);
}
if (Character.isWhitespace(c)) {
return finishWhitespace();
}
// Invalid char
return token(CppTokenId.ERROR);
}
} // end of switch (c)
} // end of while(true)
} | NONSATD | true | public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false); | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--; | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first? |
33,498 | 1 | // NOI18N | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) {
case '-':
return token(CppTokenId.MINUSMINUS);
case '>':
if (read(true) == '*') {
return token(CppTokenId.ARROWMBR);
}
backup(1);
return token(CppTokenId.ARROW);
case '=':
return token(CppTokenId.MINUSEQ);
}
backup(1);
return token(CppTokenId.MINUS);
case '*':
switch (read(true)) {
case '/': // invalid comment end - */ or int*/* */
if (read(true) == '*') {
backup(2);
return token(CppTokenId.STAR);
}
backup(1);
return token(CppTokenId.INVALID_COMMENT_END);
case '=':
return token(CppTokenId.STAREQ);
}
backup(1);
return token(CppTokenId.STAR);
case '|':
switch (read(true)) {
case '|':
return token(CppTokenId.BARBAR);
case '=':
return token(CppTokenId.BAREQ);
}
backup(1);
return token(CppTokenId.BAR);
case '&':
switch (read(true)) {
case '&':
return token(CppTokenId.AMPAMP);
case '=':
return token(CppTokenId.AMPEQ);
}
backup(1);
return token(CppTokenId.AMP);
case '%': {
Token<CppTokenId> out = finishPercent();
assert out != null : "not handled %";
return out;
}
case '^':
if (read(true) == '=') {
return token(CppTokenId.CARETEQ);
}
backup(1);
return token(CppTokenId.CARET);
case '!':
if (read(true) == '=') {
return token(CppTokenId.NOTEQ);
}
backup(1);
return token(CppTokenId.NOT);
case '.':
if ((c = read(true)) == '.') {
if (read(true) == '.') { // ellipsis ...
return token(CppTokenId.ELLIPSIS);
} else {
input.backup(2);
}
} else if ('0' <= c && c <= '9') { // float literal
return finishNumberLiteral(read(true), true);
} else if (c == '*') {
return token(CppTokenId.DOTMBR);
} else {
backup(1);
}
return token(CppTokenId.DOT);
case ':':
if (read(true) == ':') {
return token(CppTokenId.SCOPE);
}
backup(1);
return token(CppTokenId.COLON);
case '~':
return token(CppTokenId.TILDE);
case ',':
return token(CppTokenId.COMMA);
case ';':
return token(CppTokenId.SEMICOLON);
case '?':
return token(CppTokenId.QUESTION);
case '(':
return token(CppTokenId.LPAREN);
case ')':
return token(CppTokenId.RPAREN);
case '[':
return token(CppTokenId.LBRACKET);
case ']':
return token(CppTokenId.RBRACKET);
case '{':
return token(CppTokenId.LBRACE);
case '}':
return token(CppTokenId.RBRACE);
case '`':
return token(CppTokenId.GRAVE_ACCENT);
case '@':
return token(CppTokenId.AT);
case '0': // in a number literal
c = read(true);
if (c == 'x' || c == 'X' || // in hexadecimal (possibly floating-point) literal
c == 'b' || c == 'B' ) { // in bianry literal
boolean inFraction = false;
while (true) {
switch (read(true)) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
break;
case '.': // hex float literal
if (!inFraction) {
inFraction = true;
} else { // two dots in the float literal
return token(CppTokenId.FLOAT_LITERAL_INVALID);
}
break;
case 'l':
case 'L': // 0x1234l or 0x1234L
return finishLongLiteral(read(true));
case 'p':
case 'P': // binary exponent
return finishFloatExponent();
case 'u':
case 'U':
return finishUnsignedLiteral(read(true));
default:
backup(1);
// if float then before mandatory binary exponent => invalid
return token(inFraction ? CppTokenId.FLOAT_LITERAL_INVALID
: CppTokenId.INT_LITERAL);
}
} // end of while(true)
}
return finishNumberLiteral(c, false);
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return finishNumberLiteral(read(true), false);
case '\\':
return token(CppTokenId.BACK_SLASH);
case '\r':
consumeNewline();
return token(CppTokenId.NEW_LINE);
case '\n':
return token(CppTokenId.NEW_LINE, "\n", PartType.COMPLETE); // NOI18N
// All Character.isWhitespace(c) below 0x80 follow
// ['\t' - '\f'] and [0x1c - ' ']
case '\t':
case 0x0b:
case '\f':
case 0x1c:
case 0x1d:
case 0x1e:
case 0x1f:
return finishWhitespace();
case ' ':
c = read(true);
if (c == EOF || !Character.isWhitespace(c) || c == '\n' || c == '\r') { // Return single space as flyweight token
backup(1);
return token(CppTokenId.WHITESPACE, " ", PartType.COMPLETE); // NOI18N
}
return finishWhitespace();
case EOF:
if (isTokenSplittedByEscapedLine()) {
backup(1);
assert lastTokenEndedByEscapedLine > 0 : "lastTokenEndedByEscapedLine is " + lastTokenEndedByEscapedLine;
break;
}
return null;
case '$':
// dollar is extension in gcc and msvc $ is a valid start of identifiers
// return token(CppTokenId.DOLLAR);
default:
c = translateSurrogates(c);
if (CndLexerUtilities.isCppIdentifierStart(c)) {
if (c == 'L' || c == 'U' || c == 'u' || c == 'R') {
int next = read(true);
boolean raw_string = (c == 'R');
if (next == 'R' && (c == 'u' || c == 'U' || c == 'L')) {
// uR, UR or LR
raw_string = true;
next = read(true);
} else if (next == '8' && c == 'u') {
// u8
next = read(true);
if (next == 'R') {
// u8R
raw_string = true;
next = read(true);
}
}
if (next == '"') {
// string with L/U/u/R prefixes
Token<CppTokenId> out = raw_string ? finishRawString() : finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
} else if (next == '\'' && !raw_string) {
// char with L or U/u prefix
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
} else {
backup(1);
}
}
if (c == 'E') {
if(isExecSQL(c)) {
Token<CppTokenId> out = finishExecSQL();
assert out != null : "not handled exec sql";
return out;
}
}
return keywordOrIdentifier(c);
}
if (Character.isWhitespace(c)) {
return finishWhitespace();
}
// Invalid char
return token(CppTokenId.ERROR);
}
} // end of switch (c)
} // end of while(true)
} | NONSATD | true | } else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else { | c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out; | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out; |
33,498 | 2 | // if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first? | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) {
case '-':
return token(CppTokenId.MINUSMINUS);
case '>':
if (read(true) == '*') {
return token(CppTokenId.ARROWMBR);
}
backup(1);
return token(CppTokenId.ARROW);
case '=':
return token(CppTokenId.MINUSEQ);
}
backup(1);
return token(CppTokenId.MINUS);
case '*':
switch (read(true)) {
case '/': // invalid comment end - */ or int*/* */
if (read(true) == '*') {
backup(2);
return token(CppTokenId.STAR);
}
backup(1);
return token(CppTokenId.INVALID_COMMENT_END);
case '=':
return token(CppTokenId.STAREQ);
}
backup(1);
return token(CppTokenId.STAR);
case '|':
switch (read(true)) {
case '|':
return token(CppTokenId.BARBAR);
case '=':
return token(CppTokenId.BAREQ);
}
backup(1);
return token(CppTokenId.BAR);
case '&':
switch (read(true)) {
case '&':
return token(CppTokenId.AMPAMP);
case '=':
return token(CppTokenId.AMPEQ);
}
backup(1);
return token(CppTokenId.AMP);
case '%': {
Token<CppTokenId> out = finishPercent();
assert out != null : "not handled %";
return out;
}
case '^':
if (read(true) == '=') {
return token(CppTokenId.CARETEQ);
}
backup(1);
return token(CppTokenId.CARET);
case '!':
if (read(true) == '=') {
return token(CppTokenId.NOTEQ);
}
backup(1);
return token(CppTokenId.NOT);
case '.':
if ((c = read(true)) == '.') {
if (read(true) == '.') { // ellipsis ...
return token(CppTokenId.ELLIPSIS);
} else {
input.backup(2);
}
} else if ('0' <= c && c <= '9') { // float literal
return finishNumberLiteral(read(true), true);
} else if (c == '*') {
return token(CppTokenId.DOTMBR);
} else {
backup(1);
}
return token(CppTokenId.DOT);
case ':':
if (read(true) == ':') {
return token(CppTokenId.SCOPE);
}
backup(1);
return token(CppTokenId.COLON);
case '~':
return token(CppTokenId.TILDE);
case ',':
return token(CppTokenId.COMMA);
case ';':
return token(CppTokenId.SEMICOLON);
case '?':
return token(CppTokenId.QUESTION);
case '(':
return token(CppTokenId.LPAREN);
case ')':
return token(CppTokenId.RPAREN);
case '[':
return token(CppTokenId.LBRACKET);
case ']':
return token(CppTokenId.RBRACKET);
case '{':
return token(CppTokenId.LBRACE);
case '}':
return token(CppTokenId.RBRACE);
case '`':
return token(CppTokenId.GRAVE_ACCENT);
case '@':
return token(CppTokenId.AT);
case '0': // in a number literal
c = read(true);
if (c == 'x' || c == 'X' || // in hexadecimal (possibly floating-point) literal
c == 'b' || c == 'B' ) { // in bianry literal
boolean inFraction = false;
while (true) {
switch (read(true)) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
break;
case '.': // hex float literal
if (!inFraction) {
inFraction = true;
} else { // two dots in the float literal
return token(CppTokenId.FLOAT_LITERAL_INVALID);
}
break;
case 'l':
case 'L': // 0x1234l or 0x1234L
return finishLongLiteral(read(true));
case 'p':
case 'P': // binary exponent
return finishFloatExponent();
case 'u':
case 'U':
return finishUnsignedLiteral(read(true));
default:
backup(1);
// if float then before mandatory binary exponent => invalid
return token(inFraction ? CppTokenId.FLOAT_LITERAL_INVALID
: CppTokenId.INT_LITERAL);
}
} // end of while(true)
}
return finishNumberLiteral(c, false);
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return finishNumberLiteral(read(true), false);
case '\\':
return token(CppTokenId.BACK_SLASH);
case '\r':
consumeNewline();
return token(CppTokenId.NEW_LINE);
case '\n':
return token(CppTokenId.NEW_LINE, "\n", PartType.COMPLETE); // NOI18N
// All Character.isWhitespace(c) below 0x80 follow
// ['\t' - '\f'] and [0x1c - ' ']
case '\t':
case 0x0b:
case '\f':
case 0x1c:
case 0x1d:
case 0x1e:
case 0x1f:
return finishWhitespace();
case ' ':
c = read(true);
if (c == EOF || !Character.isWhitespace(c) || c == '\n' || c == '\r') { // Return single space as flyweight token
backup(1);
return token(CppTokenId.WHITESPACE, " ", PartType.COMPLETE); // NOI18N
}
return finishWhitespace();
case EOF:
if (isTokenSplittedByEscapedLine()) {
backup(1);
assert lastTokenEndedByEscapedLine > 0 : "lastTokenEndedByEscapedLine is " + lastTokenEndedByEscapedLine;
break;
}
return null;
case '$':
// dollar is extension in gcc and msvc $ is a valid start of identifiers
// return token(CppTokenId.DOLLAR);
default:
c = translateSurrogates(c);
if (CndLexerUtilities.isCppIdentifierStart(c)) {
if (c == 'L' || c == 'U' || c == 'u' || c == 'R') {
int next = read(true);
boolean raw_string = (c == 'R');
if (next == 'R' && (c == 'u' || c == 'U' || c == 'L')) {
// uR, UR or LR
raw_string = true;
next = read(true);
} else if (next == '8' && c == 'u') {
// u8
next = read(true);
if (next == 'R') {
// u8R
raw_string = true;
next = read(true);
}
}
if (next == '"') {
// string with L/U/u/R prefixes
Token<CppTokenId> out = raw_string ? finishRawString() : finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
} else if (next == '\'' && !raw_string) {
// char with L or U/u prefix
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
} else {
backup(1);
}
}
if (c == 'E') {
if(isExecSQL(c)) {
Token<CppTokenId> out = finishExecSQL();
assert out != null : "not handled exec sql";
return out;
}
}
return keywordOrIdentifier(c);
}
if (Character.isWhitespace(c)) {
return finishWhitespace();
}
// Invalid char
return token(CppTokenId.ERROR);
}
} // end of switch (c)
} // end of while(true)
} | DESIGN | true | } else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': { | if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out; | while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{ |
33,498 | 3 | // char literal | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) {
case '-':
return token(CppTokenId.MINUSMINUS);
case '>':
if (read(true) == '*') {
return token(CppTokenId.ARROWMBR);
}
backup(1);
return token(CppTokenId.ARROW);
case '=':
return token(CppTokenId.MINUSEQ);
}
backup(1);
return token(CppTokenId.MINUS);
case '*':
switch (read(true)) {
case '/': // invalid comment end - */ or int*/* */
if (read(true) == '*') {
backup(2);
return token(CppTokenId.STAR);
}
backup(1);
return token(CppTokenId.INVALID_COMMENT_END);
case '=':
return token(CppTokenId.STAREQ);
}
backup(1);
return token(CppTokenId.STAR);
case '|':
switch (read(true)) {
case '|':
return token(CppTokenId.BARBAR);
case '=':
return token(CppTokenId.BAREQ);
}
backup(1);
return token(CppTokenId.BAR);
case '&':
switch (read(true)) {
case '&':
return token(CppTokenId.AMPAMP);
case '=':
return token(CppTokenId.AMPEQ);
}
backup(1);
return token(CppTokenId.AMP);
case '%': {
Token<CppTokenId> out = finishPercent();
assert out != null : "not handled %";
return out;
}
case '^':
if (read(true) == '=') {
return token(CppTokenId.CARETEQ);
}
backup(1);
return token(CppTokenId.CARET);
case '!':
if (read(true) == '=') {
return token(CppTokenId.NOTEQ);
}
backup(1);
return token(CppTokenId.NOT);
case '.':
if ((c = read(true)) == '.') {
if (read(true) == '.') { // ellipsis ...
return token(CppTokenId.ELLIPSIS);
} else {
input.backup(2);
}
} else if ('0' <= c && c <= '9') { // float literal
return finishNumberLiteral(read(true), true);
} else if (c == '*') {
return token(CppTokenId.DOTMBR);
} else {
backup(1);
}
return token(CppTokenId.DOT);
case ':':
if (read(true) == ':') {
return token(CppTokenId.SCOPE);
}
backup(1);
return token(CppTokenId.COLON);
case '~':
return token(CppTokenId.TILDE);
case ',':
return token(CppTokenId.COMMA);
case ';':
return token(CppTokenId.SEMICOLON);
case '?':
return token(CppTokenId.QUESTION);
case '(':
return token(CppTokenId.LPAREN);
case ')':
return token(CppTokenId.RPAREN);
case '[':
return token(CppTokenId.LBRACKET);
case ']':
return token(CppTokenId.RBRACKET);
case '{':
return token(CppTokenId.LBRACE);
case '}':
return token(CppTokenId.RBRACE);
case '`':
return token(CppTokenId.GRAVE_ACCENT);
case '@':
return token(CppTokenId.AT);
case '0': // in a number literal
c = read(true);
if (c == 'x' || c == 'X' || // in hexadecimal (possibly floating-point) literal
c == 'b' || c == 'B' ) { // in bianry literal
boolean inFraction = false;
while (true) {
switch (read(true)) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
break;
case '.': // hex float literal
if (!inFraction) {
inFraction = true;
} else { // two dots in the float literal
return token(CppTokenId.FLOAT_LITERAL_INVALID);
}
break;
case 'l':
case 'L': // 0x1234l or 0x1234L
return finishLongLiteral(read(true));
case 'p':
case 'P': // binary exponent
return finishFloatExponent();
case 'u':
case 'U':
return finishUnsignedLiteral(read(true));
default:
backup(1);
// if float then before mandatory binary exponent => invalid
return token(inFraction ? CppTokenId.FLOAT_LITERAL_INVALID
: CppTokenId.INT_LITERAL);
}
} // end of while(true)
}
return finishNumberLiteral(c, false);
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return finishNumberLiteral(read(true), false);
case '\\':
return token(CppTokenId.BACK_SLASH);
case '\r':
consumeNewline();
return token(CppTokenId.NEW_LINE);
case '\n':
return token(CppTokenId.NEW_LINE, "\n", PartType.COMPLETE); // NOI18N
// All Character.isWhitespace(c) below 0x80 follow
// ['\t' - '\f'] and [0x1c - ' ']
case '\t':
case 0x0b:
case '\f':
case 0x1c:
case 0x1d:
case 0x1e:
case 0x1f:
return finishWhitespace();
case ' ':
c = read(true);
if (c == EOF || !Character.isWhitespace(c) || c == '\n' || c == '\r') { // Return single space as flyweight token
backup(1);
return token(CppTokenId.WHITESPACE, " ", PartType.COMPLETE); // NOI18N
}
return finishWhitespace();
case EOF:
if (isTokenSplittedByEscapedLine()) {
backup(1);
assert lastTokenEndedByEscapedLine > 0 : "lastTokenEndedByEscapedLine is " + lastTokenEndedByEscapedLine;
break;
}
return null;
case '$':
// dollar is extension in gcc and msvc $ is a valid start of identifiers
// return token(CppTokenId.DOLLAR);
default:
c = translateSurrogates(c);
if (CndLexerUtilities.isCppIdentifierStart(c)) {
if (c == 'L' || c == 'U' || c == 'u' || c == 'R') {
int next = read(true);
boolean raw_string = (c == 'R');
if (next == 'R' && (c == 'u' || c == 'U' || c == 'L')) {
// uR, UR or LR
raw_string = true;
next = read(true);
} else if (next == '8' && c == 'u') {
// u8
next = read(true);
if (next == 'R') {
// u8R
raw_string = true;
next = read(true);
}
}
if (next == '"') {
// string with L/U/u/R prefixes
Token<CppTokenId> out = raw_string ? finishRawString() : finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
} else if (next == '\'' && !raw_string) {
// char with L or U/u prefix
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
} else {
backup(1);
}
}
if (c == 'E') {
if(isExecSQL(c)) {
Token<CppTokenId> out = finishExecSQL();
assert out != null : "not handled exec sql";
return out;
}
}
return keywordOrIdentifier(c);
}
if (Character.isWhitespace(c)) {
return finishWhitespace();
}
// Invalid char
return token(CppTokenId.ERROR);
}
} // end of switch (c)
} // end of while(true)
} | NONSATD | true | return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote"; | } else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/': | if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment |
33,498 | 4 | // in single-line or doxygen comment | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) {
case '-':
return token(CppTokenId.MINUSMINUS);
case '>':
if (read(true) == '*') {
return token(CppTokenId.ARROWMBR);
}
backup(1);
return token(CppTokenId.ARROW);
case '=':
return token(CppTokenId.MINUSEQ);
}
backup(1);
return token(CppTokenId.MINUS);
case '*':
switch (read(true)) {
case '/': // invalid comment end - */ or int*/* */
if (read(true) == '*') {
backup(2);
return token(CppTokenId.STAR);
}
backup(1);
return token(CppTokenId.INVALID_COMMENT_END);
case '=':
return token(CppTokenId.STAREQ);
}
backup(1);
return token(CppTokenId.STAR);
case '|':
switch (read(true)) {
case '|':
return token(CppTokenId.BARBAR);
case '=':
return token(CppTokenId.BAREQ);
}
backup(1);
return token(CppTokenId.BAR);
case '&':
switch (read(true)) {
case '&':
return token(CppTokenId.AMPAMP);
case '=':
return token(CppTokenId.AMPEQ);
}
backup(1);
return token(CppTokenId.AMP);
case '%': {
Token<CppTokenId> out = finishPercent();
assert out != null : "not handled %";
return out;
}
case '^':
if (read(true) == '=') {
return token(CppTokenId.CARETEQ);
}
backup(1);
return token(CppTokenId.CARET);
case '!':
if (read(true) == '=') {
return token(CppTokenId.NOTEQ);
}
backup(1);
return token(CppTokenId.NOT);
case '.':
if ((c = read(true)) == '.') {
if (read(true) == '.') { // ellipsis ...
return token(CppTokenId.ELLIPSIS);
} else {
input.backup(2);
}
} else if ('0' <= c && c <= '9') { // float literal
return finishNumberLiteral(read(true), true);
} else if (c == '*') {
return token(CppTokenId.DOTMBR);
} else {
backup(1);
}
return token(CppTokenId.DOT);
case ':':
if (read(true) == ':') {
return token(CppTokenId.SCOPE);
}
backup(1);
return token(CppTokenId.COLON);
case '~':
return token(CppTokenId.TILDE);
case ',':
return token(CppTokenId.COMMA);
case ';':
return token(CppTokenId.SEMICOLON);
case '?':
return token(CppTokenId.QUESTION);
case '(':
return token(CppTokenId.LPAREN);
case ')':
return token(CppTokenId.RPAREN);
case '[':
return token(CppTokenId.LBRACKET);
case ']':
return token(CppTokenId.RBRACKET);
case '{':
return token(CppTokenId.LBRACE);
case '}':
return token(CppTokenId.RBRACE);
case '`':
return token(CppTokenId.GRAVE_ACCENT);
case '@':
return token(CppTokenId.AT);
case '0': // in a number literal
c = read(true);
if (c == 'x' || c == 'X' || // in hexadecimal (possibly floating-point) literal
c == 'b' || c == 'B' ) { // in bianry literal
boolean inFraction = false;
while (true) {
switch (read(true)) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
break;
case '.': // hex float literal
if (!inFraction) {
inFraction = true;
} else { // two dots in the float literal
return token(CppTokenId.FLOAT_LITERAL_INVALID);
}
break;
case 'l':
case 'L': // 0x1234l or 0x1234L
return finishLongLiteral(read(true));
case 'p':
case 'P': // binary exponent
return finishFloatExponent();
case 'u':
case 'U':
return finishUnsignedLiteral(read(true));
default:
backup(1);
// if float then before mandatory binary exponent => invalid
return token(inFraction ? CppTokenId.FLOAT_LITERAL_INVALID
: CppTokenId.INT_LITERAL);
}
} // end of while(true)
}
return finishNumberLiteral(c, false);
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return finishNumberLiteral(read(true), false);
case '\\':
return token(CppTokenId.BACK_SLASH);
case '\r':
consumeNewline();
return token(CppTokenId.NEW_LINE);
case '\n':
return token(CppTokenId.NEW_LINE, "\n", PartType.COMPLETE); // NOI18N
// All Character.isWhitespace(c) below 0x80 follow
// ['\t' - '\f'] and [0x1c - ' ']
case '\t':
case 0x0b:
case '\f':
case 0x1c:
case 0x1d:
case 0x1e:
case 0x1f:
return finishWhitespace();
case ' ':
c = read(true);
if (c == EOF || !Character.isWhitespace(c) || c == '\n' || c == '\r') { // Return single space as flyweight token
backup(1);
return token(CppTokenId.WHITESPACE, " ", PartType.COMPLETE); // NOI18N
}
return finishWhitespace();
case EOF:
if (isTokenSplittedByEscapedLine()) {
backup(1);
assert lastTokenEndedByEscapedLine > 0 : "lastTokenEndedByEscapedLine is " + lastTokenEndedByEscapedLine;
break;
}
return null;
case '$':
// dollar is extension in gcc and msvc $ is a valid start of identifiers
// return token(CppTokenId.DOLLAR);
default:
c = translateSurrogates(c);
if (CndLexerUtilities.isCppIdentifierStart(c)) {
if (c == 'L' || c == 'U' || c == 'u' || c == 'R') {
int next = read(true);
boolean raw_string = (c == 'R');
if (next == 'R' && (c == 'u' || c == 'U' || c == 'L')) {
// uR, UR or LR
raw_string = true;
next = read(true);
} else if (next == '8' && c == 'u') {
// u8
next = read(true);
if (next == 'R') {
// u8R
raw_string = true;
next = read(true);
}
}
if (next == '"') {
// string with L/U/u/R prefixes
Token<CppTokenId> out = raw_string ? finishRawString() : finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
} else if (next == '\'' && !raw_string) {
// char with L or U/u prefix
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
} else {
backup(1);
}
}
if (c == 'E') {
if(isExecSQL(c)) {
Token<CppTokenId> out = finishExecSQL();
assert out != null : "not handled exec sql";
return out;
}
}
return keywordOrIdentifier(c);
}
if (Character.isWhitespace(c)) {
return finishWhitespace();
}
// Invalid char
return token(CppTokenId.ERROR);
}
} // end of switch (c)
} // end of while(true)
} | NONSATD | true | case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true); | assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true); | // if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
} |
33,498 | 5 | // found /= | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) {
case '-':
return token(CppTokenId.MINUSMINUS);
case '>':
if (read(true) == '*') {
return token(CppTokenId.ARROWMBR);
}
backup(1);
return token(CppTokenId.ARROW);
case '=':
return token(CppTokenId.MINUSEQ);
}
backup(1);
return token(CppTokenId.MINUS);
case '*':
switch (read(true)) {
case '/': // invalid comment end - */ or int*/* */
if (read(true) == '*') {
backup(2);
return token(CppTokenId.STAR);
}
backup(1);
return token(CppTokenId.INVALID_COMMENT_END);
case '=':
return token(CppTokenId.STAREQ);
}
backup(1);
return token(CppTokenId.STAR);
case '|':
switch (read(true)) {
case '|':
return token(CppTokenId.BARBAR);
case '=':
return token(CppTokenId.BAREQ);
}
backup(1);
return token(CppTokenId.BAR);
case '&':
switch (read(true)) {
case '&':
return token(CppTokenId.AMPAMP);
case '=':
return token(CppTokenId.AMPEQ);
}
backup(1);
return token(CppTokenId.AMP);
case '%': {
Token<CppTokenId> out = finishPercent();
assert out != null : "not handled %";
return out;
}
case '^':
if (read(true) == '=') {
return token(CppTokenId.CARETEQ);
}
backup(1);
return token(CppTokenId.CARET);
case '!':
if (read(true) == '=') {
return token(CppTokenId.NOTEQ);
}
backup(1);
return token(CppTokenId.NOT);
case '.':
if ((c = read(true)) == '.') {
if (read(true) == '.') { // ellipsis ...
return token(CppTokenId.ELLIPSIS);
} else {
input.backup(2);
}
} else if ('0' <= c && c <= '9') { // float literal
return finishNumberLiteral(read(true), true);
} else if (c == '*') {
return token(CppTokenId.DOTMBR);
} else {
backup(1);
}
return token(CppTokenId.DOT);
case ':':
if (read(true) == ':') {
return token(CppTokenId.SCOPE);
}
backup(1);
return token(CppTokenId.COLON);
case '~':
return token(CppTokenId.TILDE);
case ',':
return token(CppTokenId.COMMA);
case ';':
return token(CppTokenId.SEMICOLON);
case '?':
return token(CppTokenId.QUESTION);
case '(':
return token(CppTokenId.LPAREN);
case ')':
return token(CppTokenId.RPAREN);
case '[':
return token(CppTokenId.LBRACKET);
case ']':
return token(CppTokenId.RBRACKET);
case '{':
return token(CppTokenId.LBRACE);
case '}':
return token(CppTokenId.RBRACE);
case '`':
return token(CppTokenId.GRAVE_ACCENT);
case '@':
return token(CppTokenId.AT);
case '0': // in a number literal
c = read(true);
if (c == 'x' || c == 'X' || // in hexadecimal (possibly floating-point) literal
c == 'b' || c == 'B' ) { // in bianry literal
boolean inFraction = false;
while (true) {
switch (read(true)) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
break;
case '.': // hex float literal
if (!inFraction) {
inFraction = true;
} else { // two dots in the float literal
return token(CppTokenId.FLOAT_LITERAL_INVALID);
}
break;
case 'l':
case 'L': // 0x1234l or 0x1234L
return finishLongLiteral(read(true));
case 'p':
case 'P': // binary exponent
return finishFloatExponent();
case 'u':
case 'U':
return finishUnsignedLiteral(read(true));
default:
backup(1);
// if float then before mandatory binary exponent => invalid
return token(inFraction ? CppTokenId.FLOAT_LITERAL_INVALID
: CppTokenId.INT_LITERAL);
}
} // end of while(true)
}
return finishNumberLiteral(c, false);
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return finishNumberLiteral(read(true), false);
case '\\':
return token(CppTokenId.BACK_SLASH);
case '\r':
consumeNewline();
return token(CppTokenId.NEW_LINE);
case '\n':
return token(CppTokenId.NEW_LINE, "\n", PartType.COMPLETE); // NOI18N
// All Character.isWhitespace(c) below 0x80 follow
// ['\t' - '\f'] and [0x1c - ' ']
case '\t':
case 0x0b:
case '\f':
case 0x1c:
case 0x1d:
case 0x1e:
case 0x1f:
return finishWhitespace();
case ' ':
c = read(true);
if (c == EOF || !Character.isWhitespace(c) || c == '\n' || c == '\r') { // Return single space as flyweight token
backup(1);
return token(CppTokenId.WHITESPACE, " ", PartType.COMPLETE); // NOI18N
}
return finishWhitespace();
case EOF:
if (isTokenSplittedByEscapedLine()) {
backup(1);
assert lastTokenEndedByEscapedLine > 0 : "lastTokenEndedByEscapedLine is " + lastTokenEndedByEscapedLine;
break;
}
return null;
case '$':
// dollar is extension in gcc and msvc $ is a valid start of identifiers
// return token(CppTokenId.DOLLAR);
default:
c = translateSurrogates(c);
if (CndLexerUtilities.isCppIdentifierStart(c)) {
if (c == 'L' || c == 'U' || c == 'u' || c == 'R') {
int next = read(true);
boolean raw_string = (c == 'R');
if (next == 'R' && (c == 'u' || c == 'U' || c == 'L')) {
// uR, UR or LR
raw_string = true;
next = read(true);
} else if (next == '8' && c == 'u') {
// u8
next = read(true);
if (next == 'R') {
// u8R
raw_string = true;
next = read(true);
}
}
if (next == '"') {
// string with L/U/u/R prefixes
Token<CppTokenId> out = raw_string ? finishRawString() : finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
} else if (next == '\'' && !raw_string) {
// char with L or U/u prefix
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
} else {
backup(1);
}
}
if (c == 'E') {
if(isExecSQL(c)) {
Token<CppTokenId> out = finishExecSQL();
assert out != null : "not handled exec sql";
return out;
}
}
return keywordOrIdentifier(c);
}
if (Character.isWhitespace(c)) {
return finishWhitespace();
}
// Invalid char
return token(CppTokenId.ERROR);
}
} // end of switch (c)
} // end of while(true)
} | NONSATD | true | return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment | return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH); | return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') { |
33,498 | 6 | // in multi-line or doxygen comment | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) {
case '-':
return token(CppTokenId.MINUSMINUS);
case '>':
if (read(true) == '*') {
return token(CppTokenId.ARROWMBR);
}
backup(1);
return token(CppTokenId.ARROW);
case '=':
return token(CppTokenId.MINUSEQ);
}
backup(1);
return token(CppTokenId.MINUS);
case '*':
switch (read(true)) {
case '/': // invalid comment end - */ or int*/* */
if (read(true) == '*') {
backup(2);
return token(CppTokenId.STAR);
}
backup(1);
return token(CppTokenId.INVALID_COMMENT_END);
case '=':
return token(CppTokenId.STAREQ);
}
backup(1);
return token(CppTokenId.STAR);
case '|':
switch (read(true)) {
case '|':
return token(CppTokenId.BARBAR);
case '=':
return token(CppTokenId.BAREQ);
}
backup(1);
return token(CppTokenId.BAR);
case '&':
switch (read(true)) {
case '&':
return token(CppTokenId.AMPAMP);
case '=':
return token(CppTokenId.AMPEQ);
}
backup(1);
return token(CppTokenId.AMP);
case '%': {
Token<CppTokenId> out = finishPercent();
assert out != null : "not handled %";
return out;
}
case '^':
if (read(true) == '=') {
return token(CppTokenId.CARETEQ);
}
backup(1);
return token(CppTokenId.CARET);
case '!':
if (read(true) == '=') {
return token(CppTokenId.NOTEQ);
}
backup(1);
return token(CppTokenId.NOT);
case '.':
if ((c = read(true)) == '.') {
if (read(true) == '.') { // ellipsis ...
return token(CppTokenId.ELLIPSIS);
} else {
input.backup(2);
}
} else if ('0' <= c && c <= '9') { // float literal
return finishNumberLiteral(read(true), true);
} else if (c == '*') {
return token(CppTokenId.DOTMBR);
} else {
backup(1);
}
return token(CppTokenId.DOT);
case ':':
if (read(true) == ':') {
return token(CppTokenId.SCOPE);
}
backup(1);
return token(CppTokenId.COLON);
case '~':
return token(CppTokenId.TILDE);
case ',':
return token(CppTokenId.COMMA);
case ';':
return token(CppTokenId.SEMICOLON);
case '?':
return token(CppTokenId.QUESTION);
case '(':
return token(CppTokenId.LPAREN);
case ')':
return token(CppTokenId.RPAREN);
case '[':
return token(CppTokenId.LBRACKET);
case ']':
return token(CppTokenId.RBRACKET);
case '{':
return token(CppTokenId.LBRACE);
case '}':
return token(CppTokenId.RBRACE);
case '`':
return token(CppTokenId.GRAVE_ACCENT);
case '@':
return token(CppTokenId.AT);
case '0': // in a number literal
c = read(true);
if (c == 'x' || c == 'X' || // in hexadecimal (possibly floating-point) literal
c == 'b' || c == 'B' ) { // in binary literal
boolean inFraction = false;
while (true) {
switch (read(true)) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
break;
case '.': // hex float literal
if (!inFraction) {
inFraction = true;
} else { // two dots in the float literal
return token(CppTokenId.FLOAT_LITERAL_INVALID);
}
break;
case 'l':
case 'L': // 0x1234l or 0x1234L
return finishLongLiteral(read(true));
case 'p':
case 'P': // binary exponent
return finishFloatExponent();
case 'u':
case 'U':
return finishUnsignedLiteral(read(true));
default:
backup(1);
// if float then before mandatory binary exponent => invalid
return token(inFraction ? CppTokenId.FLOAT_LITERAL_INVALID
: CppTokenId.INT_LITERAL);
}
} // end of while(true)
}
return finishNumberLiteral(c, false);
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return finishNumberLiteral(read(true), false);
case '\\':
return token(CppTokenId.BACK_SLASH);
case '\r':
consumeNewline();
return token(CppTokenId.NEW_LINE);
case '\n':
return token(CppTokenId.NEW_LINE, "\n", PartType.COMPLETE); // NOI18N
// All Character.isWhitespace(c) below 0x80 follow
// ['\t' - '\f'] and [0x1c - ' ']
case '\t':
case 0x0b:
case '\f':
case 0x1c:
case 0x1d:
case 0x1e:
case 0x1f:
return finishWhitespace();
case ' ':
c = read(true);
if (c == EOF || !Character.isWhitespace(c) || c == '\n' || c == '\r') { // Return single space as flyweight token
backup(1);
return token(CppTokenId.WHITESPACE, " ", PartType.COMPLETE); // NOI18N
}
return finishWhitespace();
case EOF:
if (isTokenSplittedByEscapedLine()) {
backup(1);
assert lastTokenEndedByEscapedLine > 0 : "lastTokenEndedByEscapedLine is " + lastTokenEndedByEscapedLine;
break;
}
return null;
case '$':
// dollar is an extension: in gcc and msvc, '$' is a valid start of identifiers
// return token(CppTokenId.DOLLAR);
default:
c = translateSurrogates(c);
if (CndLexerUtilities.isCppIdentifierStart(c)) {
if (c == 'L' || c == 'U' || c == 'u' || c == 'R') {
int next = read(true);
boolean raw_string = (c == 'R');
if (next == 'R' && (c == 'u' || c == 'U' || c == 'L')) {
// uR, UR or LR
raw_string = true;
next = read(true);
} else if (next == '8' && c == 'u') {
// u8
next = read(true);
if (next == 'R') {
// u8R
raw_string = true;
next = read(true);
}
}
if (next == '"') {
// string with L/U/u/R prefixes
Token<CppTokenId> out = raw_string ? finishRawString() : finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
} else if (next == '\'' && !raw_string) {
// char with L or U/u prefix
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
} else {
backup(1);
}
}
if (c == 'E') {
if(isExecSQL(c)) {
Token<CppTokenId> out = finishExecSQL();
assert out != null : "not handled exec sql";
return out;
}
}
return keywordOrIdentifier(c);
}
if (Character.isWhitespace(c)) {
return finishWhitespace();
}
// Invalid char
return token(CppTokenId.ERROR);
}
} // end of switch (c)
} // end of while(true)
} | NONSATD | true | case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true); | case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') { | case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
} |
33,498 | 7 | // end of switch() | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) {
case '-':
return token(CppTokenId.MINUSMINUS);
case '>':
if (read(true) == '*') {
return token(CppTokenId.ARROWMBR);
}
backup(1);
return token(CppTokenId.ARROW);
case '=':
return token(CppTokenId.MINUSEQ);
}
backup(1);
return token(CppTokenId.MINUS);
case '*':
switch (read(true)) {
case '/': // invalid comment end - */ or int*/* */
if (read(true) == '*') {
backup(2);
return token(CppTokenId.STAR);
}
backup(1);
return token(CppTokenId.INVALID_COMMENT_END);
case '=':
return token(CppTokenId.STAREQ);
}
backup(1);
return token(CppTokenId.STAR);
case '|':
switch (read(true)) {
case '|':
return token(CppTokenId.BARBAR);
case '=':
return token(CppTokenId.BAREQ);
}
backup(1);
return token(CppTokenId.BAR);
case '&':
switch (read(true)) {
case '&':
return token(CppTokenId.AMPAMP);
case '=':
return token(CppTokenId.AMPEQ);
}
backup(1);
return token(CppTokenId.AMP);
case '%': {
Token<CppTokenId> out = finishPercent();
assert out != null : "not handled %";
return out;
}
case '^':
if (read(true) == '=') {
return token(CppTokenId.CARETEQ);
}
backup(1);
return token(CppTokenId.CARET);
case '!':
if (read(true) == '=') {
return token(CppTokenId.NOTEQ);
}
backup(1);
return token(CppTokenId.NOT);
case '.':
if ((c = read(true)) == '.') {
if (read(true) == '.') { // ellipsis ...
return token(CppTokenId.ELLIPSIS);
} else {
input.backup(2);
}
} else if ('0' <= c && c <= '9') { // float literal
return finishNumberLiteral(read(true), true);
} else if (c == '*') {
return token(CppTokenId.DOTMBR);
} else {
backup(1);
}
return token(CppTokenId.DOT);
case ':':
if (read(true) == ':') {
return token(CppTokenId.SCOPE);
}
backup(1);
return token(CppTokenId.COLON);
case '~':
return token(CppTokenId.TILDE);
case ',':
return token(CppTokenId.COMMA);
case ';':
return token(CppTokenId.SEMICOLON);
case '?':
return token(CppTokenId.QUESTION);
case '(':
return token(CppTokenId.LPAREN);
case ')':
return token(CppTokenId.RPAREN);
case '[':
return token(CppTokenId.LBRACKET);
case ']':
return token(CppTokenId.RBRACKET);
case '{':
return token(CppTokenId.LBRACE);
case '}':
return token(CppTokenId.RBRACE);
case '`':
return token(CppTokenId.GRAVE_ACCENT);
case '@':
return token(CppTokenId.AT);
case '0': // in a number literal
c = read(true);
if (c == 'x' || c == 'X' || // in hexadecimal (possibly floating-point) literal
c == 'b' || c == 'B' ) { // in binary literal
boolean inFraction = false;
while (true) {
switch (read(true)) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
break;
case '.': // hex float literal
if (!inFraction) {
inFraction = true;
} else { // two dots in the float literal
return token(CppTokenId.FLOAT_LITERAL_INVALID);
}
break;
case 'l':
case 'L': // 0x1234l or 0x1234L
return finishLongLiteral(read(true));
case 'p':
case 'P': // binary exponent
return finishFloatExponent();
case 'u':
case 'U':
return finishUnsignedLiteral(read(true));
default:
backup(1);
// if float then before mandatory binary exponent => invalid
return token(inFraction ? CppTokenId.FLOAT_LITERAL_INVALID
: CppTokenId.INT_LITERAL);
}
} // end of while(true)
}
return finishNumberLiteral(c, false);
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return finishNumberLiteral(read(true), false);
case '\\':
return token(CppTokenId.BACK_SLASH);
case '\r':
consumeNewline();
return token(CppTokenId.NEW_LINE);
case '\n':
return token(CppTokenId.NEW_LINE, "\n", PartType.COMPLETE); // NOI18N
// All Character.isWhitespace(c) below 0x80 follow
// ['\t' - '\f'] and [0x1c - ' ']
case '\t':
case 0x0b:
case '\f':
case 0x1c:
case 0x1d:
case 0x1e:
case 0x1f:
return finishWhitespace();
case ' ':
c = read(true);
if (c == EOF || !Character.isWhitespace(c) || c == '\n' || c == '\r') { // Return single space as flyweight token
backup(1);
return token(CppTokenId.WHITESPACE, " ", PartType.COMPLETE); // NOI18N
}
return finishWhitespace();
case EOF:
if (isTokenSplittedByEscapedLine()) {
backup(1);
assert lastTokenEndedByEscapedLine > 0 : "lastTokenEndedByEscapedLine is " + lastTokenEndedByEscapedLine;
break;
}
return null;
case '$':
// dollar is an extension: in gcc and msvc, '$' is a valid start of identifiers
// return token(CppTokenId.DOLLAR);
default:
c = translateSurrogates(c);
if (CndLexerUtilities.isCppIdentifierStart(c)) {
if (c == 'L' || c == 'U' || c == 'u' || c == 'R') {
int next = read(true);
boolean raw_string = (c == 'R');
if (next == 'R' && (c == 'u' || c == 'U' || c == 'L')) {
// uR, UR or LR
raw_string = true;
next = read(true);
} else if (next == '8' && c == 'u') {
// u8
next = read(true);
if (next == 'R') {
// u8R
raw_string = true;
next = read(true);
}
}
if (next == '"') {
// string with L/U/u/R prefixes
Token<CppTokenId> out = raw_string ? finishRawString() : finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
} else if (next == '\'' && !raw_string) {
// char with L or U/u prefix
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
} else {
backup(1);
}
}
if (c == 'E') {
if(isExecSQL(c)) {
Token<CppTokenId> out = finishExecSQL();
assert out != null : "not handled exec sql";
return out;
}
}
return keywordOrIdentifier(c);
}
if (Character.isWhitespace(c)) {
return finishWhitespace();
}
// Invalid char
return token(CppTokenId.ERROR);
}
} // end of switch (c)
} // end of while(true)
} | NONSATD | true | return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH); | return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) { | Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1); |
33,498 | 8 | // >> | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) {
case '-':
return token(CppTokenId.MINUSMINUS);
case '>':
if (read(true) == '*') {
return token(CppTokenId.ARROWMBR);
}
backup(1);
return token(CppTokenId.ARROW);
case '=':
return token(CppTokenId.MINUSEQ);
}
backup(1);
return token(CppTokenId.MINUS);
case '*':
switch (read(true)) {
case '/': // invalid comment end - */ or int*/* */
if (read(true) == '*') {
backup(2);
return token(CppTokenId.STAR);
}
backup(1);
return token(CppTokenId.INVALID_COMMENT_END);
case '=':
return token(CppTokenId.STAREQ);
}
backup(1);
return token(CppTokenId.STAR);
case '|':
switch (read(true)) {
case '|':
return token(CppTokenId.BARBAR);
case '=':
return token(CppTokenId.BAREQ);
}
backup(1);
return token(CppTokenId.BAR);
case '&':
switch (read(true)) {
case '&':
return token(CppTokenId.AMPAMP);
case '=':
return token(CppTokenId.AMPEQ);
}
backup(1);
return token(CppTokenId.AMP);
case '%': {
Token<CppTokenId> out = finishPercent();
assert out != null : "not handled %";
return out;
}
case '^':
if (read(true) == '=') {
return token(CppTokenId.CARETEQ);
}
backup(1);
return token(CppTokenId.CARET);
case '!':
if (read(true) == '=') {
return token(CppTokenId.NOTEQ);
}
backup(1);
return token(CppTokenId.NOT);
case '.':
if ((c = read(true)) == '.') {
if (read(true) == '.') { // ellipsis ...
return token(CppTokenId.ELLIPSIS);
} else {
input.backup(2);
}
} else if ('0' <= c && c <= '9') { // float literal
return finishNumberLiteral(read(true), true);
} else if (c == '*') {
return token(CppTokenId.DOTMBR);
} else {
backup(1);
}
return token(CppTokenId.DOT);
case ':':
if (read(true) == ':') {
return token(CppTokenId.SCOPE);
}
backup(1);
return token(CppTokenId.COLON);
case '~':
return token(CppTokenId.TILDE);
case ',':
return token(CppTokenId.COMMA);
case ';':
return token(CppTokenId.SEMICOLON);
case '?':
return token(CppTokenId.QUESTION);
case '(':
return token(CppTokenId.LPAREN);
case ')':
return token(CppTokenId.RPAREN);
case '[':
return token(CppTokenId.LBRACKET);
case ']':
return token(CppTokenId.RBRACKET);
case '{':
return token(CppTokenId.LBRACE);
case '}':
return token(CppTokenId.RBRACE);
case '`':
return token(CppTokenId.GRAVE_ACCENT);
case '@':
return token(CppTokenId.AT);
case '0': // in a number literal
c = read(true);
if (c == 'x' || c == 'X' || // in hexadecimal (possibly floating-point) literal
c == 'b' || c == 'B' ) { // in binary literal
boolean inFraction = false;
while (true) {
switch (read(true)) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
break;
case '.': // hex float literal
if (!inFraction) {
inFraction = true;
} else { // two dots in the float literal
return token(CppTokenId.FLOAT_LITERAL_INVALID);
}
break;
case 'l':
case 'L': // 0x1234l or 0x1234L
return finishLongLiteral(read(true));
case 'p':
case 'P': // binary exponent
return finishFloatExponent();
case 'u':
case 'U':
return finishUnsignedLiteral(read(true));
default:
backup(1);
// if float then before mandatory binary exponent => invalid
return token(inFraction ? CppTokenId.FLOAT_LITERAL_INVALID
: CppTokenId.INT_LITERAL);
}
} // end of while(true)
}
return finishNumberLiteral(c, false);
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return finishNumberLiteral(read(true), false);
case '\\':
return token(CppTokenId.BACK_SLASH);
case '\r':
consumeNewline();
return token(CppTokenId.NEW_LINE);
case '\n':
return token(CppTokenId.NEW_LINE, "\n", PartType.COMPLETE); // NOI18N
// All Character.isWhitespace(c) below 0x80 follow
// ['\t' - '\f'] and [0x1c - ' ']
case '\t':
case 0x0b:
case '\f':
case 0x1c:
case 0x1d:
case 0x1e:
case 0x1f:
return finishWhitespace();
case ' ':
c = read(true);
if (c == EOF || !Character.isWhitespace(c) || c == '\n' || c == '\r') { // Return single space as flyweight token
backup(1);
return token(CppTokenId.WHITESPACE, " ", PartType.COMPLETE); // NOI18N
}
return finishWhitespace();
case EOF:
if (isTokenSplittedByEscapedLine()) {
backup(1);
assert lastTokenEndedByEscapedLine > 0 : "lastTokenEndedByEscapedLine is " + lastTokenEndedByEscapedLine;
break;
}
return null;
case '$':
// dollar is an extension: in gcc and msvc, '$' is a valid start of identifiers
// return token(CppTokenId.DOLLAR);
default:
c = translateSurrogates(c);
if (CndLexerUtilities.isCppIdentifierStart(c)) {
if (c == 'L' || c == 'U' || c == 'u' || c == 'R') {
int next = read(true);
boolean raw_string = (c == 'R');
if (next == 'R' && (c == 'u' || c == 'U' || c == 'L')) {
// uR, UR or LR
raw_string = true;
next = read(true);
} else if (next == '8' && c == 'u') {
// u8
next = read(true);
if (next == 'R') {
// u8R
raw_string = true;
next = read(true);
}
}
if (next == '"') {
// string with L/U/u/R prefixes
Token<CppTokenId> out = raw_string ? finishRawString() : finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
} else if (next == '\'' && !raw_string) {
// char with L or U/u prefix
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
} else {
backup(1);
}
}
if (c == 'E') {
if(isExecSQL(c)) {
Token<CppTokenId> out = finishExecSQL();
assert out != null : "not handled exec sql";
return out;
}
}
return keywordOrIdentifier(c);
}
if (Character.isWhitespace(c)) {
return finishWhitespace();
}
// Invalid char
return token(CppTokenId.ERROR);
}
} // end of switch (c)
} // end of while(true)
} | NONSATD | true | case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ); | backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT); | }
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=': |
33,498 | 9 | // >= | @SuppressWarnings("fallthrough")
@Override
public Token<CppTokenId> nextToken() {
while (true) {
// special handling for escaped lines
if (lastTokenEndedByEscapedLine > 0) {
int c = read(false);
lastTokenEndedByEscapedLine--;
assert c == '\\' : "there must be \\";
c = read(false);
assert c == '\n' || c == '\r' : "there must be \r or \n";
if (c == '\r') {
lastTokenEndedByEscapedLine--;
if (input.consumeNewline()) {
lastTokenEndedByEscapedLine--;
}
return token(CppTokenId.ESCAPED_LINE);
} else {
lastTokenEndedByEscapedLine--;
return token(CppTokenId.ESCAPED_LINE, "\\\n", PartType.COMPLETE); // NOI18N
}
} else {
int c = read(true);
// if read of the first char caused skipping escaped line
// do we need to backup and create escaped lines first?
switch (c) {
case '"': {
Token<CppTokenId> out = finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
}
case '\'': {// char literal
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
}
case '#': {
Token<CppTokenId> out = finishSharp();
assert out != null : "not handled #";
return out;
}
case '/':
switch (read(true)) {
case '/': // in single-line or doxygen comment
{
Token<CppTokenId> out = finishLineComment(true);
assert out != null : "not handled //";
return out;
}
case '=': // found /=
return token(CppTokenId.SLASHEQ);
case '*': // in multi-line or doxygen comment
{
Token<CppTokenId> out = finishBlockComment(true);
assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) {
case '-':
return token(CppTokenId.MINUSMINUS);
case '>':
if (read(true) == '*') {
return token(CppTokenId.ARROWMBR);
}
backup(1);
return token(CppTokenId.ARROW);
case '=':
return token(CppTokenId.MINUSEQ);
}
backup(1);
return token(CppTokenId.MINUS);
case '*':
switch (read(true)) {
case '/': // invalid comment end - */ or int*/* */
if (read(true) == '*') {
backup(2);
return token(CppTokenId.STAR);
}
backup(1);
return token(CppTokenId.INVALID_COMMENT_END);
case '=':
return token(CppTokenId.STAREQ);
}
backup(1);
return token(CppTokenId.STAR);
case '|':
switch (read(true)) {
case '|':
return token(CppTokenId.BARBAR);
case '=':
return token(CppTokenId.BAREQ);
}
backup(1);
return token(CppTokenId.BAR);
case '&':
switch (read(true)) {
case '&':
return token(CppTokenId.AMPAMP);
case '=':
return token(CppTokenId.AMPEQ);
}
backup(1);
return token(CppTokenId.AMP);
case '%': {
Token<CppTokenId> out = finishPercent();
assert out != null : "not handled %";
return out;
}
case '^':
if (read(true) == '=') {
return token(CppTokenId.CARETEQ);
}
backup(1);
return token(CppTokenId.CARET);
case '!':
if (read(true) == '=') {
return token(CppTokenId.NOTEQ);
}
backup(1);
return token(CppTokenId.NOT);
case '.':
if ((c = read(true)) == '.') {
if (read(true) == '.') { // ellipsis ...
return token(CppTokenId.ELLIPSIS);
} else {
input.backup(2);
}
} else if ('0' <= c && c <= '9') { // float literal
return finishNumberLiteral(read(true), true);
} else if (c == '*') {
return token(CppTokenId.DOTMBR);
} else {
backup(1);
}
return token(CppTokenId.DOT);
case ':':
if (read(true) == ':') {
return token(CppTokenId.SCOPE);
}
backup(1);
return token(CppTokenId.COLON);
case '~':
return token(CppTokenId.TILDE);
case ',':
return token(CppTokenId.COMMA);
case ';':
return token(CppTokenId.SEMICOLON);
case '?':
return token(CppTokenId.QUESTION);
case '(':
return token(CppTokenId.LPAREN);
case ')':
return token(CppTokenId.RPAREN);
case '[':
return token(CppTokenId.LBRACKET);
case ']':
return token(CppTokenId.RBRACKET);
case '{':
return token(CppTokenId.LBRACE);
case '}':
return token(CppTokenId.RBRACE);
case '`':
return token(CppTokenId.GRAVE_ACCENT);
case '@':
return token(CppTokenId.AT);
case '0': // in a number literal
c = read(true);
if (c == 'x' || c == 'X' || // in hexadecimal (possibly floating-point) literal
c == 'b' || c == 'B' ) { // in binary literal
boolean inFraction = false;
while (true) {
switch (read(true)) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
break;
case '.': // hex float literal
if (!inFraction) {
inFraction = true;
} else { // two dots in the float literal
return token(CppTokenId.FLOAT_LITERAL_INVALID);
}
break;
case 'l':
case 'L': // 0x1234l or 0x1234L
return finishLongLiteral(read(true));
case 'p':
case 'P': // binary exponent
return finishFloatExponent();
case 'u':
case 'U':
return finishUnsignedLiteral(read(true));
default:
backup(1);
// if float then before mandatory binary exponent => invalid
return token(inFraction ? CppTokenId.FLOAT_LITERAL_INVALID
: CppTokenId.INT_LITERAL);
}
} // end of while(true)
}
return finishNumberLiteral(c, false);
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return finishNumberLiteral(read(true), false);
case '\\':
return token(CppTokenId.BACK_SLASH);
case '\r':
consumeNewline();
return token(CppTokenId.NEW_LINE);
case '\n':
return token(CppTokenId.NEW_LINE, "\n", PartType.COMPLETE); // NOI18N
// All Character.isWhitespace(c) below 0x80 follow
// ['\t' - '\f'] and [0x1c - ' ']
case '\t':
case 0x0b:
case '\f':
case 0x1c:
case 0x1d:
case 0x1e:
case 0x1f:
return finishWhitespace();
case ' ':
c = read(true);
if (c == EOF || !Character.isWhitespace(c) || c == '\n' || c == '\r') { // Return single space as flyweight token
backup(1);
return token(CppTokenId.WHITESPACE, " ", PartType.COMPLETE); // NOI18N
}
return finishWhitespace();
case EOF:
if (isTokenSplittedByEscapedLine()) {
backup(1);
assert lastTokenEndedByEscapedLine > 0 : "lastTokenEndedByEscapedLine is " + lastTokenEndedByEscapedLine;
break;
}
return null;
case '$':
// dollar is an extension: in gcc and msvc, '$' is a valid start of identifiers
// return token(CppTokenId.DOLLAR);
default:
c = translateSurrogates(c);
if (CndLexerUtilities.isCppIdentifierStart(c)) {
if (c == 'L' || c == 'U' || c == 'u' || c == 'R') {
int next = read(true);
boolean raw_string = (c == 'R');
if (next == 'R' && (c == 'u' || c == 'U' || c == 'L')) {
// uR, UR or LR
raw_string = true;
next = read(true);
} else if (next == '8' && c == 'u') {
// u8
next = read(true);
if (next == 'R') {
// u8R
raw_string = true;
next = read(true);
}
}
if (next == '"') {
// string with L/U/u/R prefixes
Token<CppTokenId> out = raw_string ? finishRawString() : finishDblQuote();
assert out != null : "not handled dobule quote";
return out;
} else if (next == '\'' && !raw_string) {
// char with L or U/u prefix
Token<CppTokenId> out = finishSingleQuote();
assert out != null : "not handled single quote";
return out;
} else {
backup(1);
}
}
if (c == 'E') {
if(isExecSQL(c)) {
Token<CppTokenId> out = finishExecSQL();
assert out != null : "not handled exec sql";
return out;
}
}
return keywordOrIdentifier(c);
}
if (Character.isWhitespace(c)) {
return finishWhitespace();
}
// Invalid char
return token(CppTokenId.ERROR);
}
} // end of switch (c)
} // end of while(true)
} | NONSATD | true | backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
} | backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+': | assert out != null : "not handled /*";
return out;
}
} // end of switch()
backup(1);
return token(CppTokenId.SLASH);
case '=':
if (read(true) == '=') {
return token(CppTokenId.EQEQ);
}
backup(1);
return token(CppTokenId.EQ);
case '>':
switch (read(true)) {
case '>': // >>
if (read(true) == '=') {
return token(CppTokenId.GTGTEQ);
}
backup(1);
return token(CppTokenId.GTGT);
case '=': // >=
return token(CppTokenId.GTEQ);
}
backup(1);
return token(CppTokenId.GT);
case '<': {
Token<CppTokenId> out = finishLT();
assert out != null : "not handled '<'";
return out;
}
case '+':
switch (read(true)) {
case '+':
return token(CppTokenId.PLUSPLUS);
case '=':
return token(CppTokenId.PLUSEQ);
}
backup(1);
return token(CppTokenId.PLUS);
case '-':
switch (read(true)) { |