id
int64 22
34.9k
| original_code
stringlengths 31
107k
| code_wo_comment
stringlengths 29
77.3k
| cleancode
stringlengths 25
62.1k
| repo
stringlengths 6
65
| label
sequencelengths 4
4
|
---|---|---|---|---|---|
18,505 | public void testPaintContentBorderLeftEdge() {
// Note: painting code, cannot test
} | public void testPaintContentBorderLeftEdge() {
} | public void testpaintcontentborderleftedge() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,506 | public void testPaintContentBorderRightEdge() {
// Note: painting code, cannot test
} | public void testPaintContentBorderRightEdge() {
} | public void testpaintcontentborderrightedge() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,507 | public void testPaintContentBorderTopEdge() {
// Note: painting code, cannot test
} | public void testPaintContentBorderTopEdge() {
} | public void testpaintcontentbordertopedge() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,508 | public void testPaintFocusIndicator() {
// Note: painting code, cannot test
} | public void testPaintFocusIndicator() {
} | public void testpaintfocusindicator() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,509 | public void testPaintTabBackground() {
// Note: painting code, cannot test
} | public void testPaintTabBackground() {
} | public void testpainttabbackground() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,510 | public void testPaintTabBorder() {
// Note: painting code, cannot test
} | public void testPaintTabBorder() {
} | public void testpainttabborder() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,511 | public void testPaintHighlightBelowTab() {
// Note: painting code, cannot test
} | public void testPaintHighlightBelowTab() {
} | public void testpainthighlightbelowtab() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,512 | public void testPaintBottomTabBorder() {
// Note: painting code, cannot test
} | public void testPaintBottomTabBorder() {
} | public void testpaintbottomtabborder() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,513 | public void testPaintLeftTabBorder() {
// Note: painting code, cannot test
} | public void testPaintLeftTabBorder() {
} | public void testpaintlefttabborder() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,514 | public void testPaintRightTabBorder() {
// Note: painting code, cannot test
} | public void testPaintRightTabBorder() {
} | public void testpaintrighttabborder() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,515 | public void testPaintTopTabBorder() {
// Note: painting code, cannot test
} | public void testPaintTopTabBorder() {
} | public void testpainttoptabborder() { } | qinFamily/freeVM | [
0,
0,
0,
1
] |
18,636 | @Override
public Set<JavaPlatform> instantiate() throws IOException {
//TODO: Download (in background?)
String downloadedFolder = (String) wizard.getProperty(DiscoPlatformIt.PROP_DOWNLOAD);
if (downloadedFolder != null) {
File f = new File(downloadedFolder);
if (!f.isDirectory()) {
//open the file manager for the parent folder
Desktop.getDesktop().open(f.getParentFile());
return Collections.EMPTY_SET;
}
String name = state.selection.getJavaPlatformDisplayName();
return Collections.singleton(J2SEPlatformUtils.register(new File(downloadedFolder), name));
} else {
//TODO: notifcation?
return Collections.EMPTY_SET;
}
} | @Override
public Set<JavaPlatform> instantiate() throws IOException {
String downloadedFolder = (String) wizard.getProperty(DiscoPlatformIt.PROP_DOWNLOAD);
if (downloadedFolder != null) {
File f = new File(downloadedFolder);
if (!f.isDirectory()) {
Desktop.getDesktop().open(f.getParentFile());
return Collections.EMPTY_SET;
}
String name = state.selection.getJavaPlatformDisplayName();
return Collections.singleton(J2SEPlatformUtils.register(new File(downloadedFolder), name));
} else {
return Collections.EMPTY_SET;
}
} | @override public set<javaplatform> instantiate() throws ioexception { string downloadedfolder = (string) wizard.getproperty(discoplatformit.prop_download); if (downloadedfolder != null) { file f = new file(downloadedfolder); if (!f.isdirectory()) { desktop.getdesktop().open(f.getparentfile()); return collections.empty_set; } string name = state.selection.getjavaplatformdisplayname(); return collections.singleton(j2seplatformutils.register(new file(downloadedfolder), name)); } else { return collections.empty_set; } } | oyarzun/incubator-netbeans | [
1,
1,
0,
0
] |
2,277 | @SuppressWarnings("unused")
private void receive() {
// TODO 7: [Optional] Fill with your own implementation for handling
// asynchronous data from the driver layer to the device service
ModbusDevice device = null;
String result = "";
ResourceOperation operation = null;
objectCache.putReadings(device, operation, result);
} | @SuppressWarnings("unused")
private void receive() {
ModbusDevice device = null;
String result = "";
ResourceOperation operation = null;
objectCache.putReadings(device, operation, result);
} | @suppresswarnings("unused") private void receive() { modbusdevice device = null; string result = ""; resourceoperation operation = null; objectcache.putreadings(device, operation, result); } | pk-80/device-modbus | [
0,
1,
0,
0
] |
18,772 | private void initState(){
//TODO Switch s and w
this.s = BigDecimal.ONE; //new BigDecimal(this.cloudsNum); // @todo ERRO? (x = BigDecimal.ONE)
this.w = super.getInitValue();
//Clear message received buffer
this.msgRcvBuffer = new HashMap<String, Message<?>>();
} | private void initState(){
this.s = BigDecimal.ONE;
this.w = super.getInitValue();
this.msgRcvBuffer = new HashMap<String, Message<?>>();
} | private void initstate(){ this.s = bigdecimal.one; this.w = super.getinitvalue(); this.msgrcvbuffer = new hashmap<string, message<?>>(); } | pcjesus/NetworkSimulator | [
1,
1,
0,
0
] |
10,767 | @Override
public void shutdown() throws Exception {
synchronized (shutDownLock) {
// This is problematic as the user code class loader is not
// available at this point.
for (Savepoint savepoint : savepoints.values()) {
try {
savepoint.dispose();
} catch (Throwable t) {
LOG.warn("Failed to dispose savepoint " + savepoint.getCheckpointId(), t);
}
}
savepoints.clear();
// Remove shutdown hook to prevent resource leaks, unless this is
// invoked by the shutdown hook itself.
if (shutdownHook != null && shutdownHook != Thread.currentThread()) {
try {
Runtime.getRuntime().removeShutdownHook(shutdownHook);
} catch (IllegalStateException ignored) {
// Race, JVM is in shutdown already, we can safely ignore this
} catch (Throwable t) {
LOG.warn("Failed to unregister shut down hook.");
}
}
shutDown = true;
}
} | @Override
public void shutdown() throws Exception {
synchronized (shutDownLock) {
for (Savepoint savepoint : savepoints.values()) {
try {
savepoint.dispose();
} catch (Throwable t) {
LOG.warn("Failed to dispose savepoint " + savepoint.getCheckpointId(), t);
}
}
savepoints.clear();
if (shutdownHook != null && shutdownHook != Thread.currentThread()) {
try {
Runtime.getRuntime().removeShutdownHook(shutdownHook);
} catch (IllegalStateException ignored) {
} catch (Throwable t) {
LOG.warn("Failed to unregister shut down hook.");
}
}
shutDown = true;
}
} | @override public void shutdown() throws exception { synchronized (shutdownlock) { for (savepoint savepoint : savepoints.values()) { try { savepoint.dispose(); } catch (throwable t) { log.warn("failed to dispose savepoint " + savepoint.getcheckpointid(), t); } } savepoints.clear(); if (shutdownhook != null && shutdownhook != thread.currentthread()) { try { runtime.getruntime().removeshutdownhook(shutdownhook); } catch (illegalstateexception ignored) { } catch (throwable t) { log.warn("failed to unregister shut down hook."); } } shutdown = true; } } | qingdao81/flink | [
0,
0,
1,
0
] |
2,671 | public void beginStep() {
// TODO: probably, we don't need to synchronize data here,
// because this method is always called from the same thread
// Synchronize all cached data
synchronized (readData) {
synchronized (writeData) {
super.beginStep();
}
}
} | public void beginStep() {
synchronized (readData) {
synchronized (writeData) {
super.beginStep();
}
}
} | public void beginstep() { synchronized (readdata) { synchronized (writedata) { super.beginstep(); } } } | monadius/spark-abm | [
1,
0,
0,
0
] |
2,740 | public static String jsDtobasestr(int base, double dParam) {
if (!(2 <= base && base <= 36)) {
throw new IllegalArgumentException("Bad base: " + base);
}
double d = dParam;
/* Check for Infinity and NaN */
if (Double.isNaN(d)) {
return "NaN";
} else if (Double.isInfinite(d)) {
return (d > 0.0) ? "Infinity" : "-Infinity";
} else if (d == 0) {
// ALERT: should it distinguish -0.0 from +0.0 ?
return "0";
}
boolean negative;
if (d >= 0.0) {
negative = false;
} else {
negative = true;
d = -d;
}
/* Get the integer part of d including '-' sign. */
String intDigits;
double dfloor = Math.floor(d);
long lfloor = (long) dfloor;
if (lfloor == dfloor) {
// int part fits long
if (lfloor == 0 && negative) {
intDigits = "-0"; // CWirth fix
} else {
intDigits = Long.toString((negative) ? -lfloor : lfloor, base);
}
} else {
// BigInteger should be used
long floorBits = Double.doubleToLongBits(dfloor);
int exp = (int) (floorBits >> Exp_shiftL) & Exp_mask_shifted;
long mantissa;
if (exp == 0) {
mantissa = (floorBits & Frac_maskL) << 1;
} else {
mantissa = (floorBits & Frac_maskL) | Exp_msk1L;
}
if (negative) {
mantissa = -mantissa;
}
exp -= 1075;
BigInteger x = BigInteger.valueOf(mantissa);
if (exp > 0) {
x = x.shiftLeft(exp);
} else if (exp < 0) {
x = x.shiftRight(-exp);
}
intDigits = x.toString(base);
}
if (d == dfloor) {
// No fraction part
return intDigits;
} else {
/* We have a fraction. */
StringBuilder buffer; /* The output string */
int digit;
double df; /* The fractional part of d */
BigInteger b;
buffer = new StringBuilder();
buffer.append(intDigits).append('.');
df = d - dfloor;
long dBits = Double.doubleToLongBits(d);
int word0 = (int) (dBits >> 32);
int word1 = (int) (dBits);
int[] e = new int[1];
int[] bbits = new int[1];
b = d2b(df, e, bbits);
/* At this point df = b * 2^e. e must be less than zero because 0 < df < 1. */
int s2 = -(word0 >>> Exp_shift1 & Exp_mask >> Exp_shift1);
if (s2 == 0) {
s2 = -1;
}
s2 += Bias + P;
/* 1/2^s2 = (nextDouble(d) - d)/2 */
BigInteger mlo = BigInteger.ONE;
BigInteger mhi = mlo;
if ((word1 == 0) && ((word0 & Bndry_mask) == 0) && ((word0 & (Exp_mask & Exp_mask << 1)) != 0)) {
/*
* The special case. Here we want to be within a quarter of the last input
* significant digit instead of one half of it when the output string's value is
* less than d.
*/
s2 += Log2P;
mhi = BigInteger.valueOf(1 << Log2P);
}
b = b.shiftLeft(e[0] + s2);
BigInteger s = BigInteger.ONE;
s = s.shiftLeft(s2);
/*
* @formatter:off
* At this point we have the following:
* s = 2^s2;
* 1 > df = b/2^s2 > 0;
* (d - prevDouble(d))/2 = mlo/2^s2;
* (nextDouble(d) - d)/2 = mhi/2^s2.
* @formatter:on
*/
BigInteger bigBase = BigInteger.valueOf(base);
boolean done = false;
do {
b = b.multiply(bigBase);
BigInteger[] divResult = b.divideAndRemainder(s);
b = divResult[1];
digit = (char) (divResult[0].intValue());
if (mlo == mhi) {
mlo = mhi = mlo.multiply(bigBase);
} else {
mlo = mlo.multiply(bigBase);
mhi = mhi.multiply(bigBase);
}
/* Do we yet have the shortest string that will round to d? */
int j = b.compareTo(mlo);
/* j is b/2^s2 compared with mlo/2^s2. */
BigInteger delta = s.subtract(mhi);
int j1 = (delta.signum() <= 0) ? 1 : b.compareTo(delta);
/* j1 is b/2^s2 compared with 1 - mhi/2^s2. */
if (j1 == 0 && ((word1 & 1) == 0)) {
if (j > 0) {
digit++;
}
done = true;
} else if (j < 0 || (j == 0 && ((word1 & 1) == 0))) {
if (j1 > 0) {
/*
* Either dig or dig+1 would work here as the least significant digit. Use
* whichever would produce an output value closer to d.
*/
b = b.shiftLeft(1);
j1 = b.compareTo(s);
if (j1 > 0) {
/*
* The even test (|| (j1 == 0 && (digit & 1))) is not here because it
* messes up odd base output such as 3.5 in base 3.
*/
digit++;
}
}
done = true;
} else if (j1 > 0) {
digit++;
done = true;
}
buffer.append(basedigit(digit));
} while (!done);
return buffer.toString();
}
} | public static String jsDtobasestr(int base, double dParam) {
if (!(2 <= base && base <= 36)) {
throw new IllegalArgumentException("Bad base: " + base);
}
double d = dParam;
if (Double.isNaN(d)) {
return "NaN";
} else if (Double.isInfinite(d)) {
return (d > 0.0) ? "Infinity" : "-Infinity";
} else if (d == 0) {
return "0";
}
boolean negative;
if (d >= 0.0) {
negative = false;
} else {
negative = true;
d = -d;
}
String intDigits;
double dfloor = Math.floor(d);
long lfloor = (long) dfloor;
if (lfloor == dfloor) {
if (lfloor == 0 && negative) {
intDigits = "-0";
} else {
intDigits = Long.toString((negative) ? -lfloor : lfloor, base);
}
} else {
long floorBits = Double.doubleToLongBits(dfloor);
int exp = (int) (floorBits >> Exp_shiftL) & Exp_mask_shifted;
long mantissa;
if (exp == 0) {
mantissa = (floorBits & Frac_maskL) << 1;
} else {
mantissa = (floorBits & Frac_maskL) | Exp_msk1L;
}
if (negative) {
mantissa = -mantissa;
}
exp -= 1075;
BigInteger x = BigInteger.valueOf(mantissa);
if (exp > 0) {
x = x.shiftLeft(exp);
} else if (exp < 0) {
x = x.shiftRight(-exp);
}
intDigits = x.toString(base);
}
if (d == dfloor) {
return intDigits;
} else {
StringBuilder buffer;
int digit;
double df;
BigInteger b;
buffer = new StringBuilder();
buffer.append(intDigits).append('.');
df = d - dfloor;
long dBits = Double.doubleToLongBits(d);
int word0 = (int) (dBits >> 32);
int word1 = (int) (dBits);
int[] e = new int[1];
int[] bbits = new int[1];
b = d2b(df, e, bbits);
int s2 = -(word0 >>> Exp_shift1 & Exp_mask >> Exp_shift1);
if (s2 == 0) {
s2 = -1;
}
s2 += Bias + P;
BigInteger mlo = BigInteger.ONE;
BigInteger mhi = mlo;
if ((word1 == 0) && ((word0 & Bndry_mask) == 0) && ((word0 & (Exp_mask & Exp_mask << 1)) != 0)) {
s2 += Log2P;
mhi = BigInteger.valueOf(1 << Log2P);
}
b = b.shiftLeft(e[0] + s2);
BigInteger s = BigInteger.ONE;
s = s.shiftLeft(s2);
BigInteger bigBase = BigInteger.valueOf(base);
boolean done = false;
do {
b = b.multiply(bigBase);
BigInteger[] divResult = b.divideAndRemainder(s);
b = divResult[1];
digit = (char) (divResult[0].intValue());
if (mlo == mhi) {
mlo = mhi = mlo.multiply(bigBase);
} else {
mlo = mlo.multiply(bigBase);
mhi = mhi.multiply(bigBase);
}
int j = b.compareTo(mlo);
BigInteger delta = s.subtract(mhi);
int j1 = (delta.signum() <= 0) ? 1 : b.compareTo(delta);
if (j1 == 0 && ((word1 & 1) == 0)) {
if (j > 0) {
digit++;
}
done = true;
} else if (j < 0 || (j == 0 && ((word1 & 1) == 0))) {
if (j1 > 0) {
b = b.shiftLeft(1);
j1 = b.compareTo(s);
if (j1 > 0) {
digit++;
}
}
done = true;
} else if (j1 > 0) {
digit++;
done = true;
}
buffer.append(basedigit(digit));
} while (!done);
return buffer.toString();
}
} | public static string jsdtobasestr(int base, double dparam) { if (!(2 <= base && base <= 36)) { throw new illegalargumentexception("bad base: " + base); } double d = dparam; if (double.isnan(d)) { return "nan"; } else if (double.isinfinite(d)) { return (d > 0.0) ? "infinity" : "-infinity"; } else if (d == 0) { return "0"; } boolean negative; if (d >= 0.0) { negative = false; } else { negative = true; d = -d; } string intdigits; double dfloor = math.floor(d); long lfloor = (long) dfloor; if (lfloor == dfloor) { if (lfloor == 0 && negative) { intdigits = "-0"; } else { intdigits = long.tostring((negative) ? -lfloor : lfloor, base); } } else { long floorbits = double.doubletolongbits(dfloor); int exp = (int) (floorbits >> exp_shiftl) & exp_mask_shifted; long mantissa; if (exp == 0) { mantissa = (floorbits & frac_maskl) << 1; } else { mantissa = (floorbits & frac_maskl) | exp_msk1l; } if (negative) { mantissa = -mantissa; } exp -= 1075; biginteger x = biginteger.valueof(mantissa); if (exp > 0) { x = x.shiftleft(exp); } else if (exp < 0) { x = x.shiftright(-exp); } intdigits = x.tostring(base); } if (d == dfloor) { return intdigits; } else { stringbuilder buffer; int digit; double df; biginteger b; buffer = new stringbuilder(); buffer.append(intdigits).append('.'); df = d - dfloor; long dbits = double.doubletolongbits(d); int word0 = (int) (dbits >> 32); int word1 = (int) (dbits); int[] e = new int[1]; int[] bbits = new int[1]; b = d2b(df, e, bbits); int s2 = -(word0 >>> exp_shift1 & exp_mask >> exp_shift1); if (s2 == 0) { s2 = -1; } s2 += bias + p; biginteger mlo = biginteger.one; biginteger mhi = mlo; if ((word1 == 0) && ((word0 & bndry_mask) == 0) && ((word0 & (exp_mask & exp_mask << 1)) != 0)) { s2 += log2p; mhi = biginteger.valueof(1 << log2p); } b = b.shiftleft(e[0] + s2); biginteger s = biginteger.one; s = s.shiftleft(s2); biginteger bigbase = biginteger.valueof(base); boolean done = false; do { b = b.multiply(bigbase); biginteger[] divresult = 
b.divideandremainder(s); b = divresult[1]; digit = (char) (divresult[0].intvalue()); if (mlo == mhi) { mlo = mhi = mlo.multiply(bigbase); } else { mlo = mlo.multiply(bigbase); mhi = mhi.multiply(bigbase); } int j = b.compareto(mlo); biginteger delta = s.subtract(mhi); int j1 = (delta.signum() <= 0) ? 1 : b.compareto(delta); if (j1 == 0 && ((word1 & 1) == 0)) { if (j > 0) { digit++; } done = true; } else if (j < 0 || (j == 0 && ((word1 & 1) == 0))) { if (j1 > 0) { b = b.shiftleft(1); j1 = b.compareto(s); if (j1 > 0) { digit++; } } done = true; } else if (j1 > 0) { digit++; done = true; } buffer.append(basedigit(digit)); } while (!done); return buffer.tostring(); } } | pitbox46/graaljs-forge | [
1,
0,
0,
0
] |
11,112 | @Override
public final boolean equals(final Object other) {
if (!(other instanceof Production)) {
return false;
}
// XXX NOTE: this assumes that the reference of the generator value does not change!
final Production otherProduction = (Production) other;
return this.id == otherProduction.id
&& this.ownClass.equals(otherProduction.ownClass)
&& this.generatorValue == otherProduction.generatorValue;
} | @Override
public final boolean equals(final Object other) {
if (!(other instanceof Production)) {
return false;
}
final Production otherProduction = (Production) other;
return this.id == otherProduction.id
&& this.ownClass.equals(otherProduction.ownClass)
&& this.generatorValue == otherProduction.generatorValue;
} | @override public final boolean equals(final object other) { if (!(other instanceof production)) { return false; } final production otherproduction = (production) other; return this.id == otherproduction.id && this.ownclass.equals(otherproduction.ownclass) && this.generatorvalue == otherproduction.generatorvalue; } | mskamp/StarSmith | [
1,
0,
0,
0
] |
2,951 | private boolean shouldRollToNewFile() {
// TODO: ORC file now not support target file size before closed
return !format.equals(FileFormat.ORC) &&
currentRows % ROWS_DIVISOR == 0 && length(currentWriter) >= targetFileSize;
} | private boolean shouldRollToNewFile() {
return !format.equals(FileFormat.ORC) &&
currentRows % ROWS_DIVISOR == 0 && length(currentWriter) >= targetFileSize;
} | private boolean shouldrolltonewfile() { return !format.equals(fileformat.orc) && currentrows % rows_divisor == 0 && length(currentwriter) >= targetfilesize; } | rajarshisarkar/iceberg | [
0,
1,
0,
0
] |
19,374 | boolean isAlreadyLogged(StackTraceElement[] stack)
{
HashStackTraceElement hse = new HashStackTraceElement(stack);
// Ok, it's block the code, but for a small period
synchronized (this)
{
// Detect with the precalculated hash value
return !logged.add(hse);
}
} | boolean isAlreadyLogged(StackTraceElement[] stack)
{
HashStackTraceElement hse = new HashStackTraceElement(stack);
synchronized (this)
{
return !logged.add(hse);
}
} | boolean isalreadylogged(stacktraceelement[] stack) { hashstacktraceelement hse = new hashstacktraceelement(stack); synchronized (this) { return !logged.add(hse); } } | octo-online/reactive-aud | [
1,
0,
0,
0
] |
2,992 | public int doEndTag()
throws JspTagException
{
HttpServletRequest request = (HttpServletRequest)super.pageContext.getRequest();
RequestProperties reqState = (RequestProperties)request.getAttribute(SECTION_REQUESTPROPS);
PrivateLabel privLabel = (reqState != null)? reqState.getPrivateLabel() : RequestProperties.NullPrivateLabel;
JspWriter out = super.pageContext.getOut();
String s = this.getSection().toLowerCase();
/* ignore blank section definitions */
if (StringTools.isBlank(s)) {
// -- ignore
return EVAL_PAGE;
}
/* not a match? */
if (!this.isMatch()) {
// -- ignore
return EVAL_PAGE;
}
// --------------------------------------------------------------------
/* "onload='...'" */
if (s.equalsIgnoreCase(SECTION_BODY_ONLOAD)) {
String bodyOnLoad = (String)request.getAttribute(SECTION_BODY_ONLOAD);
if (!StringTools.isBlank(bodyOnLoad)) {
try {
out.print(bodyOnLoad);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
/* "onunload='...'" */
if (s.equalsIgnoreCase(SECTION_BODY_ONUNLOAD)) {
String bodyOnUnload = (String)request.getAttribute(SECTION_BODY_ONUNLOAD);
if (!StringTools.isBlank(bodyOnUnload)) {
try {
out.print(bodyOnUnload);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
// --------------------------------------------------------------------
/* expandMenu style */
if (s.equalsIgnoreCase(SECTION_MENU_STYLE)) {
try {
ExpandMenu.writeStyle(out, reqState);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
/* expandMenu javascript */
if (s.equalsIgnoreCase(SECTION_MENU_JAVASCRIPT)) {
try {
ExpandMenu.writeJavaScript(out, reqState);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
/* expandMenu */
if (s.equalsIgnoreCase(SECTION_MENU)) {
try {
ExpandMenu.writeMenu(out, reqState,
null/*menuID*/, true/*expandableMenu*/,
false/*showIcon*/, ExpandMenu.DESC_LONG, false/*showMenuHelp*/);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
// --------------------------------------------------------------------
/* content table class */
if (s.equalsIgnoreCase(SECTION_CONTENT_CLASS_TABLE)) {
HTMLOutput content = (HTMLOutput)request.getAttribute(SECTION_CONTENT_BODY);
if (content != null) {
try {
String tableClass = content.getTableClass();
out.write(!StringTools.isBlank(tableClass)? tableClass : "contentTableClass");
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
/* content cell class */
if (s.equalsIgnoreCase(SECTION_CONTENT_CLASS_CELL)) {
HTMLOutput content = (HTMLOutput)request.getAttribute(SECTION_CONTENT_BODY);
if (content != null) {
try {
String cellClass = content.getCellClass();
out.write(!StringTools.isBlank(cellClass)? cellClass : "contentCellClass");
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
/* content message id */
if (s.equalsIgnoreCase(SECTION_CONTENT_ID_MESSAGE)) {
try {
out.write(CommonServlet.ID_CONTENT_MESSAGE);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
/* content message class */
if (s.equalsIgnoreCase(SECTION_CONTENT_CLASS_MESSAGE)) {
try {
out.write(CommonServlet.CSS_CONTENT_MESSAGE);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
/* content menubar */
if (s.equalsIgnoreCase(SECTION_CONTENT_MENUBAR)) {
HTMLOutput content = (HTMLOutput)request.getAttribute(SECTION_CONTENT_BODY);
if (content != null) {
String contentClass = content.getTableClass();
try {
if (ListTools.contains(CommonServlet.CSS_MENUBAR_OK,contentClass)) {
MenuBar.writeTableRow(out, reqState.getPageName(), reqState);
} else {
out.write("<!-- no menubar ['"+contentClass+"'] -->");
}
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
/* content message */
if (s.equalsIgnoreCase(SECTION_CONTENT_MESSAGE)) {
HTMLOutput content = (HTMLOutput)request.getAttribute(SECTION_CONTENT_BODY);
String msg = (content != null)? StringTools.trim(content.getTableMessage()) : "";
try {
out.write(msg); // TODO: HTML encode?
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
// --------------------------------------------------------------------
/* request context path */
if (s.equalsIgnoreCase(SECTION_REQUEST_CONTEXT)) {
try {
out.write(request.getContextPath());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
// --------------------------------------------------------------------
/* CSS file */
if (s.equalsIgnoreCase(SECTION_CSSFILE)) {
String cssFilePath = this.getArg();
if (!StringTools.isBlank(cssFilePath)) {
try {
PrintWriter pw = new PrintWriter(out, out.isAutoFlush());
WebPageAdaptor.writeCssLink(pw, reqState, cssFilePath, null);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
// --------------------------------------------------------------------
/* Banner Image Height */
if (s.equalsIgnoreCase(SECTION_BANNER_WIDTH)) {
// key suffix
String kSfx = StringTools.trim(this.getArg());
// property values
String bannerWidth = privLabel.getStringProperty(PrivateLabel.PROP_Banner_width + kSfx, null);
if (StringTools.isBlank(bannerWidth)) {
bannerWidth = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageWidth + kSfx, null);
}
// minimum valie
if (StringTools.isBlank(bannerWidth)) {
bannerWidth = this.hasDefault()? this.getDefault() : "100%";
} else
if (!bannerWidth.endsWith("%")) {
int W = StringTools.parseInt(bannerWidth, 0);
bannerWidth = String.valueOf((W < MIN_BANNER_WIDTH)? MIN_BANNER_WIDTH : W);
}
// generate html
try {
out.write(bannerWidth);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
/* Banner Style */
if (s.equalsIgnoreCase(SECTION_BANNER_STYLE)) {
// key suffix
String kSfx = StringTools.trim(this.getArg());
// property values
String bannerStyle = privLabel.getStringProperty(PrivateLabel.PROP_Banner_style + kSfx, null);
// generate html
if (!StringTools.isBlank(bannerStyle)) {
try {
out.write(bannerStyle);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
} else
if (this.hasDefault()) {
try {
out.write(this.getDefault());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
/* Banner Image */
if (s.equalsIgnoreCase(SECTION_BANNER_IMAGE)) {
// key suffix
String kSfx = StringTools.trim(this.getArg());
// property values
String imgLink = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageLink + kSfx, null);
String imgSrc = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageSource + kSfx, null);
String imgWidth = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageWidth + kSfx, null);
String imgHeight = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageHeight + kSfx, null);
// generate html
if (!StringTools.isBlank(imgSrc)) {
StringBuffer sb = new StringBuffer();
if (!StringTools.isBlank(imgLink)) {
sb.append("<a href='").append(imgLink).append("' target='_blank'>");
}
sb.append("<img src='").append(imgSrc).append("' border='0'");
if (!StringTools.isBlank(imgWidth)) {
sb.append(" width='").append(imgWidth).append("'");
}
if (!StringTools.isBlank(imgHeight)) {
sb.append(" height='").append(imgHeight).append("'");
}
sb.append(">");
if (!StringTools.isBlank(imgLink)) {
sb.append("</a>");
}
try {
out.write(sb.toString());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
} else
if (this.hasDefault()) {
try {
out.write(this.getDefault());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
/* Banner Image */
if (s.equalsIgnoreCase(SECTION_BANNER_IMAGE_SOURCE)) {
// key suffix
String kSfx = StringTools.trim(this.getArg());
// property values
String imgSrc = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageSource + kSfx, null);
// generate html
if (!StringTools.isBlank(imgSrc)) {
//Print.sysPrintln("Property Image Source: " + imgSrc);
try {
out.write(imgSrc);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
} else
if (this.hasDefault()) {
//Print.sysPrintln("Default Image Source: " + this.getDefault());
try {
out.write(this.getDefault());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
/* Banner Image Height */
if (s.equalsIgnoreCase(SECTION_BANNER_IMAGE_WIDTH)) {
// key suffix
String kSfx = StringTools.trim(this.getArg());
// property values
String imgWidth = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageWidth + kSfx, null);
// generate html
if (!StringTools.isBlank(imgWidth)) {
try {
out.write(imgWidth);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
} else
if (this.hasDefault()) {
try {
out.write(this.getDefault());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
/* Banner Image Height */
if (s.equalsIgnoreCase(SECTION_BANNER_IMAGE_HEIGHT)) {
// key suffix
String kSfx = StringTools.trim(this.getArg());
// property values
String imgHeight = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageHeight + kSfx, null);
// generate html
if (!StringTools.isBlank(imgHeight)) {
try {
out.write(imgHeight);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
} else
if (this.hasDefault()) {
try {
out.write(this.getDefault());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
// --------------------------------------------------------------------
/* JavaScript */
if (s.equalsIgnoreCase(SECTION_JAVASCRIPT)) {
try {
// always write "utils.js"
JavaScriptTools.writeUtilsJS(out, request);
// check for other javascript
Object obj = request.getAttribute(SECTION_JAVASCRIPT);
if (obj instanceof HTMLOutput) {
((HTMLOutput)obj).write(out);
} else {
out.write("<!-- Unexpected section type '" + s + "' [" + StringTools.className(obj) + "] -->");
}
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
// --------------------------------------------------------------------
/* current page name */
if (s.equalsIgnoreCase(SECTION_PAGE_NAME)) { // "pagename"
String pageName = reqState.getPageName();
if (!StringTools.isBlank(pageName)) {
try {
out.write(pageName);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
// --------------------------------------------------------------------
/* Page URL */
if (s.equalsIgnoreCase(SECTION_PAGE_URL)) { // "pageurl"
String pageName = this.getArg();
String cmd = null;
String cmdArg = null;
WebPage wp = privLabel.getWebPage(pageName);
String url = (wp != null)? wp.encodePageURL(reqState,cmd,cmdArg) : null;
if (!StringTools.isBlank(url)) {
try {
out.write(url);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
// --------------------------------------------------------------------
/* HTMLOutput */
try {
Object obj = request.getAttribute(s);
if (obj == null) {
out.write("<!-- Undefined section '" + s + "' -->");
} else
if (obj instanceof HTMLOutput) {
((HTMLOutput)obj).write(out);
} else {
out.write("<!-- Unexpected section type '" + s + "' [" + StringTools.className(obj) + "] -->");
}
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
} | public int doEndTag()
throws JspTagException
{
HttpServletRequest request = (HttpServletRequest)super.pageContext.getRequest();
RequestProperties reqState = (RequestProperties)request.getAttribute(SECTION_REQUESTPROPS);
PrivateLabel privLabel = (reqState != null)? reqState.getPrivateLabel() : RequestProperties.NullPrivateLabel;
JspWriter out = super.pageContext.getOut();
String s = this.getSection().toLowerCase();
if (StringTools.isBlank(s)) {
return EVAL_PAGE;
}
if (!this.isMatch()) {
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_BODY_ONLOAD)) {
String bodyOnLoad = (String)request.getAttribute(SECTION_BODY_ONLOAD);
if (!StringTools.isBlank(bodyOnLoad)) {
try {
out.print(bodyOnLoad);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_BODY_ONUNLOAD)) {
String bodyOnUnload = (String)request.getAttribute(SECTION_BODY_ONUNLOAD);
if (!StringTools.isBlank(bodyOnUnload)) {
try {
out.print(bodyOnUnload);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_MENU_STYLE)) {
try {
ExpandMenu.writeStyle(out, reqState);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_MENU_JAVASCRIPT)) {
try {
ExpandMenu.writeJavaScript(out, reqState);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_MENU)) {
try {
ExpandMenu.writeMenu(out, reqState,
nul, tru,
fals, ExpandMenu.DESC_LONG, fals);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_CONTENT_CLASS_TABLE)) {
HTMLOutput content = (HTMLOutput)request.getAttribute(SECTION_CONTENT_BODY);
if (content != null) {
try {
String tableClass = content.getTableClass();
out.write(!StringTools.isBlank(tableClass)? tableClass : "contentTableClass");
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_CONTENT_CLASS_CELL)) {
HTMLOutput content = (HTMLOutput)request.getAttribute(SECTION_CONTENT_BODY);
if (content != null) {
try {
String cellClass = content.getCellClass();
out.write(!StringTools.isBlank(cellClass)? cellClass : "contentCellClass");
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_CONTENT_ID_MESSAGE)) {
try {
out.write(CommonServlet.ID_CONTENT_MESSAGE);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_CONTENT_CLASS_MESSAGE)) {
try {
out.write(CommonServlet.CSS_CONTENT_MESSAGE);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_CONTENT_MENUBAR)) {
HTMLOutput content = (HTMLOutput)request.getAttribute(SECTION_CONTENT_BODY);
if (content != null) {
String contentClass = content.getTableClass();
try {
if (ListTools.contains(CommonServlet.CSS_MENUBAR_OK,contentClass)) {
MenuBar.writeTableRow(out, reqState.getPageName(), reqState);
} else {
out.write("<!-- no menubar ['"+contentClass+"'] -->");
}
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_CONTENT_MESSAGE)) {
HTMLOutput content = (HTMLOutput)request.getAttribute(SECTION_CONTENT_BODY);
String msg = (content != null)? StringTools.trim(content.getTableMessage()) : "";
try {
out.write(msg);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_REQUEST_CONTEXT)) {
try {
out.write(request.getContextPath());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_CSSFILE)) {
String cssFilePath = this.getArg();
if (!StringTools.isBlank(cssFilePath)) {
try {
PrintWriter pw = new PrintWriter(out, out.isAutoFlush());
WebPageAdaptor.writeCssLink(pw, reqState, cssFilePath, null);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_BANNER_WIDTH)) {
String kSfx = StringTools.trim(this.getArg());
String bannerWidth = privLabel.getStringProperty(PrivateLabel.PROP_Banner_width + kSfx, null);
if (StringTools.isBlank(bannerWidth)) {
bannerWidth = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageWidth + kSfx, null);
}
if (StringTools.isBlank(bannerWidth)) {
bannerWidth = this.hasDefault()? this.getDefault() : "100%";
} else
if (!bannerWidth.endsWith("%")) {
int W = StringTools.parseInt(bannerWidth, 0);
bannerWidth = String.valueOf((W < MIN_BANNER_WIDTH)? MIN_BANNER_WIDTH : W);
}
try {
out.write(bannerWidth);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_BANNER_STYLE)) {
String kSfx = StringTools.trim(this.getArg());
String bannerStyle = privLabel.getStringProperty(PrivateLabel.PROP_Banner_style + kSfx, null);
if (!StringTools.isBlank(bannerStyle)) {
try {
out.write(bannerStyle);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
} else
if (this.hasDefault()) {
try {
out.write(this.getDefault());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_BANNER_IMAGE)) {
String kSfx = StringTools.trim(this.getArg());
String imgLink = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageLink + kSfx, null);
String imgSrc = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageSource + kSfx, null);
String imgWidth = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageWidth + kSfx, null);
String imgHeight = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageHeight + kSfx, null);
if (!StringTools.isBlank(imgSrc)) {
StringBuffer sb = new StringBuffer();
if (!StringTools.isBlank(imgLink)) {
sb.append("<a href='").append(imgLink).append("' target='_blank'>");
}
sb.append("<img src='").append(imgSrc).append("' border='0'");
if (!StringTools.isBlank(imgWidth)) {
sb.append(" width='").append(imgWidth).append("'");
}
if (!StringTools.isBlank(imgHeight)) {
sb.append(" height='").append(imgHeight).append("'");
}
sb.append(">");
if (!StringTools.isBlank(imgLink)) {
sb.append("</a>");
}
try {
out.write(sb.toString());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
} else
if (this.hasDefault()) {
try {
out.write(this.getDefault());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_BANNER_IMAGE_SOURCE)) {
String kSfx = StringTools.trim(this.getArg());
String imgSrc = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageSource + kSfx, null);
if (!StringTools.isBlank(imgSrc)) {
try {
out.write(imgSrc);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
} else
if (this.hasDefault()) {
try {
out.write(this.getDefault());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_BANNER_IMAGE_WIDTH)) {
String kSfx = StringTools.trim(this.getArg());
String imgWidth = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageWidth + kSfx, null);
if (!StringTools.isBlank(imgWidth)) {
try {
out.write(imgWidth);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
} else
if (this.hasDefault()) {
try {
out.write(this.getDefault());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_BANNER_IMAGE_HEIGHT)) {
String kSfx = StringTools.trim(this.getArg());
String imgHeight = privLabel.getStringProperty(PrivateLabel.PROP_Banner_imageHeight + kSfx, null);
if (!StringTools.isBlank(imgHeight)) {
try {
out.write(imgHeight);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
} else
if (this.hasDefault()) {
try {
out.write(this.getDefault());
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_JAVASCRIPT)) {
try {
JavaScriptTools.writeUtilsJS(out, request);
Object obj = request.getAttribute(SECTION_JAVASCRIPT);
if (obj instanceof HTMLOutput) {
((HTMLOutput)obj).write(out);
} else {
out.write("<!-- Unexpected section type '" + s + "' [" + StringTools.className(obj) + "] -->");
}
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_PAGE_NAME)) {
String pageName = reqState.getPageName();
if (!StringTools.isBlank(pageName)) {
try {
out.write(pageName);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
if (s.equalsIgnoreCase(SECTION_PAGE_URL)) {
String pageName = this.getArg();
String cmd = null;
String cmdArg = null;
WebPage wp = privLabel.getWebPage(pageName);
String url = (wp != null)? wp.encodePageURL(reqState,cmd,cmdArg) : null;
if (!StringTools.isBlank(url)) {
try {
out.write(url);
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
}
return EVAL_PAGE;
}
try {
Object obj = request.getAttribute(s);
if (obj == null) {
out.write("<!-- Undefined section '" + s + "' -->");
} else
if (obj instanceof HTMLOutput) {
((HTMLOutput)obj).write(out);
} else {
out.write("<!-- Unexpected section type '" + s + "' [" + StringTools.className(obj) + "] -->");
}
} catch (IOException ioe) {
throw new JspTagException(ioe.toString());
}
return EVAL_PAGE;
} | public int doendtag() throws jsptagexception { httpservletrequest request = (httpservletrequest)super.pagecontext.getrequest(); requestproperties reqstate = (requestproperties)request.getattribute(section_requestprops); privatelabel privlabel = (reqstate != null)? reqstate.getprivatelabel() : requestproperties.nullprivatelabel; jspwriter out = super.pagecontext.getout(); string s = this.getsection().tolowercase(); if (stringtools.isblank(s)) { return eval_page; } if (!this.ismatch()) { return eval_page; } if (s.equalsignorecase(section_body_onload)) { string bodyonload = (string)request.getattribute(section_body_onload); if (!stringtools.isblank(bodyonload)) { try { out.print(bodyonload); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_body_onunload)) { string bodyonunload = (string)request.getattribute(section_body_onunload); if (!stringtools.isblank(bodyonunload)) { try { out.print(bodyonunload); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_menu_style)) { try { expandmenu.writestyle(out, reqstate); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } return eval_page; } if (s.equalsignorecase(section_menu_javascript)) { try { expandmenu.writejavascript(out, reqstate); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } return eval_page; } if (s.equalsignorecase(section_menu)) { try { expandmenu.writemenu(out, reqstate, nul, tru, fals, expandmenu.desc_long, fals); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } return eval_page; } if (s.equalsignorecase(section_content_class_table)) { htmloutput content = (htmloutput)request.getattribute(section_content_body); if (content != null) { try { string tableclass = content.gettableclass(); out.write(!stringtools.isblank(tableclass)? 
tableclass : "contenttableclass"); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_content_class_cell)) { htmloutput content = (htmloutput)request.getattribute(section_content_body); if (content != null) { try { string cellclass = content.getcellclass(); out.write(!stringtools.isblank(cellclass)? cellclass : "contentcellclass"); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_content_id_message)) { try { out.write(commonservlet.id_content_message); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } return eval_page; } if (s.equalsignorecase(section_content_class_message)) { try { out.write(commonservlet.css_content_message); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } return eval_page; } if (s.equalsignorecase(section_content_menubar)) { htmloutput content = (htmloutput)request.getattribute(section_content_body); if (content != null) { string contentclass = content.gettableclass(); try { if (listtools.contains(commonservlet.css_menubar_ok,contentclass)) { menubar.writetablerow(out, reqstate.getpagename(), reqstate); } else { out.write("<!-- no menubar ['"+contentclass+"'] -->"); } } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_content_message)) { htmloutput content = (htmloutput)request.getattribute(section_content_body); string msg = (content != null)? 
stringtools.trim(content.gettablemessage()) : ""; try { out.write(msg); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } return eval_page; } if (s.equalsignorecase(section_request_context)) { try { out.write(request.getcontextpath()); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } return eval_page; } if (s.equalsignorecase(section_cssfile)) { string cssfilepath = this.getarg(); if (!stringtools.isblank(cssfilepath)) { try { printwriter pw = new printwriter(out, out.isautoflush()); webpageadaptor.writecsslink(pw, reqstate, cssfilepath, null); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_banner_width)) { string ksfx = stringtools.trim(this.getarg()); string bannerwidth = privlabel.getstringproperty(privatelabel.prop_banner_width + ksfx, null); if (stringtools.isblank(bannerwidth)) { bannerwidth = privlabel.getstringproperty(privatelabel.prop_banner_imagewidth + ksfx, null); } if (stringtools.isblank(bannerwidth)) { bannerwidth = this.hasdefault()? this.getdefault() : "100%"; } else if (!bannerwidth.endswith("%")) { int w = stringtools.parseint(bannerwidth, 0); bannerwidth = string.valueof((w < min_banner_width)? 
min_banner_width : w); } try { out.write(bannerwidth); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } return eval_page; } if (s.equalsignorecase(section_banner_style)) { string ksfx = stringtools.trim(this.getarg()); string bannerstyle = privlabel.getstringproperty(privatelabel.prop_banner_style + ksfx, null); if (!stringtools.isblank(bannerstyle)) { try { out.write(bannerstyle); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } else if (this.hasdefault()) { try { out.write(this.getdefault()); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_banner_image)) { string ksfx = stringtools.trim(this.getarg()); string imglink = privlabel.getstringproperty(privatelabel.prop_banner_imagelink + ksfx, null); string imgsrc = privlabel.getstringproperty(privatelabel.prop_banner_imagesource + ksfx, null); string imgwidth = privlabel.getstringproperty(privatelabel.prop_banner_imagewidth + ksfx, null); string imgheight = privlabel.getstringproperty(privatelabel.prop_banner_imageheight + ksfx, null); if (!stringtools.isblank(imgsrc)) { stringbuffer sb = new stringbuffer(); if (!stringtools.isblank(imglink)) { sb.append("<a href='").append(imglink).append("' target='_blank'>"); } sb.append("<img src='").append(imgsrc).append("' border='0'"); if (!stringtools.isblank(imgwidth)) { sb.append(" width='").append(imgwidth).append("'"); } if (!stringtools.isblank(imgheight)) { sb.append(" height='").append(imgheight).append("'"); } sb.append(">"); if (!stringtools.isblank(imglink)) { sb.append("</a>"); } try { out.write(sb.tostring()); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } else if (this.hasdefault()) { try { out.write(this.getdefault()); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_banner_image_source)) { string ksfx = 
stringtools.trim(this.getarg()); string imgsrc = privlabel.getstringproperty(privatelabel.prop_banner_imagesource + ksfx, null); if (!stringtools.isblank(imgsrc)) { try { out.write(imgsrc); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } else if (this.hasdefault()) { try { out.write(this.getdefault()); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_banner_image_width)) { string ksfx = stringtools.trim(this.getarg()); string imgwidth = privlabel.getstringproperty(privatelabel.prop_banner_imagewidth + ksfx, null); if (!stringtools.isblank(imgwidth)) { try { out.write(imgwidth); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } else if (this.hasdefault()) { try { out.write(this.getdefault()); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_banner_image_height)) { string ksfx = stringtools.trim(this.getarg()); string imgheight = privlabel.getstringproperty(privatelabel.prop_banner_imageheight + ksfx, null); if (!stringtools.isblank(imgheight)) { try { out.write(imgheight); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } else if (this.hasdefault()) { try { out.write(this.getdefault()); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_javascript)) { try { javascripttools.writeutilsjs(out, request); object obj = request.getattribute(section_javascript); if (obj instanceof htmloutput) { ((htmloutput)obj).write(out); } else { out.write("<!-- unexpected section type '" + s + "' [" + stringtools.classname(obj) + "] -->"); } } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } return eval_page; } if (s.equalsignorecase(section_page_name)) { string pagename = reqstate.getpagename(); if (!stringtools.isblank(pagename)) { try { out.write(pagename); } 
catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } if (s.equalsignorecase(section_page_url)) { string pagename = this.getarg(); string cmd = null; string cmdarg = null; webpage wp = privlabel.getwebpage(pagename); string url = (wp != null)? wp.encodepageurl(reqstate,cmd,cmdarg) : null; if (!stringtools.isblank(url)) { try { out.write(url); } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } } return eval_page; } try { object obj = request.getattribute(s); if (obj == null) { out.write("<!-- undefined section '" + s + "' -->"); } else if (obj instanceof htmloutput) { ((htmloutput)obj).write(out); } else { out.write("<!-- unexpected section type '" + s + "' [" + stringtools.classname(obj) + "] -->"); } } catch (ioexception ioe) { throw new jsptagexception(ioe.tostring()); } return eval_page; } | paragp/GTS-PreUAT | [
1,
0,
0,
0
] |
19,480 | synchronized boolean acceptFrame(
@NotNull VP8Frame frame,
int incomingIndex,
int externalTargetIndex, long receivedMs)
{
// We make local copies of the externalTemporalLayerIdTarget and the
// externalEncodingTarget (as they may be updated by some other
// thread).
int externalTemporalLayerIdTarget
= RtpLayerDesc.getTidFromIndex(externalTargetIndex);
int externalEncodingIdTarget
= RtpLayerDesc.getEidFromIndex(externalTargetIndex);
if (externalEncodingIdTarget != internalEncodingIdTarget)
{
// The externalEncodingIdTarget has changed since accept last
// run; perhaps we should request a keyframe.
internalEncodingIdTarget = externalEncodingIdTarget;
if (externalEncodingIdTarget > SUSPENDED_ENCODING_ID)
{
needsKeyframe = true;
}
}
if (externalEncodingIdTarget < 0
|| externalTemporalLayerIdTarget < 0)
{
// We stop forwarding immediately. We will need a keyframe in order
// to resume.
currentEncodingId = SUSPENDED_ENCODING_ID;
return false;
}
int temporalLayerIdOfFrame = frame.getTemporalLayer();
if (temporalLayerIdOfFrame < 0)
{
// temporal scalability is not enabled. Pretend that
// this is the base temporal layer.
temporalLayerIdOfFrame = 0;
}
int encodingId = RtpLayerDesc.getEidFromIndex(incomingIndex);
if (frame.isKeyframe())
{
logger.debug(() -> "Quality filter got keyframe for stream "
+ frame.getSsrc());
return acceptKeyframe(encodingId, receivedMs);
}
else if (currentEncodingId > SUSPENDED_ENCODING_ID)
{
if (isOutOfSwitchingPhase(receivedMs) && isPossibleToSwitch(encodingId))
{
// XXX(george) i've noticed some "rogue" base layer keyframes
// that trigger this. what happens is the client sends a base
// layer key frame, the bridge switches to that layer because
// for all it knows it may be the only keyframe sent by the
// client engine. then the bridge notices that packets from the
// higher quality streams are flowing and execution ends-up
// here. it is a mystery why the engine is "leaking" base layer
// key frames
needsKeyframe = true;
}
if (encodingId != currentEncodingId)
{
// for non-keyframes, we can't route anything but the current encoding
return false;
}
// This branch reads the {@link #currentEncodingId} and it
// filters packets based on their temporal layer.
if (currentEncodingId > externalEncodingIdTarget)
{
// pending downscale, decrease the frame rate until we
// downscale.
return temporalLayerIdOfFrame < 1;
}
else if (currentEncodingId < externalEncodingIdTarget)
{
// pending upscale, increase the frame rate until we upscale.
return true;
}
else
{
// The currentSpatialLayerId matches exactly the target
// currentSpatialLayerId.
return temporalLayerIdOfFrame <= externalTemporalLayerIdTarget;
}
}
else
{
// In this branch we're not processing a keyframe and the
// currentSpatialLayerId is in suspended state, which means we need
// a keyframe to start streaming again. Reaching this point also
// means that we want to forward something (because both
// externalEncodingIdTarget and externalTemporalLayerIdTarget
// are greater than 0) so we set the request keyframe flag.
// assert needsKeyframe == true;
return false;
}
} | synchronized boolean acceptFrame(
@NotNull VP8Frame frame,
int incomingIndex,
int externalTargetIndex, long receivedMs)
{
int externalTemporalLayerIdTarget
= RtpLayerDesc.getTidFromIndex(externalTargetIndex);
int externalEncodingIdTarget
= RtpLayerDesc.getEidFromIndex(externalTargetIndex);
if (externalEncodingIdTarget != internalEncodingIdTarget)
{
internalEncodingIdTarget = externalEncodingIdTarget;
if (externalEncodingIdTarget > SUSPENDED_ENCODING_ID)
{
needsKeyframe = true;
}
}
if (externalEncodingIdTarget < 0
|| externalTemporalLayerIdTarget < 0)
{
currentEncodingId = SUSPENDED_ENCODING_ID;
return false;
}
int temporalLayerIdOfFrame = frame.getTemporalLayer();
if (temporalLayerIdOfFrame < 0)
{
temporalLayerIdOfFrame = 0;
}
int encodingId = RtpLayerDesc.getEidFromIndex(incomingIndex);
if (frame.isKeyframe())
{
logger.debug(() -> "Quality filter got keyframe for stream "
+ frame.getSsrc());
return acceptKeyframe(encodingId, receivedMs);
}
else if (currentEncodingId > SUSPENDED_ENCODING_ID)
{
if (isOutOfSwitchingPhase(receivedMs) && isPossibleToSwitch(encodingId))
{
needsKeyframe = true;
}
if (encodingId != currentEncodingId)
{
return false;
}
if (currentEncodingId > externalEncodingIdTarget)
{
return temporalLayerIdOfFrame < 1;
}
else if (currentEncodingId < externalEncodingIdTarget)
{
return true;
}
else
{
return temporalLayerIdOfFrame <= externalTemporalLayerIdTarget;
}
}
else
{
return false;
}
} | synchronized boolean acceptframe( @notnull vp8frame frame, int incomingindex, int externaltargetindex, long receivedms) { int externaltemporallayeridtarget = rtplayerdesc.gettidfromindex(externaltargetindex); int externalencodingidtarget = rtplayerdesc.geteidfromindex(externaltargetindex); if (externalencodingidtarget != internalencodingidtarget) { internalencodingidtarget = externalencodingidtarget; if (externalencodingidtarget > suspended_encoding_id) { needskeyframe = true; } } if (externalencodingidtarget < 0 || externaltemporallayeridtarget < 0) { currentencodingid = suspended_encoding_id; return false; } int temporallayeridofframe = frame.gettemporallayer(); if (temporallayeridofframe < 0) { temporallayeridofframe = 0; } int encodingid = rtplayerdesc.geteidfromindex(incomingindex); if (frame.iskeyframe()) { logger.debug(() -> "quality filter got keyframe for stream " + frame.getssrc()); return acceptkeyframe(encodingid, receivedms); } else if (currentencodingid > suspended_encoding_id) { if (isoutofswitchingphase(receivedms) && ispossibletoswitch(encodingid)) { needskeyframe = true; } if (encodingid != currentencodingid) { return false; } if (currentencodingid > externalencodingidtarget) { return temporallayeridofframe < 1; } else if (currentencodingid < externalencodingidtarget) { return true; } else { return temporallayeridofframe <= externaltemporallayeridtarget; } } else { return false; } } | ngi-nix/jitsi-videobridge | [
1,
0,
0,
0
] |
19,563 | public static List getCRLs(X509Extension cert) {
// What follows is a poor man's CRL extractor, for those lacking
// a BouncyCastle "bcprov.jar" in their classpath.
// It's a very basic state-machine: look for a standard URL scheme
// (such as http), and then start looking for a terminator. After
// running hexdump a few times on these things, it looks to me like
// the UTF-8 value "65533" seems to happen near where these things
// terminate. (Of course this stuff is ASN.1 and not UTF-8, but
// I happen to like some of the functions available to the String
// object). - [email protected], May 10th, 2006
byte[] bytes = cert.getExtensionValue(CRL_EXTENSION);
LinkedList httpCRLS = new LinkedList();
LinkedList ftpCRLS = new LinkedList();
LinkedList otherCRLS = new LinkedList();
if (bytes == null) {
// just return empty list
return httpCRLS;
} else {
String s;
try {
s = new String(bytes, "UTF-8");
}
catch (UnsupportedEncodingException uee) {
// We're screwed if this thing has more than one CRL, because
// the "indeOf( (char) 65533 )" below isn't going to work.
s = new String(bytes);
}
int pos = 0;
while (pos >= 0) {
int x = -1, y;
int[] indexes = new int[4];
indexes[0] = s.indexOf("http", pos);
indexes[1] = s.indexOf("ldap", pos);
indexes[2] = s.indexOf("file", pos);
indexes[3] = s.indexOf("ftp", pos);
Arrays.sort(indexes);
for (int i = 0; i < indexes.length; i++) {
if (indexes[i] >= 0) {
x = indexes[i];
break;
}
}
if (x >= 0) {
y = s.indexOf((char) 65533, x);
String crl = y > x ? s.substring(x, y - 1) : s.substring(x);
if (y > x && crl.endsWith("0")) {
crl = crl.substring(0, crl.length() - 1);
}
String crlTest = crl.trim().toLowerCase();
if (crlTest.startsWith("http")) {
httpCRLS.add(crl);
} else if (crlTest.startsWith("ftp")) {
ftpCRLS.add(crl);
} else {
otherCRLS.add(crl);
}
pos = y;
} else {
pos = -1;
}
}
}
httpCRLS.addAll(ftpCRLS);
httpCRLS.addAll(otherCRLS);
return httpCRLS;
} | public static List getCRLs(X509Extension cert) {
byte[] bytes = cert.getExtensionValue(CRL_EXTENSION);
LinkedList httpCRLS = new LinkedList();
LinkedList ftpCRLS = new LinkedList();
LinkedList otherCRLS = new LinkedList();
if (bytes == null) {
return httpCRLS;
} else {
String s;
try {
s = new String(bytes, "UTF-8");
}
catch (UnsupportedEncodingException uee) {
s = new String(bytes);
}
int pos = 0;
while (pos >= 0) {
int x = -1, y;
int[] indexes = new int[4];
indexes[0] = s.indexOf("http", pos);
indexes[1] = s.indexOf("ldap", pos);
indexes[2] = s.indexOf("file", pos);
indexes[3] = s.indexOf("ftp", pos);
Arrays.sort(indexes);
for (int i = 0; i < indexes.length; i++) {
if (indexes[i] >= 0) {
x = indexes[i];
break;
}
}
if (x >= 0) {
y = s.indexOf((char) 65533, x);
String crl = y > x ? s.substring(x, y - 1) : s.substring(x);
if (y > x && crl.endsWith("0")) {
crl = crl.substring(0, crl.length() - 1);
}
String crlTest = crl.trim().toLowerCase();
if (crlTest.startsWith("http")) {
httpCRLS.add(crl);
} else if (crlTest.startsWith("ftp")) {
ftpCRLS.add(crl);
} else {
otherCRLS.add(crl);
}
pos = y;
} else {
pos = -1;
}
}
}
httpCRLS.addAll(ftpCRLS);
httpCRLS.addAll(otherCRLS);
return httpCRLS;
} | public static list getcrls(x509extension cert) { byte[] bytes = cert.getextensionvalue(crl_extension); linkedlist httpcrls = new linkedlist(); linkedlist ftpcrls = new linkedlist(); linkedlist othercrls = new linkedlist(); if (bytes == null) { return httpcrls; } else { string s; try { s = new string(bytes, "utf-8"); } catch (unsupportedencodingexception uee) { s = new string(bytes); } int pos = 0; while (pos >= 0) { int x = -1, y; int[] indexes = new int[4]; indexes[0] = s.indexof("http", pos); indexes[1] = s.indexof("ldap", pos); indexes[2] = s.indexof("file", pos); indexes[3] = s.indexof("ftp", pos); arrays.sort(indexes); for (int i = 0; i < indexes.length; i++) { if (indexes[i] >= 0) { x = indexes[i]; break; } } if (x >= 0) { y = s.indexof((char) 65533, x); string crl = y > x ? s.substring(x, y - 1) : s.substring(x); if (y > x && crl.endswith("0")) { crl = crl.substring(0, crl.length() - 1); } string crltest = crl.trim().tolowercase(); if (crltest.startswith("http")) { httpcrls.add(crl); } else if (crltest.startswith("ftp")) { ftpcrls.add(crl); } else { othercrls.add(crl); } pos = y; } else { pos = -1; } } } httpcrls.addall(ftpcrls); httpcrls.addall(othercrls); return httpcrls; } | rcbj/apigee-jwt-aad-timeapi-proxy | [
0,
0,
1,
0
] |
11,379 | public static int XColorByName(String nm) {
for (int i=0; i<data.length; i++) {
Setting d = data[i];
// System.out.println("Looking for " + nm + "; trying " + d.name);
if (nm.equalsIgnoreCase(d.name)) {
return d.value;
}
}
// How sad... none of the 752 color names matched...
throw new IllegalArgumentException("Unknown color " + nm);
} | public static int XColorByName(String nm) {
for (int i=0; i<data.length; i++) {
Setting d = data[i];
if (nm.equalsIgnoreCase(d.name)) {
return d.value;
}
}
throw new IllegalArgumentException("Unknown color " + nm);
} | public static int xcolorbyname(string nm) { for (int i=0; i<data.length; i++) { setting d = data[i]; if (nm.equalsignorecase(d.name)) { return d.value; } } throw new illegalargumentexception("unknown color " + nm); } | oritelad/libro-de-actividades | [
0,
0,
0,
0
] |
11,380 | public static String XColorByValue(int val) {
for (int i=0; i<data.length; i++) {
Setting d = data[i];
// System.out.println("Looking for " + nm + "; trying " + d.name);
if (val == d.value) {
return d.name;
}
}
// How sad... none of the 752 color names matched...
return "XColor(" + val + ")";
} | public static String XColorByValue(int val) {
for (int i=0; i<data.length; i++) {
Setting d = data[i];
if (val == d.value) {
return d.name;
}
}
return "XColor(" + val + ")";
} | public static string xcolorbyvalue(int val) { for (int i=0; i<data.length; i++) { setting d = data[i]; if (val == d.value) { return d.name; } } return "xcolor(" + val + ")"; } | oritelad/libro-de-actividades | [
0,
0,
0,
0
] |
11,618 | private static final boolean isHalfUlp (final double x) {
// TODO: do we need to check for NaN and infinity?
return (0.0 != x) && (0L == Doubles.significand(x)); } | private static final boolean isHalfUlp (final double x) {
return (0.0 != x) && (0L == Doubles.significand(x)); } | private static final boolean ishalfulp (final double x) { return (0.0 != x) && (0l == doubles.significand(x)); } | palisades-lakes/xfp-jmh | [
1,
0,
0,
0
] |
11,619 | private static final double halfUlp (final double x) {
// TODO: do we need to check for NaN and infinity?
// TODO: compare to c++ implementation
// TODO: return zero when x is zero?
if (0.0 == x) { return 0.0; }
return 0.5 * Math.ulp(x); } | private static final double halfUlp (final double x) {
if (0.0 == x) { return 0.0; }
return 0.5 * Math.ulp(x); } | private static final double halfulp (final double x) { if (0.0 == x) { return 0.0; } return 0.5 * math.ulp(x); } | palisades-lakes/xfp-jmh | [
1,
0,
0,
0
] |
11,774 | @Override
protected void parseSynchronusly() throws IOException {
final Optional<RDFFormat> formatByMimeType = getContentType().flatMap(Rio::getParserFormatForMIMEType);
final String base = getBase().map(IRI::getIRIString).orElse(null);
final ParserConfig parserConfig = getParserConfig();
// TODO: Should we need to set anything?
final RDFLoader loader = new RDFLoader(parserConfig, rdf4jTermFactory.getValueFactory());
final RDFHandler rdfHandler = makeRDFHandler();
if (getSourceFile().isPresent()) {
// NOTE: While we could have used
// loader.load(sourcePath.toFile()
// if the path fs provider == FileSystems.getDefault(),
// that RDFLoader method does not use absolute path
// as the base URI, so to be consistent
// we'll always do it with our own input stream
//
// That means we may have to guess format by extensions:
final Optional<RDFFormat> formatByFilename = getSourceFile().map(Path::getFileName).map(Path::toString)
.flatMap(Rio::getParserFormatForFileName);
// TODO: for the excited.. what about the extension after following
// symlinks?
final RDFFormat format = formatByMimeType.orElse(formatByFilename.orElse(null));
try (InputStream in = Files.newInputStream(getSourceFile().get())) {
loader.load(in, base, format, rdfHandler);
}
} else if (getSourceIri().isPresent()) {
try {
// TODO: Handle international IRIs properly
// (Unicode support for for hostname, path and query)
final URL url = new URL(getSourceIri().get().getIRIString());
// TODO: This probably does not support https:// -> http://
// redirections
loader.load(url, base, formatByMimeType.orElse(null), makeRDFHandler());
} catch (final MalformedURLException ex) {
throw new IOException("Can't handle source URL: " + getSourceIri().get(), ex);
}
}
// must be getSourceInputStream then, this is guaranteed by
// super.checkSource();
loader.load(getSourceInputStream().get(), base, formatByMimeType.orElse(null), rdfHandler);
} | @Override
protected void parseSynchronusly() throws IOException {
final Optional<RDFFormat> formatByMimeType = getContentType().flatMap(Rio::getParserFormatForMIMEType);
final String base = getBase().map(IRI::getIRIString).orElse(null);
final ParserConfig parserConfig = getParserConfig();
final RDFLoader loader = new RDFLoader(parserConfig, rdf4jTermFactory.getValueFactory());
final RDFHandler rdfHandler = makeRDFHandler();
if (getSourceFile().isPresent()) {
final Optional<RDFFormat> formatByFilename = getSourceFile().map(Path::getFileName).map(Path::toString)
.flatMap(Rio::getParserFormatForFileName);
final RDFFormat format = formatByMimeType.orElse(formatByFilename.orElse(null));
try (InputStream in = Files.newInputStream(getSourceFile().get())) {
loader.load(in, base, format, rdfHandler);
}
} else if (getSourceIri().isPresent()) {
try {
final URL url = new URL(getSourceIri().get().getIRIString());
loader.load(url, base, formatByMimeType.orElse(null), makeRDFHandler());
} catch (final MalformedURLException ex) {
throw new IOException("Can't handle source URL: " + getSourceIri().get(), ex);
}
}
loader.load(getSourceInputStream().get(), base, formatByMimeType.orElse(null), rdfHandler);
} | @override protected void parsesynchronusly() throws ioexception { final optional<rdfformat> formatbymimetype = getcontenttype().flatmap(rio::getparserformatformimetype); final string base = getbase().map(iri::getiristring).orelse(null); final parserconfig parserconfig = getparserconfig(); final rdfloader loader = new rdfloader(parserconfig, rdf4jtermfactory.getvaluefactory()); final rdfhandler rdfhandler = makerdfhandler(); if (getsourcefile().ispresent()) { final optional<rdfformat> formatbyfilename = getsourcefile().map(path::getfilename).map(path::tostring) .flatmap(rio::getparserformatforfilename); final rdfformat format = formatbymimetype.orelse(formatbyfilename.orelse(null)); try (inputstream in = files.newinputstream(getsourcefile().get())) { loader.load(in, base, format, rdfhandler); } } else if (getsourceiri().ispresent()) { try { final url url = new url(getsourceiri().get().getiristring()); loader.load(url, base, formatbymimetype.orelse(null), makerdfhandler()); } catch (final malformedurlexception ex) { throw new ioexception("can't handle source url: " + getsourceiri().get(), ex); } } loader.load(getsourceinputstream().get(), base, formatbymimetype.orelse(null), rdfhandler); } | nikosnikolaidis/commons-rdf | [
1,
1,
0,
0
] |
20,076 | int denybble(byte[] data, int pos)
{
// The spec is wrong: it's LSB, then MSB
// int v = (data[pos] << 4) | data[pos+1];
int v = (data[pos + 1] << 4) | data[pos];
// Some of the dark star stuff is 8-bit, so we have to make sure we're positive
if (v < 0) v += 256;
return v;
} | int denybble(byte[] data, int pos)
{
int v = (data[pos + 1] << 4) | data[pos];
if (v < 0) v += 256;
return v;
} | int denybble(byte[] data, int pos) { int v = (data[pos + 1] << 4) | data[pos]; if (v < 0) v += 256; return v; } | oco27/edisyn | [
0,
0,
1,
0
] |
20,077 | void addData(byte[] data, int pos, int val)
{
// The spec is wrong: it's LSB, then MSB
data[pos] = (byte)((val >> 0) & 0x0F);
data[pos + 1] = (byte)((val >> 4) & 0x0F);
//data[pos + 1] = (byte)((val >> 0) & 0x0F);
//data[pos] = (byte)((val >> 4) & 0x0F);
} | void addData(byte[] data, int pos, int val)
{
data[pos] = (byte)((val >> 0) & 0x0F);
data[pos + 1] = (byte)((val >> 4) & 0x0F);
} | void adddata(byte[] data, int pos, int val) { data[pos] = (byte)((val >> 0) & 0x0f); data[pos + 1] = (byte)((val >> 4) & 0x0f); } | oco27/edisyn | [
0,
0,
1,
0
] |
11,963 | protected void paint(GraphicsContext gc) {
// Get a sorted copy of the series data. Once we have this we can release locks.
Rectangle2D rectViewport = viewport;
HashMap<Series, LinkedList<T>> data = new HashMap<>();
for(Series<T> series : this.series) {
LinkedList<T> seriesData = new LinkedList<>(series.getData());
seriesData.sort((o1, o2) -> fnXValue.apply(o1).compareTo(fnXValue.apply(o2)));
data.put(series, seriesData);
}
//Make sure we have data before continuing.
if(data.size() == 0) {
return;
}
if(data.values().stream().flatMap(LinkedList::stream).count() == 0) {
return;
}
// Calculate the range on each axis.
Range<TX> axisX;
Range<TY> axisY;
//TODO: Since we just sorted by X, we can optimize this a bit.
if(rangeX != null) {
axisX = new Range<>(
rangeX.min == null ? data.values().stream().flatMap(LinkedList::stream).map(fnXValue).min(TX::compareTo).get() : rangeX.min,
rangeX.max == null ? data.values().stream().flatMap(LinkedList::stream).map(fnXValue).max(TX::compareTo).get() : rangeX.max
);
} else {
axisX = new Range<>(
data.values().stream().flatMap(LinkedList::stream).map(fnXValue).min(TX::compareTo).get(),
data.values().stream().flatMap(LinkedList::stream).map(fnXValue).max(TX::compareTo).get()
);
}
if(rangeY != null) {
axisY = new Range<>(
rangeY.min == null ? data.values().stream().flatMap(LinkedList::stream).map(fnYValue).min(TY::compareTo).get() : rangeY.min,
rangeY.max == null ? data.values().stream().flatMap(LinkedList::stream).map(fnYValue).max(TY::compareTo).get() : rangeY.max
);
} else {
axisY = new Range<>(
data.values().stream().flatMap(LinkedList::stream).map(fnYValue).min(TY::compareTo).get(),
data.values().stream().flatMap(LinkedList::stream).map(fnYValue).max(TY::compareTo).get()
);
}
final List<TX> ticksX = fnTicksX.generateTicks(axisX, viewport.getMinX(), viewport.getMaxX());
//axisX = new Range<>(ticksX.get(0), ticksX.get(ticksX.size() - 1));
//rangeXRendered = axisX;
final List<TY> ticksY = fnTicksY.generateTicks(axisY, viewport.getMinY(), viewport.getMaxY());
//axisY = new Range<>(ticksY.get(0), ticksY.get(ticksY.size() - 1));
// Calculate the width of the widest Y-axis label and the height of the tallest X-axis label.
double maxWidth = 0.0;
double pxMinSpacingBetweenTicks = 0.0;
for(TY tickY : ticksY) {
textForMeasuring.setText(fnFormatY.apply(tickY));
maxWidth = Math.max(maxWidth, textForMeasuring.getLayoutBounds().getWidth());
pxMinSpacingBetweenTicks = Math.max(pxMinSpacingBetweenTicks, 2.0 * textForMeasuring.getLayoutBounds().getHeight());
}
double maxHeight = 0.0;
for(TX tickX : ticksX) {
//X-labels are displayed at a 30-degree incline.
//The approximate width of the rotated text is 0.87*{width}
//The distance from the top of the bounding to the origin from which text should be drawn is 0.5*{length} + 0.87*{height}
textForMeasuring.setText(fnFormatX.apply(tickX));
final Bounds boundsText = textForMeasuring.getLayoutBounds();
maxHeight = Math.max(maxHeight, 0.5 * boundsText.getWidth() + 0.87 * boundsText.getHeight());
//TODO: Also check maxWidth against the amount by which this would underflow the X=0 line
}
final Rectangle2D sizeAxisLabel = new Rectangle2D(0.0, 0.0, maxWidth, maxHeight);
if(getWidth() <= sizeAxisLabel.getWidth() || getHeight() <= sizeAxisLabel.getHeight()) {
return;
}
rectChart = new Rectangle2D(sizeAxisLabel.getWidth() + pxTickLength, 0.0, getWidth() - sizeAxisLabel.getWidth() - pxTickLength, getHeight() - sizeAxisLabel.getHeight() - pxTickLength);
// Render series data, build tooltip cache
renderedPoints.clear();
for(Map.Entry<Series, LinkedList<T>> entry : data.entrySet()) {
Point2D ptPrev = null;
gc.setStroke(entry.getKey().getColor());
//TODO: Make this customizable
gc.setLineWidth(2.0);
for(T value : entry.getValue()) {
TX x = fnXValue.apply(value);
TY y = fnYValue.apply(value);
// Add rectViewport.getMinY() instead of subtracting because we're mirroring the Y coordinate around the X-axis.
Point2D ptNew = new Point2D(
chartXFromDataX(axisX, x),
chartYFromDataY(axisY, y));
Point<T, TX, TY> pt = new Point<>(
value,
x,
y,
ptNew,
entry.getKey().getColor()
);
renderedPoints.add(pt);
if(ptPrev != null) {
gc.strokeLine(ptPrev.getX(), ptPrev.getY(), ptNew.getX(), ptNew.getY());
}
gc.strokeOval(ptNew.getX() - pointRadius, ptNew.getY() - pointRadius, pointRadius * 2, pointRadius * 2);
ptPrev = ptNew;
}
}
// Render axes (last so it overwrites any values near an axis)
//Clear the axis area
gc.clearRect(0.0, 0.0, sizeAxisLabel.getWidth() + pxTickLength, getHeight());
gc.clearRect(0.0, getHeight() - sizeAxisLabel.getHeight() - pxTickLength, getWidth(), sizeAxisLabel.getHeight());
//Draw the axes
gc.setStroke(Color.BLACK);
gc.setLineWidth(0.5);
gc.strokeLine(rectChart.getMinX(), 0.0, rectChart.getMinX(), rectChart.getMaxY());
gc.strokeLine(rectChart.getMinX(), rectChart.getMaxY(), rectChart.getMaxX(), rectChart.getMaxY());
Font font = Font.font("MONOSPACE", 12.0);
gc.setFont(font);
//ticksX and ticksY are lists of the corresponding values; they need to be handed of to the corresponding fnNormalize and then scaled for display.
double pxLast = -pxMinSpacingBetweenTicks;
for(TX tickX : ticksX) {
final double pxX = chartXFromDataX(axisX, tickX);
if(pxLast + pxMinSpacingBetweenTicks > pxX) {
continue;
}
pxLast = pxX;
gc.strokeLine(pxX, rectChart.getMaxY(), pxX, rectChart.getMaxY() + pxTickLength);
final String textLabel = fnFormatX.apply(tickX);
textForMeasuring.setText(textLabel);
final Bounds boundsText = textForMeasuring.getLayoutBounds();
double offsetY = 0.5 * boundsText.getWidth() + 0.87 * boundsText.getHeight();
double offsetX = -0.87 * boundsText.getWidth();
gc.save();
// Translate then rotate to rotate text around local origin rather than rotating around the canvas origin.
// Rotating and drawing at an offset results in a rotation around the origin.
gc.translate(pxX + offsetX, rectChart.getMaxY() + offsetY);
gc.rotate(-30.0);
gc.strokeText(textLabel, 0.0, 0.0);
gc.restore();
}
for(TY tickY : ticksY) {
final double pxY = chartYFromDataY(axisY, tickY);
gc.strokeLine(rectChart.getMinX() - pxTickLength, pxY, rectChart.getMinX(), pxY);
final String textLabel = fnFormatY.apply(tickY);
textForMeasuring.setText(textLabel);
gc.strokeText(fnFormatY.apply(tickY), 0.0, pxY + textForMeasuring.getLayoutBounds().getHeight());
}
} | protected void paint(GraphicsContext gc) {
Rectangle2D rectViewport = viewport;
HashMap<Series, LinkedList<T>> data = new HashMap<>();
for(Series<T> series : this.series) {
LinkedList<T> seriesData = new LinkedList<>(series.getData());
seriesData.sort((o1, o2) -> fnXValue.apply(o1).compareTo(fnXValue.apply(o2)));
data.put(series, seriesData);
}
if(data.size() == 0) {
return;
}
if(data.values().stream().flatMap(LinkedList::stream).count() == 0) {
return;
}
Range<TX> axisX;
Range<TY> axisY;
if(rangeX != null) {
axisX = new Range<>(
rangeX.min == null ? data.values().stream().flatMap(LinkedList::stream).map(fnXValue).min(TX::compareTo).get() : rangeX.min,
rangeX.max == null ? data.values().stream().flatMap(LinkedList::stream).map(fnXValue).max(TX::compareTo).get() : rangeX.max
);
} else {
axisX = new Range<>(
data.values().stream().flatMap(LinkedList::stream).map(fnXValue).min(TX::compareTo).get(),
data.values().stream().flatMap(LinkedList::stream).map(fnXValue).max(TX::compareTo).get()
);
}
if(rangeY != null) {
axisY = new Range<>(
rangeY.min == null ? data.values().stream().flatMap(LinkedList::stream).map(fnYValue).min(TY::compareTo).get() : rangeY.min,
rangeY.max == null ? data.values().stream().flatMap(LinkedList::stream).map(fnYValue).max(TY::compareTo).get() : rangeY.max
);
} else {
axisY = new Range<>(
data.values().stream().flatMap(LinkedList::stream).map(fnYValue).min(TY::compareTo).get(),
data.values().stream().flatMap(LinkedList::stream).map(fnYValue).max(TY::compareTo).get()
);
}
final List<TX> ticksX = fnTicksX.generateTicks(axisX, viewport.getMinX(), viewport.getMaxX());
final List<TY> ticksY = fnTicksY.generateTicks(axisY, viewport.getMinY(), viewport.getMaxY());
double maxWidth = 0.0;
double pxMinSpacingBetweenTicks = 0.0;
for(TY tickY : ticksY) {
textForMeasuring.setText(fnFormatY.apply(tickY));
maxWidth = Math.max(maxWidth, textForMeasuring.getLayoutBounds().getWidth());
pxMinSpacingBetweenTicks = Math.max(pxMinSpacingBetweenTicks, 2.0 * textForMeasuring.getLayoutBounds().getHeight());
}
double maxHeight = 0.0;
for(TX tickX : ticksX) {
textForMeasuring.setText(fnFormatX.apply(tickX));
final Bounds boundsText = textForMeasuring.getLayoutBounds();
maxHeight = Math.max(maxHeight, 0.5 * boundsText.getWidth() + 0.87 * boundsText.getHeight());
}
final Rectangle2D sizeAxisLabel = new Rectangle2D(0.0, 0.0, maxWidth, maxHeight);
if(getWidth() <= sizeAxisLabel.getWidth() || getHeight() <= sizeAxisLabel.getHeight()) {
return;
}
rectChart = new Rectangle2D(sizeAxisLabel.getWidth() + pxTickLength, 0.0, getWidth() - sizeAxisLabel.getWidth() - pxTickLength, getHeight() - sizeAxisLabel.getHeight() - pxTickLength);
renderedPoints.clear();
for(Map.Entry<Series, LinkedList<T>> entry : data.entrySet()) {
Point2D ptPrev = null;
gc.setStroke(entry.getKey().getColor());
gc.setLineWidth(2.0);
for(T value : entry.getValue()) {
TX x = fnXValue.apply(value);
TY y = fnYValue.apply(value);
Point2D ptNew = new Point2D(
chartXFromDataX(axisX, x),
chartYFromDataY(axisY, y));
Point<T, TX, TY> pt = new Point<>(
value,
x,
y,
ptNew,
entry.getKey().getColor()
);
renderedPoints.add(pt);
if(ptPrev != null) {
gc.strokeLine(ptPrev.getX(), ptPrev.getY(), ptNew.getX(), ptNew.getY());
}
gc.strokeOval(ptNew.getX() - pointRadius, ptNew.getY() - pointRadius, pointRadius * 2, pointRadius * 2);
ptPrev = ptNew;
}
}
gc.clearRect(0.0, 0.0, sizeAxisLabel.getWidth() + pxTickLength, getHeight());
gc.clearRect(0.0, getHeight() - sizeAxisLabel.getHeight() - pxTickLength, getWidth(), sizeAxisLabel.getHeight());
gc.setStroke(Color.BLACK);
gc.setLineWidth(0.5);
gc.strokeLine(rectChart.getMinX(), 0.0, rectChart.getMinX(), rectChart.getMaxY());
gc.strokeLine(rectChart.getMinX(), rectChart.getMaxY(), rectChart.getMaxX(), rectChart.getMaxY());
Font font = Font.font("MONOSPACE", 12.0);
gc.setFont(font);
double pxLast = -pxMinSpacingBetweenTicks;
for(TX tickX : ticksX) {
final double pxX = chartXFromDataX(axisX, tickX);
if(pxLast + pxMinSpacingBetweenTicks > pxX) {
continue;
}
pxLast = pxX;
gc.strokeLine(pxX, rectChart.getMaxY(), pxX, rectChart.getMaxY() + pxTickLength);
final String textLabel = fnFormatX.apply(tickX);
textForMeasuring.setText(textLabel);
final Bounds boundsText = textForMeasuring.getLayoutBounds();
double offsetY = 0.5 * boundsText.getWidth() + 0.87 * boundsText.getHeight();
double offsetX = -0.87 * boundsText.getWidth();
gc.save();
gc.translate(pxX + offsetX, rectChart.getMaxY() + offsetY);
gc.rotate(-30.0);
gc.strokeText(textLabel, 0.0, 0.0);
gc.restore();
}
for(TY tickY : ticksY) {
final double pxY = chartYFromDataY(axisY, tickY);
gc.strokeLine(rectChart.getMinX() - pxTickLength, pxY, rectChart.getMinX(), pxY);
final String textLabel = fnFormatY.apply(tickY);
textForMeasuring.setText(textLabel);
gc.strokeText(fnFormatY.apply(tickY), 0.0, pxY + textForMeasuring.getLayoutBounds().getHeight());
}
} | protected void paint(graphicscontext gc) { rectangle2d rectviewport = viewport; hashmap<series, linkedlist<t>> data = new hashmap<>(); for(series<t> series : this.series) { linkedlist<t> seriesdata = new linkedlist<>(series.getdata()); seriesdata.sort((o1, o2) -> fnxvalue.apply(o1).compareto(fnxvalue.apply(o2))); data.put(series, seriesdata); } if(data.size() == 0) { return; } if(data.values().stream().flatmap(linkedlist::stream).count() == 0) { return; } range<tx> axisx; range<ty> axisy; if(rangex != null) { axisx = new range<>( rangex.min == null ? data.values().stream().flatmap(linkedlist::stream).map(fnxvalue).min(tx::compareto).get() : rangex.min, rangex.max == null ? data.values().stream().flatmap(linkedlist::stream).map(fnxvalue).max(tx::compareto).get() : rangex.max ); } else { axisx = new range<>( data.values().stream().flatmap(linkedlist::stream).map(fnxvalue).min(tx::compareto).get(), data.values().stream().flatmap(linkedlist::stream).map(fnxvalue).max(tx::compareto).get() ); } if(rangey != null) { axisy = new range<>( rangey.min == null ? data.values().stream().flatmap(linkedlist::stream).map(fnyvalue).min(ty::compareto).get() : rangey.min, rangey.max == null ? 
data.values().stream().flatmap(linkedlist::stream).map(fnyvalue).max(ty::compareto).get() : rangey.max ); } else { axisy = new range<>( data.values().stream().flatmap(linkedlist::stream).map(fnyvalue).min(ty::compareto).get(), data.values().stream().flatmap(linkedlist::stream).map(fnyvalue).max(ty::compareto).get() ); } final list<tx> ticksx = fnticksx.generateticks(axisx, viewport.getminx(), viewport.getmaxx()); final list<ty> ticksy = fnticksy.generateticks(axisy, viewport.getminy(), viewport.getmaxy()); double maxwidth = 0.0; double pxminspacingbetweenticks = 0.0; for(ty ticky : ticksy) { textformeasuring.settext(fnformaty.apply(ticky)); maxwidth = math.max(maxwidth, textformeasuring.getlayoutbounds().getwidth()); pxminspacingbetweenticks = math.max(pxminspacingbetweenticks, 2.0 * textformeasuring.getlayoutbounds().getheight()); } double maxheight = 0.0; for(tx tickx : ticksx) { textformeasuring.settext(fnformatx.apply(tickx)); final bounds boundstext = textformeasuring.getlayoutbounds(); maxheight = math.max(maxheight, 0.5 * boundstext.getwidth() + 0.87 * boundstext.getheight()); } final rectangle2d sizeaxislabel = new rectangle2d(0.0, 0.0, maxwidth, maxheight); if(getwidth() <= sizeaxislabel.getwidth() || getheight() <= sizeaxislabel.getheight()) { return; } rectchart = new rectangle2d(sizeaxislabel.getwidth() + pxticklength, 0.0, getwidth() - sizeaxislabel.getwidth() - pxticklength, getheight() - sizeaxislabel.getheight() - pxticklength); renderedpoints.clear(); for(map.entry<series, linkedlist<t>> entry : data.entryset()) { point2d ptprev = null; gc.setstroke(entry.getkey().getcolor()); gc.setlinewidth(2.0); for(t value : entry.getvalue()) { tx x = fnxvalue.apply(value); ty y = fnyvalue.apply(value); point2d ptnew = new point2d( chartxfromdatax(axisx, x), chartyfromdatay(axisy, y)); point<t, tx, ty> pt = new point<>( value, x, y, ptnew, entry.getkey().getcolor() ); renderedpoints.add(pt); if(ptprev != null) { gc.strokeline(ptprev.getx(), ptprev.gety(), 
ptnew.getx(), ptnew.gety()); } gc.strokeoval(ptnew.getx() - pointradius, ptnew.gety() - pointradius, pointradius * 2, pointradius * 2); ptprev = ptnew; } } gc.clearrect(0.0, 0.0, sizeaxislabel.getwidth() + pxticklength, getheight()); gc.clearrect(0.0, getheight() - sizeaxislabel.getheight() - pxticklength, getwidth(), sizeaxislabel.getheight()); gc.setstroke(color.black); gc.setlinewidth(0.5); gc.strokeline(rectchart.getminx(), 0.0, rectchart.getminx(), rectchart.getmaxy()); gc.strokeline(rectchart.getminx(), rectchart.getmaxy(), rectchart.getmaxx(), rectchart.getmaxy()); font font = font.font("monospace", 12.0); gc.setfont(font); double pxlast = -pxminspacingbetweenticks; for(tx tickx : ticksx) { final double pxx = chartxfromdatax(axisx, tickx); if(pxlast + pxminspacingbetweenticks > pxx) { continue; } pxlast = pxx; gc.strokeline(pxx, rectchart.getmaxy(), pxx, rectchart.getmaxy() + pxticklength); final string textlabel = fnformatx.apply(tickx); textformeasuring.settext(textlabel); final bounds boundstext = textformeasuring.getlayoutbounds(); double offsety = 0.5 * boundstext.getwidth() + 0.87 * boundstext.getheight(); double offsetx = -0.87 * boundstext.getwidth(); gc.save(); gc.translate(pxx + offsetx, rectchart.getmaxy() + offsety); gc.rotate(-30.0); gc.stroketext(textlabel, 0.0, 0.0); gc.restore(); } for(ty ticky : ticksy) { final double pxy = chartyfromdatay(axisy, ticky); gc.strokeline(rectchart.getminx() - pxticklength, pxy, rectchart.getminx(), pxy); final string textlabel = fnformaty.apply(ticky); textformeasuring.settext(textlabel); gc.stroketext(fnformaty.apply(ticky), 0.0, pxy + textformeasuring.getlayoutbounds().getheight()); } } | nathanawmk/ICS-Security-Tools | [
1,
1,
0,
0
] |
12,044 | private void createView() {
FontIcon closeBtn = Controls.fontIcon(Material2AL.CLOSE, "close-icon");
closeBtn.setCursor(Cursor.HAND);
closeBtn.setOnMouseClicked(e -> close());
GridPane.setValignment(closeBtn, VPos.TOP);
GridPane.setHalignment(closeBtn, HPos.RIGHT);
TextFlow textFlow = Containers.create(TextFlow::new, "text");
// sadly setMaxHeight() isn't working with TextFlow
// so, we have to clip notification text manually
Text text = new Text(notification.getClippedText(Toast.MAX_MESSAGE_LEN));
textFlow.getChildren().addAll(text);
Hyperlink expandBtn = new Hyperlink(t(ACTION_MORE));
expandBtn.setOnAction(e -> expand());
if (expandHandler == null) { expandBtn.setManaged(false); }
add(textFlow, 0, 0);
add(closeBtn, 1, 0);
add(expandBtn, 0, 2, REMAINING, 1);
getColumnConstraints().addAll(HGROW_ALWAYS, HGROW_NEVER);
getRowConstraints().addAll(VGROW_ALWAYS, VGROW_NEVER);
VBox.setVgrow(this, Priority.NEVER);
getStyleClass().addAll("toast", notification.getType().name().toLowerCase());
} | private void createView() {
FontIcon closeBtn = Controls.fontIcon(Material2AL.CLOSE, "close-icon");
closeBtn.setCursor(Cursor.HAND);
closeBtn.setOnMouseClicked(e -> close());
GridPane.setValignment(closeBtn, VPos.TOP);
GridPane.setHalignment(closeBtn, HPos.RIGHT);
TextFlow textFlow = Containers.create(TextFlow::new, "text");
Text text = new Text(notification.getClippedText(Toast.MAX_MESSAGE_LEN));
textFlow.getChildren().addAll(text);
Hyperlink expandBtn = new Hyperlink(t(ACTION_MORE));
expandBtn.setOnAction(e -> expand());
if (expandHandler == null) { expandBtn.setManaged(false); }
add(textFlow, 0, 0);
add(closeBtn, 1, 0);
add(expandBtn, 0, 2, REMAINING, 1);
getColumnConstraints().addAll(HGROW_ALWAYS, HGROW_NEVER);
getRowConstraints().addAll(VGROW_ALWAYS, VGROW_NEVER);
VBox.setVgrow(this, Priority.NEVER);
getStyleClass().addAll("toast", notification.getType().name().toLowerCase());
} | private void createview() { fonticon closebtn = controls.fonticon(material2al.close, "close-icon"); closebtn.setcursor(cursor.hand); closebtn.setonmouseclicked(e -> close()); gridpane.setvalignment(closebtn, vpos.top); gridpane.sethalignment(closebtn, hpos.right); textflow textflow = containers.create(textflow::new, "text"); text text = new text(notification.getclippedtext(toast.max_message_len)); textflow.getchildren().addall(text); hyperlink expandbtn = new hyperlink(t(action_more)); expandbtn.setonaction(e -> expand()); if (expandhandler == null) { expandbtn.setmanaged(false); } add(textflow, 0, 0); add(closebtn, 1, 0); add(expandbtn, 0, 2, remaining, 1); getcolumnconstraints().addall(hgrow_always, hgrow_never); getrowconstraints().addall(vgrow_always, vgrow_never); vbox.setvgrow(this, priority.never); getstyleclass().addall("toast", notification.gettype().name().tolowercase()); } | mkpaz/telek | [
0,
0,
0,
0
] |
12,086 | public static void main(String[] args) {
// TODO code application logic here
// cria o objeto Scanner
Scanner input = new Scanner(System.in);
// entrada de dados
System.out.print("Digite três números de ponto-flutuante: ");
double numero1 = input.nextDouble(); // primeiro número
double numero2 = input.nextDouble(); // segundo número
double numero3 = input.nextDouble(); // terceiro número
// detremina o maior valor
double resultado = maiorValor( numero1, numero2, numero3 );
// imprime resultado
System.out.printf("O maior valor é %.2f%n", resultado );
} | public static void main(String[] args) {
Scanner input = new Scanner(System.in);
System.out.print("Digite três números de ponto-flutuante: ");
double numero1 = input.nextDouble(); double numero2 = input.nextDouble(); double numero3 = input.nextDouble();
double resultado = maiorValor( numero1, numero2, numero3 );
System.out.printf("O maior valor é %.2f%n", resultado );
} | public static void main(string[] args) { scanner input = new scanner(system.in); system.out.print("digite três números de ponto-flutuante: "); double numero1 = input.nextdouble(); double numero2 = input.nextdouble(); double numero3 = input.nextdouble(); double resultado = maiorvalor( numero1, numero2, numero3 ); system.out.printf("o maior valor é %.2f%n", resultado ); } | pedro-filho-81/LivrosDeJava | [
0,
1,
0,
0
] |
20,687 | @Test
public void fetchExistingAggregateById_populatesValues()
{
try(PhotonTransaction transaction = photon.beginTransaction())
{
PhotonTestTable photonTestTable = transaction.query(PhotonTestTable.class).fetchById(1);
// The database does not store a time zone, so we assume the date is in the system's time zone. But to make these tests
// compare epoch times but still work with any system time zone, we have to offset the epoch to the system's time zone.
int currentUtcOffset = TimeZone.getDefault().getOffset(new Date().getTime());
assertNotNull(photonTestTable);
assertEquals(1, photonTestTable.getId());
assertEquals(UUID.fromString("8ED1E1BD-253E-4469-B4CB-71E1217825B7"), photonTestTable.getUuidColumn());
assertEquals(ZonedDateTime.ofInstant(Instant.ofEpochMilli(1489915698000L - currentUtcOffset), ZoneId.systemDefault()), photonTestTable.getDateColumn());
assertEquals("Test String", photonTestTable.getVarcharColumn());
}
} | @Test
public void fetchExistingAggregateById_populatesValues()
{
try(PhotonTransaction transaction = photon.beginTransaction())
{
PhotonTestTable photonTestTable = transaction.query(PhotonTestTable.class).fetchById(1);
int currentUtcOffset = TimeZone.getDefault().getOffset(new Date().getTime());
assertNotNull(photonTestTable);
assertEquals(1, photonTestTable.getId());
assertEquals(UUID.fromString("8ED1E1BD-253E-4469-B4CB-71E1217825B7"), photonTestTable.getUuidColumn());
assertEquals(ZonedDateTime.ofInstant(Instant.ofEpochMilli(1489915698000L - currentUtcOffset), ZoneId.systemDefault()), photonTestTable.getDateColumn());
assertEquals("Test String", photonTestTable.getVarcharColumn());
}
} | @test public void fetchexistingaggregatebyid_populatesvalues() { try(photontransaction transaction = photon.begintransaction()) { photontesttable photontesttable = transaction.query(photontesttable.class).fetchbyid(1); int currentutcoffset = timezone.getdefault().getoffset(new date().gettime()); assertnotnull(photontesttable); assertequals(1, photontesttable.getid()); assertequals(uuid.fromstring("8ed1e1bd-253e-4469-b4cb-71e1217825b7"), photontesttable.getuuidcolumn()); assertequals(zoneddatetime.ofinstant(instant.ofepochmilli(1489915698000l - currentutcoffset), zoneid.systemdefault()), photontesttable.getdatecolumn()); assertequals("test string", photontesttable.getvarcharcolumn()); } } | molcikas/photon | [
0,
0,
1,
0
] |
12,596 | public static BufferedImage boofCVBinarization(BufferedImage image, int threshold) {
GrayU8 input8 = ConvertBufferedImage.convertFrom(image,(GrayU8)null);
GrayF32 input32 = ConvertBufferedImage.convertFromSingle(image, null, GrayF32.class);
GrayU8 binary8 = new GrayU8(input8.getWidth(), input8.getHeight());
// GrayF32 binary32 = new GrayF32(input8.getWidth(), input8.getHeight());
boolean down = false;
ThresholdImageOps.threshold(input8, binary8, threshold, down);
// GThresholdImageOps.threshold(input32, binary8, threshold, down);
BufferedImage outputImage = null;
// outputImage = ConvertBufferedImage.convertTo(binary8, null);
boolean invert = false;
outputImage = VisualizeBinaryData.renderBinary(binary8, invert, null);
//
// outputImage = ImageUtil.removeAlpha(outputImage);
// outputImage = ImageUtil.convertRGB(outputImage,
// new int[] {0x0d0d0d}, new int[] {0xffffff});
return outputImage;
} | public static BufferedImage boofCVBinarization(BufferedImage image, int threshold) {
GrayU8 input8 = ConvertBufferedImage.convertFrom(image,(GrayU8)null);
GrayF32 input32 = ConvertBufferedImage.convertFromSingle(image, null, GrayF32.class);
GrayU8 binary8 = new GrayU8(input8.getWidth(), input8.getHeight());
boolean down = false;
ThresholdImageOps.threshold(input8, binary8, threshold, down);
BufferedImage outputImage = null;
boolean invert = false;
outputImage = VisualizeBinaryData.renderBinary(binary8, invert, null);
return outputImage;
} | public static bufferedimage boofcvbinarization(bufferedimage image, int threshold) { grayu8 input8 = convertbufferedimage.convertfrom(image,(grayu8)null); grayf32 input32 = convertbufferedimage.convertfromsingle(image, null, grayf32.class); grayu8 binary8 = new grayu8(input8.getwidth(), input8.getheight()); boolean down = false; thresholdimageops.threshold(input8, binary8, threshold, down); bufferedimage outputimage = null; boolean invert = false; outputimage = visualizebinarydata.renderbinary(binary8, invert, null); return outputimage; } | nuest/ami3 | [
0,
0,
1,
0
] |
20,812 | static int examineVar(final int var, final ArrayList<Read> list, final IntList collection, final int[] rvector, LinkedHashMap<Integer, ArrayList<Read>> map){
collection.clear();
for(Read r : list){
final ReadKey rk=(ReadKey) r.obj;
final IntList vars=rk.vars;
for(int i=0; i<vars.size; i++){
final int v2=vars.get(i);
if(v2!=var){
collection.add(v2);
}
}
}
collection.sort();
final int varCount=list.size();
int lastVar2=-1, bestVar2=-1;
int sharedCount=0, bestSharedCount=0, bestDifferent=999;
for(int i=0; i<collection.size; i++){//TODO: Note that not all reads actually cover a given var
int currentVar2=collection.get(i);
if(currentVar2==lastVar2){sharedCount++;}
else{
if(sharedCount>bestSharedCount){
final int different1=(sharedCount==varCount ? 0 : countDifferentAlleles(lastVar2, list));
if(different1*8<varCount){
ArrayList<Read> list2=map.get(lastVar2);
final int varCount2=(list2==null ? 0 : list2.size());
final int different2=(sharedCount==varCount2 ? 0 : countDifferentAlleles(var, list2));
if(different2*8<varCount2){
bestVar2=lastVar2;
bestSharedCount=sharedCount;
bestDifferent=Tools.max(different1, different2);
}
}
}
sharedCount=1;
}
lastVar2=currentVar2;
}
if(sharedCount>bestSharedCount){
final int different1=(sharedCount==varCount ? 0 : countDifferentAlleles(lastVar2, list));
if(different1*8<varCount){
ArrayList<Read> list2=map.get(lastVar2);
final int varCount2=(list2==null ? 0 : list2.size());
final int different2=(sharedCount==varCount2 ? 0 : countDifferentAlleles(var, list2));
if(different2*8<varCount2){
bestVar2=lastVar2;
bestSharedCount=sharedCount;
bestDifferent=Tools.max(different1, different2);
}
}
}
rvector[0]=var;
rvector[1]=list.size();
rvector[2]=bestVar2;
rvector[3]=sharedCount;
rvector[4]=bestDifferent;
return bestVar2;
} | static int examineVar(final int var, final ArrayList<Read> list, final IntList collection, final int[] rvector, LinkedHashMap<Integer, ArrayList<Read>> map){
collection.clear();
for(Read r : list){
final ReadKey rk=(ReadKey) r.obj;
final IntList vars=rk.vars;
for(int i=0; i<vars.size; i++){
final int v2=vars.get(i);
if(v2!=var){
collection.add(v2);
}
}
}
collection.sort();
final int varCount=list.size();
int lastVar2=-1, bestVar2=-1;
int sharedCount=0, bestSharedCount=0, bestDifferent=999;
for(int i=0; i<collection.size; i++)
int currentVar2=collection.get(i);
if(currentVar2==lastVar2){sharedCount++;}
else{
if(sharedCount>bestSharedCount){
final int different1=(sharedCount==varCount ? 0 : countDifferentAlleles(lastVar2, list));
if(different1*8<varCount){
ArrayList<Read> list2=map.get(lastVar2);
final int varCount2=(list2==null ? 0 : list2.size());
final int different2=(sharedCount==varCount2 ? 0 : countDifferentAlleles(var, list2));
if(different2*8<varCount2){
bestVar2=lastVar2;
bestSharedCount=sharedCount;
bestDifferent=Tools.max(different1, different2);
}
}
}
sharedCount=1;
}
lastVar2=currentVar2;
}
if(sharedCount>bestSharedCount){
final int different1=(sharedCount==varCount ? 0 : countDifferentAlleles(lastVar2, list));
if(different1*8<varCount){
ArrayList<Read> list2=map.get(lastVar2);
final int varCount2=(list2==null ? 0 : list2.size());
final int different2=(sharedCount==varCount2 ? 0 : countDifferentAlleles(var, list2));
if(different2*8<varCount2){
bestVar2=lastVar2;
bestSharedCount=sharedCount;
bestDifferent=Tools.max(different1, different2);
}
}
}
rvector[0]=var;
rvector[1]=list.size();
rvector[2]=bestVar2;
rvector[3]=sharedCount;
rvector[4]=bestDifferent;
return bestVar2;
} | static int examinevar(final int var, final arraylist<read> list, final intlist collection, final int[] rvector, linkedhashmap<integer, arraylist<read>> map){ collection.clear(); for(read r : list){ final readkey rk=(readkey) r.obj; final intlist vars=rk.vars; for(int i=0; i<vars.size; i++){ final int v2=vars.get(i); if(v2!=var){ collection.add(v2); } } } collection.sort(); final int varcount=list.size(); int lastvar2=-1, bestvar2=-1; int sharedcount=0, bestsharedcount=0, bestdifferent=999; for(int i=0; i<collection.size; i++) int currentvar2=collection.get(i); if(currentvar2==lastvar2){sharedcount++;} else{ if(sharedcount>bestsharedcount){ final int different1=(sharedcount==varcount ? 0 : countdifferentalleles(lastvar2, list)); if(different1*8<varcount){ arraylist<read> list2=map.get(lastvar2); final int varcount2=(list2==null ? 0 : list2.size()); final int different2=(sharedcount==varcount2 ? 0 : countdifferentalleles(var, list2)); if(different2*8<varcount2){ bestvar2=lastvar2; bestsharedcount=sharedcount; bestdifferent=tools.max(different1, different2); } } } sharedcount=1; } lastvar2=currentvar2; } if(sharedcount>bestsharedcount){ final int different1=(sharedcount==varcount ? 0 : countdifferentalleles(lastvar2, list)); if(different1*8<varcount){ arraylist<read> list2=map.get(lastvar2); final int varcount2=(list2==null ? 0 : list2.size()); final int different2=(sharedcount==varcount2 ? 0 : countdifferentalleles(var, list2)); if(different2*8<varcount2){ bestvar2=lastvar2; bestsharedcount=sharedcount; bestdifferent=tools.max(different1, different2); } } } rvector[0]=var; rvector[1]=list.size(); rvector[2]=bestvar2; rvector[3]=sharedcount; rvector[4]=bestdifferent; return bestvar2; } | nedru004/LibraryAnalysis2 | [
1,
0,
0,
0
] |
12,626 | @RequestMapping(path = "/0/admin/applications/{appId}/block", method = RequestMethod.POST)
public ResponseEntity<Boolean> updateDeveloperApplication(
@PathVariable String appId
) {
// Let's get the user from principal and validate the userId against it.
User user = userService.findAuthenticatedUser();
if (user == null)
return new ResponseEntity<>(HttpStatus.FORBIDDEN);
// TODO check and ensure user is Moderator role
Application application = applicationService.findApplication(appId);
if (application == null){
logger.debug("Application not found pre condition failed");
return new ResponseEntity<>(HttpStatus.PRECONDITION_FAILED);
}
// TODO what if Application was already blocked?
Boolean result = applicationService.blockApplication(application);
return new ResponseEntity<>(result, HttpStatus.OK);
} | @RequestMapping(path = "/0/admin/applications/{appId}/block", method = RequestMethod.POST)
public ResponseEntity<Boolean> updateDeveloperApplication(
@PathVariable String appId
) {
User user = userService.findAuthenticatedUser();
if (user == null)
return new ResponseEntity<>(HttpStatus.FORBIDDEN);
Application application = applicationService.findApplication(appId);
if (application == null){
logger.debug("Application not found pre condition failed");
return new ResponseEntity<>(HttpStatus.PRECONDITION_FAILED);
}
Boolean result = applicationService.blockApplication(application);
return new ResponseEntity<>(result, HttpStatus.OK);
} | @requestmapping(path = "/0/admin/applications/{appid}/block", method = requestmethod.post) public responseentity<boolean> updatedeveloperapplication( @pathvariable string appid ) { user user = userservice.findauthenticateduser(); if (user == null) return new responseentity<>(httpstatus.forbidden); application application = applicationservice.findapplication(appid); if (application == null){ logger.debug("application not found pre condition failed"); return new responseentity<>(httpstatus.precondition_failed); } boolean result = applicationservice.blockapplication(application); return new responseentity<>(result, httpstatus.ok); } | niuyiming/alpha-umi | [
1,
1,
0,
0
] |
13,141 | private boolean useSwitchingProvider() {
if (!isFastInit) {
return false;
}
switch (binding.kind()) {
case BOUND_INSTANCE:
case COMPONENT:
case COMPONENT_DEPENDENCY:
case DELEGATE:
case MEMBERS_INJECTOR: // TODO(b/199889259): Consider optimizing this for fastInit mode.
// These binding kinds avoid SwitchingProvider when the backing instance already exists,
// e.g. a component provider can use FactoryInstance.create(this).
return false;
case MULTIBOUND_SET:
case MULTIBOUND_MAP:
case OPTIONAL:
// These binding kinds avoid SwitchingProvider when their are no dependencies,
// e.g. a multibound set with no dependency can use a singleton, SetFactory.empty().
return !binding.dependencies().isEmpty();
case INJECTION:
case PROVISION:
case ASSISTED_INJECTION:
case ASSISTED_FACTORY:
case COMPONENT_PROVISION:
case SUBCOMPONENT_CREATOR:
case PRODUCTION:
case COMPONENT_PRODUCTION:
case MEMBERS_INJECTION:
return true;
}
throw new AssertionError(String.format("No such binding kind: %s", binding.kind()));
} | private boolean useSwitchingProvider() {
if (!isFastInit) {
return false;
}
switch (binding.kind()) {
case BOUND_INSTANCE:
case COMPONENT:
case COMPONENT_DEPENDENCY:
case DELEGATE:
case MEMBERS_INJECTOR:
return false;
case MULTIBOUND_SET:
case MULTIBOUND_MAP:
case OPTIONAL:
return !binding.dependencies().isEmpty();
case INJECTION:
case PROVISION:
case ASSISTED_INJECTION:
case ASSISTED_FACTORY:
case COMPONENT_PROVISION:
case SUBCOMPONENT_CREATOR:
case PRODUCTION:
case COMPONENT_PRODUCTION:
case MEMBERS_INJECTION:
return true;
}
throw new AssertionError(String.format("No such binding kind: %s", binding.kind()));
} | private boolean useswitchingprovider() { if (!isfastinit) { return false; } switch (binding.kind()) { case bound_instance: case component: case component_dependency: case delegate: case members_injector: return false; case multibound_set: case multibound_map: case optional: return !binding.dependencies().isempty(); case injection: case provision: case assisted_injection: case assisted_factory: case component_provision: case subcomponent_creator: case production: case component_production: case members_injection: return true; } throw new assertionerror(string.format("no such binding kind: %s", binding.kind())); } | priamm/dagger | [
1,
0,
0,
0
] |
13,305 | @Test
public void openPrivateRepository() {
//TODO : need to implement. first need to figure out a way to hide the credentials dialog.
} | @Test
public void openPrivateRepository() {
} | @test public void openprivaterepository() { } | mvm-sap/ADT_Frontend | [
0,
1,
0,
0
] |
13,339 | @Override
public void validateDataOnEntry()
throws DataModelException {
// TODO auto-generated method stub, to be implemented by parser
} | @Override
public void validateDataOnEntry()
throws DataModelException {
} | @override public void validatedataonentry() throws datamodelexception { } | onekeynet/OConf | [
0,
1,
0,
0
] |
13,382 | private boolean writeResponseBodyToDisk(ResponseBody body) {
try {
// todo change the file location/name according to your needs
File futureStudioIconFile = new File(mOtaSaveFilePath);
InputStream inputStream = null;
OutputStream outputStream = null;
try {
byte[] fileReader = new byte[1024];
long fileSize = body.contentLength();
long fileSizeDownloaded = 0;
inputStream = body.byteStream();
outputStream = new FileOutputStream(futureStudioIconFile);
while (true) {
int read = inputStream.read(fileReader);
if (read == -1) {
break;
}
outputStream.write(fileReader, 0, read);
fileSizeDownloaded += read;
}
outputStream.flush();
return true;
} catch (IOException e) {
return false;
} finally {
if (inputStream != null) {
inputStream.close();
}
if (outputStream != null) {
outputStream.close();
}
}
} catch (IOException e) {
return false;
}
} | private boolean writeResponseBodyToDisk(ResponseBody body) {
try {
File futureStudioIconFile = new File(mOtaSaveFilePath);
InputStream inputStream = null;
OutputStream outputStream = null;
try {
byte[] fileReader = new byte[1024];
long fileSize = body.contentLength();
long fileSizeDownloaded = 0;
inputStream = body.byteStream();
outputStream = new FileOutputStream(futureStudioIconFile);
while (true) {
int read = inputStream.read(fileReader);
if (read == -1) {
break;
}
outputStream.write(fileReader, 0, read);
fileSizeDownloaded += read;
}
outputStream.flush();
return true;
} catch (IOException e) {
return false;
} finally {
if (inputStream != null) {
inputStream.close();
}
if (outputStream != null) {
outputStream.close();
}
}
} catch (IOException e) {
return false;
}
} | private boolean writeresponsebodytodisk(responsebody body) { try { file futurestudioiconfile = new file(motasavefilepath); inputstream inputstream = null; outputstream outputstream = null; try { byte[] filereader = new byte[1024]; long filesize = body.contentlength(); long filesizedownloaded = 0; inputstream = body.bytestream(); outputstream = new fileoutputstream(futurestudioiconfile); while (true) { int read = inputstream.read(filereader); if (read == -1) { break; } outputstream.write(filereader, 0, read); filesizedownloaded += read; } outputstream.flush(); return true; } catch (ioexception e) { return false; } finally { if (inputstream != null) { inputstream.close(); } if (outputstream != null) { outputstream.close(); } } } catch (ioexception e) { return false; } } | qiaolw/AndroidTest | [
1,
0,
0,
0
] |
21,838 | @Override
public void operate(OpContext context) throws ProcessingException {
final TableDataSource table = (TableDataSource)context.get(tableInput);
int ipaTidx = super.getColumnIndex(table, SystemTierType.IPATarget.getName());
if(ipaTidx < 0) {
throw new ProcessingException(null, "Table has no " + SystemTierType.IPATarget.getName() + " column.");
}
int ipaAidx = super.getColumnIndex(table, SystemTierType.IPAActual.getName());
if(ipaAidx < 0) {
throw new ProcessingException(null, "Table has no " + SystemTierType.IPAActual.getName() + " column.");
}
// group by session if info is available
// TODO make this an option
int sessionIdx = super.getColumnIndex(table, "Session");
boolean ignoreDiacritics = isIgnoreDiacritics();
if(context.containsKey("ignoreDiacritics")) {
ignoreDiacritics = (boolean)context.get("ignoreDiacritics");
}
final Map<GroupKey, IpaTernaryTree<List<IPATranscript>>> tokenCounts =
new LinkedHashMap<>();
for(int row = 0; row < table.getRowCount(); row++) {
checkCanceled();
final Object groupVal =
(sessionIdx >= 0 ? table.getValueAt(row, sessionIdx) : "*");
final GroupKey groupKey = new GroupKey(groupVal, ignoreDiacritics);
IpaTernaryTree<List<IPATranscript>> tokenCount =
tokenCounts.get(groupKey);
if(tokenCount == null) {
tokenCount = new IpaTernaryTree<>();
tokenCounts.put(groupKey, tokenCount);
}
IPATranscript ipaT =
IPATranscript.class.cast(table.getValueAt(row, ipaTidx));
IPATranscript ipaA =
IPATranscript.class.cast(table.getValueAt(row, ipaAidx));
if(ignoreDiacritics) {
ipaT = ipaT.removePunctuation().stripDiacritics();
ipaA = ipaA.removePunctuation().stripDiacritics();
}
List<IPATranscript> productions = tokenCount.get(ipaT);
if(productions == null) {
productions = new ArrayList<>();
tokenCount.put(ipaT, productions);
}
productions.add(ipaA);
}
final DefaultTableDataSource outputTable = new DefaultTableDataSource();
for(GroupKey groupKey:tokenCounts.keySet()) {
final Object[] rowData = new Object[7];
// produce table
int numRepatedWords = 0;
int numAllCorrect = 0;
int numOneOrMoreCorrect = 0;
int numSameError = 0;
int numDifferentErrors = 0;
float sumOfAvgDistances = 0;
final IpaTernaryTree<List<IPATranscript>> tokenCount = tokenCounts.get(groupKey);
final List<IPATranscript> repeatedTokens =
tokenCount.keySet().stream()
.filter( (ipa) -> tokenCount.get(ipa).size() > 1 )
.collect(Collectors.toList());
numRepatedWords = repeatedTokens.size();
for(IPATranscript ipa:repeatedTokens) {
checkCanceled();
int numCorrect = 0;
final List<IPATranscript> productions = tokenCount.get(ipa);
final Set<IPATranscript> distinctProductions = new LinkedHashSet<>(productions);
for(IPATranscript production:tokenCount.get(ipa)) {
if(TableUtils.checkEquals(ipa, production, false, ignoreDiacritics)) {
++numCorrect;
}
}
if(numCorrect == productions.size()) {
++numAllCorrect;
} else {
if(numCorrect > 0 && numCorrect < productions.size()) {
++numOneOrMoreCorrect;
}
distinctProductions.remove(ipa);
if(distinctProductions.size() == 1) {
++numSameError;
} else {
++numDifferentErrors;
}
int totalDistance = 0;
for(IPATranscript production:productions) {
totalDistance += LevenshteinDistance.distance(ipa, production);
}
float avg = ((float)totalDistance)/((float)productions.size());
sumOfAvgDistances += avg;
}
}
// append row to table
rowData[0] = groupKey.key;
rowData[1] = numRepatedWords;
rowData[2] = numAllCorrect;
rowData[3] = numOneOrMoreCorrect;
rowData[4] = numSameError;
rowData[5] = numDifferentErrors;
rowData[6] = sumOfAvgDistances / numRepatedWords;
outputTable.addRow(rowData);
}
outputTable.setColumnTitle(0, "Session");
outputTable.setColumnTitle(1, "# Repeated IPA Target");
outputTable.setColumnTitle(2, "# All Correct");
outputTable.setColumnTitle(3, "# One or More Correct");
outputTable.setColumnTitle(4, "# Same Error");
outputTable.setColumnTitle(5, "# Different Errors");
outputTable.setColumnTitle(6, "Avg Distance");
context.put(tableOutput, outputTable);
} | @Override
public void operate(OpContext context) throws ProcessingException {
final TableDataSource table = (TableDataSource)context.get(tableInput);
int ipaTidx = super.getColumnIndex(table, SystemTierType.IPATarget.getName());
if(ipaTidx < 0) {
throw new ProcessingException(null, "Table has no " + SystemTierType.IPATarget.getName() + " column.");
}
int ipaAidx = super.getColumnIndex(table, SystemTierType.IPAActual.getName());
if(ipaAidx < 0) {
throw new ProcessingException(null, "Table has no " + SystemTierType.IPAActual.getName() + " column.");
}
int sessionIdx = super.getColumnIndex(table, "Session");
boolean ignoreDiacritics = isIgnoreDiacritics();
if(context.containsKey("ignoreDiacritics")) {
ignoreDiacritics = (boolean)context.get("ignoreDiacritics");
}
final Map<GroupKey, IpaTernaryTree<List<IPATranscript>>> tokenCounts =
new LinkedHashMap<>();
for(int row = 0; row < table.getRowCount(); row++) {
checkCanceled();
final Object groupVal =
(sessionIdx >= 0 ? table.getValueAt(row, sessionIdx) : "*");
final GroupKey groupKey = new GroupKey(groupVal, ignoreDiacritics);
IpaTernaryTree<List<IPATranscript>> tokenCount =
tokenCounts.get(groupKey);
if(tokenCount == null) {
tokenCount = new IpaTernaryTree<>();
tokenCounts.put(groupKey, tokenCount);
}
IPATranscript ipaT =
IPATranscript.class.cast(table.getValueAt(row, ipaTidx));
IPATranscript ipaA =
IPATranscript.class.cast(table.getValueAt(row, ipaAidx));
if(ignoreDiacritics) {
ipaT = ipaT.removePunctuation().stripDiacritics();
ipaA = ipaA.removePunctuation().stripDiacritics();
}
List<IPATranscript> productions = tokenCount.get(ipaT);
if(productions == null) {
productions = new ArrayList<>();
tokenCount.put(ipaT, productions);
}
productions.add(ipaA);
}
final DefaultTableDataSource outputTable = new DefaultTableDataSource();
for(GroupKey groupKey:tokenCounts.keySet()) {
final Object[] rowData = new Object[7];
int numRepatedWords = 0;
int numAllCorrect = 0;
int numOneOrMoreCorrect = 0;
int numSameError = 0;
int numDifferentErrors = 0;
float sumOfAvgDistances = 0;
final IpaTernaryTree<List<IPATranscript>> tokenCount = tokenCounts.get(groupKey);
final List<IPATranscript> repeatedTokens =
tokenCount.keySet().stream()
.filter( (ipa) -> tokenCount.get(ipa).size() > 1 )
.collect(Collectors.toList());
numRepatedWords = repeatedTokens.size();
for(IPATranscript ipa:repeatedTokens) {
checkCanceled();
int numCorrect = 0;
final List<IPATranscript> productions = tokenCount.get(ipa);
final Set<IPATranscript> distinctProductions = new LinkedHashSet<>(productions);
for(IPATranscript production:tokenCount.get(ipa)) {
if(TableUtils.checkEquals(ipa, production, false, ignoreDiacritics)) {
++numCorrect;
}
}
if(numCorrect == productions.size()) {
++numAllCorrect;
} else {
if(numCorrect > 0 && numCorrect < productions.size()) {
++numOneOrMoreCorrect;
}
distinctProductions.remove(ipa);
if(distinctProductions.size() == 1) {
++numSameError;
} else {
++numDifferentErrors;
}
int totalDistance = 0;
for(IPATranscript production:productions) {
totalDistance += LevenshteinDistance.distance(ipa, production);
}
float avg = ((float)totalDistance)/((float)productions.size());
sumOfAvgDistances += avg;
}
}
rowData[0] = groupKey.key;
rowData[1] = numRepatedWords;
rowData[2] = numAllCorrect;
rowData[3] = numOneOrMoreCorrect;
rowData[4] = numSameError;
rowData[5] = numDifferentErrors;
rowData[6] = sumOfAvgDistances / numRepatedWords;
outputTable.addRow(rowData);
}
outputTable.setColumnTitle(0, "Session");
outputTable.setColumnTitle(1, "# Repeated IPA Target");
outputTable.setColumnTitle(2, "# All Correct");
outputTable.setColumnTitle(3, "# One or More Correct");
outputTable.setColumnTitle(4, "# Same Error");
outputTable.setColumnTitle(5, "# Different Errors");
outputTable.setColumnTitle(6, "Avg Distance");
context.put(tableOutput, outputTable);
} | @override public void operate(opcontext context) throws processingexception { final tabledatasource table = (tabledatasource)context.get(tableinput); int ipatidx = super.getcolumnindex(table, systemtiertype.ipatarget.getname()); if(ipatidx < 0) { throw new processingexception(null, "table has no " + systemtiertype.ipatarget.getname() + " column."); } int ipaaidx = super.getcolumnindex(table, systemtiertype.ipaactual.getname()); if(ipaaidx < 0) { throw new processingexception(null, "table has no " + systemtiertype.ipaactual.getname() + " column."); } int sessionidx = super.getcolumnindex(table, "session"); boolean ignorediacritics = isignorediacritics(); if(context.containskey("ignorediacritics")) { ignorediacritics = (boolean)context.get("ignorediacritics"); } final map<groupkey, ipaternarytree<list<ipatranscript>>> tokencounts = new linkedhashmap<>(); for(int row = 0; row < table.getrowcount(); row++) { checkcanceled(); final object groupval = (sessionidx >= 0 ? table.getvalueat(row, sessionidx) : "*"); final groupkey groupkey = new groupkey(groupval, ignorediacritics); ipaternarytree<list<ipatranscript>> tokencount = tokencounts.get(groupkey); if(tokencount == null) { tokencount = new ipaternarytree<>(); tokencounts.put(groupkey, tokencount); } ipatranscript ipat = ipatranscript.class.cast(table.getvalueat(row, ipatidx)); ipatranscript ipaa = ipatranscript.class.cast(table.getvalueat(row, ipaaidx)); if(ignorediacritics) { ipat = ipat.removepunctuation().stripdiacritics(); ipaa = ipaa.removepunctuation().stripdiacritics(); } list<ipatranscript> productions = tokencount.get(ipat); if(productions == null) { productions = new arraylist<>(); tokencount.put(ipat, productions); } productions.add(ipaa); } final defaulttabledatasource outputtable = new defaulttabledatasource(); for(groupkey groupkey:tokencounts.keyset()) { final object[] rowdata = new object[7]; int numrepatedwords = 0; int numallcorrect = 0; int numoneormorecorrect = 0; int numsameerror = 0; int 
numdifferenterrors = 0; float sumofavgdistances = 0; final ipaternarytree<list<ipatranscript>> tokencount = tokencounts.get(groupkey); final list<ipatranscript> repeatedtokens = tokencount.keyset().stream() .filter( (ipa) -> tokencount.get(ipa).size() > 1 ) .collect(collectors.tolist()); numrepatedwords = repeatedtokens.size(); for(ipatranscript ipa:repeatedtokens) { checkcanceled(); int numcorrect = 0; final list<ipatranscript> productions = tokencount.get(ipa); final set<ipatranscript> distinctproductions = new linkedhashset<>(productions); for(ipatranscript production:tokencount.get(ipa)) { if(tableutils.checkequals(ipa, production, false, ignorediacritics)) { ++numcorrect; } } if(numcorrect == productions.size()) { ++numallcorrect; } else { if(numcorrect > 0 && numcorrect < productions.size()) { ++numoneormorecorrect; } distinctproductions.remove(ipa); if(distinctproductions.size() == 1) { ++numsameerror; } else { ++numdifferenterrors; } int totaldistance = 0; for(ipatranscript production:productions) { totaldistance += levenshteindistance.distance(ipa, production); } float avg = ((float)totaldistance)/((float)productions.size()); sumofavgdistances += avg; } } rowdata[0] = groupkey.key; rowdata[1] = numrepatedwords; rowdata[2] = numallcorrect; rowdata[3] = numoneormorecorrect; rowdata[4] = numsameerror; rowdata[5] = numdifferenterrors; rowdata[6] = sumofavgdistances / numrepatedwords; outputtable.addrow(rowdata); } outputtable.setcolumntitle(0, "session"); outputtable.setcolumntitle(1, "# repeated ipa target"); outputtable.setcolumntitle(2, "# all correct"); outputtable.setcolumntitle(3, "# one or more correct"); outputtable.setcolumntitle(4, "# same error"); outputtable.setcolumntitle(5, "# different errors"); outputtable.setcolumntitle(6, "avg distance"); context.put(tableoutput, outputtable); } | phon-ca/phon | [
1,
0,
0,
0
] |
30,178 | public static String[] split( String value, Font font, int firstLineWidth, int lineWidth ) {
return wrap(value, font, firstLineWidth, lineWidth);
} | public static String[] split( String value, Font font, int firstLineWidth, int lineWidth ) {
return wrap(value, font, firstLineWidth, lineWidth);
} | public static string[] split( string value, font font, int firstlinewidth, int linewidth ) { return wrap(value, font, firstlinewidth, linewidth); } | mobabel/MoMeGo | [
0,
0,
0,
0
] |
13,831 | private void removeDanglingTransitions(AbstractWorkflow workflow) {
if (workflow.getTransitions() == null || workflow.getTransitions().isEmpty()) {
return;
}
Set<String> activityIds = new HashSet<>();
for (Activity activity : workflow.getActivities()) {
activityIds.add(activity.getId());
// Transitions from Boundary event timers should be included as well
// todo: make generic
List<Timer> activityTimers = activity.getTimers();
if (activityTimers != null) {
for (Timer timer : activityTimers) {
if (timer instanceof BoundaryEventTimer) {
BoundaryEvent boundaryEvent = ((BoundaryEventTimer) timer).boundaryEvent;
activityIds.add(boundaryEvent.getBoundaryId());
activityIds.addAll(boundaryEvent.getToTransitionIds());
}
}
}
}
ListIterator<Transition> transitionIterator = workflow.getTransitions().listIterator();
while(transitionIterator.hasNext()){
Transition transition = transitionIterator.next();
if (!activityIds.contains(transition.getFromId()) || !activityIds.contains(transition.getToId())) {
transitionIterator.remove();
}
}
} | private void removeDanglingTransitions(AbstractWorkflow workflow) {
if (workflow.getTransitions() == null || workflow.getTransitions().isEmpty()) {
return;
}
Set<String> activityIds = new HashSet<>();
for (Activity activity : workflow.getActivities()) {
activityIds.add(activity.getId());
List<Timer> activityTimers = activity.getTimers();
if (activityTimers != null) {
for (Timer timer : activityTimers) {
if (timer instanceof BoundaryEventTimer) {
BoundaryEvent boundaryEvent = ((BoundaryEventTimer) timer).boundaryEvent;
activityIds.add(boundaryEvent.getBoundaryId());
activityIds.addAll(boundaryEvent.getToTransitionIds());
}
}
}
}
ListIterator<Transition> transitionIterator = workflow.getTransitions().listIterator();
while(transitionIterator.hasNext()){
Transition transition = transitionIterator.next();
if (!activityIds.contains(transition.getFromId()) || !activityIds.contains(transition.getToId())) {
transitionIterator.remove();
}
}
} | private void removedanglingtransitions(abstractworkflow workflow) { if (workflow.gettransitions() == null || workflow.gettransitions().isempty()) { return; } set<string> activityids = new hashset<>(); for (activity activity : workflow.getactivities()) { activityids.add(activity.getid()); list<timer> activitytimers = activity.gettimers(); if (activitytimers != null) { for (timer timer : activitytimers) { if (timer instanceof boundaryeventtimer) { boundaryevent boundaryevent = ((boundaryeventtimer) timer).boundaryevent; activityids.add(boundaryevent.getboundaryid()); activityids.addall(boundaryevent.gettotransitionids()); } } } } listiterator<transition> transitioniterator = workflow.gettransitions().listiterator(); while(transitioniterator.hasnext()){ transition transition = transitioniterator.next(); if (!activityids.contains(transition.getfromid()) || !activityids.contains(transition.gettoid())) { transitioniterator.remove(); } } } | pharod/effektif | [
1,
0,
0,
0
] |
14,220 | @Override
public void startElement (String uri, String name, String qName, Attributes atts) throws SAXException {
try {
//log.debug("S: '" + uri + "' " + qName);
// ----- <xfml> -----------------------------------------------------------
if (EL_XFML.equals(qName)) {
String version = atts.getValue("version");
if (version == null)
log.warn("No version attribute on 'xfml' element");
if (!"1.0".equals(version))
log.warn("Unsupported XFML version: " + version);
String mapurl = atts.getValue("url");
if (mapurl == null)
log.warn("No url attribute on 'xfml' element");
else {
try {
map_uri = new URILocator(mapurl);
TopicMapStoreIF store = topicmap.getStore();
if (store instanceof AbstractTopicMapStore && store.getBaseAddress() == null)
((AbstractTopicMapStore) store).setBaseAddress(map_uri);
doc_address = map_uri;
// Add this document to the list of processed documents.
processed_documents_accumulated.add(getBaseAddress());
} catch (MalformedURLException e) {
log.warn("Invalid xfml base URL: " + mapurl);
}
}
// FIXME: what to do about language?
}
// ----- <facet> ----------------------------------------------------------
else if (EL_FACET.equals(qName)) {
String id = atts.getValue("id");
// FIXME: complain if no id
current_topic = builder.makeTopic();
registerSourceLocator(current_topic, id);
keep_content = true;
}
// ----- <topic> ----------------------------------------------------------
else if (EL_TOPIC.equals(qName)) {
String id = atts.getValue("id");
// FIXME: complain if no id
current_topic = builder.makeTopic();
registerSourceLocator(current_topic, id);
String parentid = atts.getValue("parentTopicid");
if (parentid == null)
parentid = atts.getValue("facetid");
// FIXME: complain if no refs
TopicIF parent = resolveTopicRef("#" + parentid);
parentBuilder.makeAssociation(parent, current_topic);
}
// ----- <page> -----------------------------------------------------------
else if (EL_PAGE.equals(qName)) {
String url = atts.getValue("url");
// FIXME: complain if no url
current_topic = builder.makeTopic();
current_topic.addSubjectLocator(createLocator(url));
}
// ----- <occurrence>------------------------------------------------------
else if (EL_OCCURRENCE.equals(qName)) {
String topicid = atts.getValue("topicid");
// FIXME: complain if none
TopicIF subject = resolveTopicRef("#" + topicid);
occursBuilder.makeAssociation(subject, current_topic);
}
// ----- <name> -----------------------------------------------------------
// ----- <psi> ------------------------------------------------------------
// ----- <description> ----------------------------------------------------
// ----- <title> ----------------------------------------------------------
// ----- <connect> --------------------------------------------------------
else if (EL_NAME.equals(qName) || EL_PSI.equals(qName) || EL_DESCRIPTION.equals(qName) ||
EL_TITLE.equals(qName) || EL_CONNECT.equals(qName))
keep_content = true;
} catch (RuntimeException e) {
e.printStackTrace();
throw e;
}
} | @Override
public void startElement (String uri, String name, String qName, Attributes atts) throws SAXException {
try {
if (EL_XFML.equals(qName)) {
String version = atts.getValue("version");
if (version == null)
log.warn("No version attribute on 'xfml' element");
if (!"1.0".equals(version))
log.warn("Unsupported XFML version: " + version);
String mapurl = atts.getValue("url");
if (mapurl == null)
log.warn("No url attribute on 'xfml' element");
else {
try {
map_uri = new URILocator(mapurl);
TopicMapStoreIF store = topicmap.getStore();
if (store instanceof AbstractTopicMapStore && store.getBaseAddress() == null)
((AbstractTopicMapStore) store).setBaseAddress(map_uri);
doc_address = map_uri;
processed_documents_accumulated.add(getBaseAddress());
} catch (MalformedURLException e) {
log.warn("Invalid xfml base URL: " + mapurl);
}
}
}
else if (EL_FACET.equals(qName)) {
String id = atts.getValue("id");
current_topic = builder.makeTopic();
registerSourceLocator(current_topic, id);
keep_content = true;
}
else if (EL_TOPIC.equals(qName)) {
String id = atts.getValue("id");
current_topic = builder.makeTopic();
registerSourceLocator(current_topic, id);
String parentid = atts.getValue("parentTopicid");
if (parentid == null)
parentid = atts.getValue("facetid");
TopicIF parent = resolveTopicRef("#" + parentid);
parentBuilder.makeAssociation(parent, current_topic);
}
else if (EL_PAGE.equals(qName)) {
String url = atts.getValue("url");
current_topic = builder.makeTopic();
current_topic.addSubjectLocator(createLocator(url));
}
else if (EL_OCCURRENCE.equals(qName)) {
String topicid = atts.getValue("topicid");
TopicIF subject = resolveTopicRef("#" + topicid);
occursBuilder.makeAssociation(subject, current_topic);
}
else if (EL_NAME.equals(qName) || EL_PSI.equals(qName) || EL_DESCRIPTION.equals(qName) ||
EL_TITLE.equals(qName) || EL_CONNECT.equals(qName))
keep_content = true;
} catch (RuntimeException e) {
e.printStackTrace();
throw e;
}
} | @override public void startelement (string uri, string name, string qname, attributes atts) throws saxexception { try { if (el_xfml.equals(qname)) { string version = atts.getvalue("version"); if (version == null) log.warn("no version attribute on 'xfml' element"); if (!"1.0".equals(version)) log.warn("unsupported xfml version: " + version); string mapurl = atts.getvalue("url"); if (mapurl == null) log.warn("no url attribute on 'xfml' element"); else { try { map_uri = new urilocator(mapurl); topicmapstoreif store = topicmap.getstore(); if (store instanceof abstracttopicmapstore && store.getbaseaddress() == null) ((abstracttopicmapstore) store).setbaseaddress(map_uri); doc_address = map_uri; processed_documents_accumulated.add(getbaseaddress()); } catch (malformedurlexception e) { log.warn("invalid xfml base url: " + mapurl); } } } else if (el_facet.equals(qname)) { string id = atts.getvalue("id"); current_topic = builder.maketopic(); registersourcelocator(current_topic, id); keep_content = true; } else if (el_topic.equals(qname)) { string id = atts.getvalue("id"); current_topic = builder.maketopic(); registersourcelocator(current_topic, id); string parentid = atts.getvalue("parenttopicid"); if (parentid == null) parentid = atts.getvalue("facetid"); topicif parent = resolvetopicref("#" + parentid); parentbuilder.makeassociation(parent, current_topic); } else if (el_page.equals(qname)) { string url = atts.getvalue("url"); current_topic = builder.maketopic(); current_topic.addsubjectlocator(createlocator(url)); } else if (el_occurrence.equals(qname)) { string topicid = atts.getvalue("topicid"); topicif subject = resolvetopicref("#" + topicid); occursbuilder.makeassociation(subject, current_topic); } else if (el_name.equals(qname) || el_psi.equals(qname) || el_description.equals(qname) || el_title.equals(qname) || el_connect.equals(qname)) keep_content = true; } catch (runtimeexception e) { e.printstacktrace(); throw e; } } | ontopia/ontopia | [
1,
0,
1,
0
] |
22,550 | public T dequeue(){
//TODO resolve queue with single element
T result = current.get();
current = current.next();
return result;
} | public T dequeue(){
T result = current.get();
current = current.next();
return result;
} | public t dequeue(){ t result = current.get(); current = current.next(); return result; } | pcordemans/exampleAlgorithms | [
0,
1,
0,
0
] |
22,632 | public void copyResourcesToCacheIfRequired1(Activity activity){
if( ContextCompat.checkSelfPermission(activity, Manifest.permission.WRITE_EXTERNAL_STORAGE)
!= PackageManager.PERMISSION_GRANTED )
return;
String sigFilePath = resFolderPath + "/sarvamoola/aitareya.txt";
File sigFile = new File(sigFilePath);
if(!sigFile.exists())
{
for (Pair<String, String> item : mFilesToCopy) {
String fileName ;
if(item.first.indexOf('/')!= -1)
fileName = item.first.substring(item.first.lastIndexOf("/")+1);
else
fileName = item.first;
String destUrl = resFolderPath;
if(!item.second.isEmpty()) {
// if(new File(item.second).isAbsolute()){
if(File.separator.equals(item.second.substring(0,1))){ // Shortcut to check isAbsolute()
// Its an absolute url. set the destUrl to empty so that item.second is taken as is
destUrl = "";
}
destUrl = destUrl + item.second + '/';
}
destUrl = destUrl + fileName;
copyFile(item.first, destUrl);
}
}
} | public void copyResourcesToCacheIfRequired1(Activity activity){
if( ContextCompat.checkSelfPermission(activity, Manifest.permission.WRITE_EXTERNAL_STORAGE)
!= PackageManager.PERMISSION_GRANTED )
return;
String sigFilePath = resFolderPath + "/sarvamoola/aitareya.txt";
File sigFile = new File(sigFilePath);
if(!sigFile.exists())
{
for (Pair<String, String> item : mFilesToCopy) {
String fileName ;
if(item.first.indexOf('/')!= -1)
fileName = item.first.substring(item.first.lastIndexOf("/")+1);
else
fileName = item.first;
String destUrl = resFolderPath;
if(!item.second.isEmpty()) {
if(File.separator.equals(item.second.substring(0,1))){
destUrl = "";
}
destUrl = destUrl + item.second + '/';
}
destUrl = destUrl + fileName;
copyFile(item.first, destUrl);
}
}
} | public void copyresourcestocacheifrequired1(activity activity){ if( contextcompat.checkselfpermission(activity, manifest.permission.write_external_storage) != packagemanager.permission_granted ) return; string sigfilepath = resfolderpath + "/sarvamoola/aitareya.txt"; file sigfile = new file(sigfilepath); if(!sigfile.exists()) { for (pair<string, string> item : mfilestocopy) { string filename ; if(item.first.indexof('/')!= -1) filename = item.first.substring(item.first.lastindexof("/")+1); else filename = item.first; string desturl = resfolderpath; if(!item.second.isempty()) { if(file.separator.equals(item.second.substring(0,1))){ desturl = ""; } desturl = desturl + item.second + '/'; } desturl = desturl + filename; copyfile(item.first, desturl); } } } | openmuthu/jaya | [
0,
1,
0,
0
] |
14,534 | public RawMessage handleCborRequest(ChannelId channelId, byte[] payload) {
if (payload.length == 0) {
return RawMessage.error(channelId, HidError.INVALID_LEN);
}
Ctap2Method method = Ctap2Method.from(payload[0]);
byte[] params = new byte[payload.length - 1];
System.arraycopy(payload, 1, params, 0, payload.length - 1);
try {
log.debug("Received CBOR request with method {} and body {}", method, CborDecoder.decode(params));
} catch (CborException e) {
log.error("Unable to deserialize CBOR parameters", e);
return failure(channelId, Ctap2ResponseCode.INVALID_CBOR);
}
try {
switch (method) {
case GET_ASSERTION:
GetAssertionRequest gar = GetAssertionRequest.fromBytes(params);
byte[] assertion = authenticator.getAssertion(gar)
.handleError(err -> asPayload(err, Collections.emptyList()))
.elseGet(value -> asPayload(Ctap2ResponseCode.OK, value.asCborMap()));
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(assertion)
.build();
case GET_INFO:
GetInfoResponse response = authenticator.getInfo();
log.trace("Authenticator supports options {}", response);
byte[] info = asPayload(Ctap2ResponseCode.OK, response.asCborMap());
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(info)
.build();
case MAKE_CREDENTIAL:
MakeCredentialRequest request = MakeCredentialRequest.fromBytes(params);
Result<MakeCredentialResponse, Ctap2ResponseCode> result = authenticator.makeCredential(request.clientDataHash(),
request.relayingPartyId(), request.userId(), request.supportedAlgorithmIds(), request.pinAuth());
byte[] credential = result
.handleError(err -> asPayload(err, Collections.emptyList()))
.elseGet(value -> asPayload(Ctap2ResponseCode.OK, value.asCborMap()));
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(credential)
.build();
case CLIENT_PIN:
Map pinParams = (Map) CborDecoder.decode(params).get(0);
UnsignedInteger pinProtocol = (UnsignedInteger) pinParams.get(new UnsignedInteger(0x01));
// TODO: handle pinProtocol != 1 somehow
UnsignedInteger subCommand = (UnsignedInteger) pinParams.get(new UnsignedInteger(0x02));
switch(subCommand.getValue().intValue()) {
case 0x01: // get retries
List<DataItem> retries = pinRequestHandler.getRetries();
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(asPayload(Ctap2ResponseCode.OK, retries))
.build();
case 0x02: // get key
List<DataItem> key = pinRequestHandler.getKeyAgreementKey();
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(asPayload(Ctap2ResponseCode.OK, key))
.build();
case 0x03: // set pin
Map hostCoseKey = (Map) pinParams.get(new UnsignedInteger(0x03));
ByteString pinAuth = (ByteString) pinParams.get(new UnsignedInteger(0x04));
ByteString newPinEnc = (ByteString) pinParams.get(new UnsignedInteger(0x05));
Ctap2ResponseCode code = pinRequestHandler.setPin(hostCoseKey, newPinEnc.getBytes(), pinAuth.getBytes());
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(asPayload(code, Collections.emptyList()))
.build();
case 0x04: // change pin
Map hostCoseKey2 = (Map) pinParams.get(new UnsignedInteger(0x03));
ByteString pinAuth2 = (ByteString) pinParams.get(new UnsignedInteger(0x04));
ByteString newPinEnc2 = (ByteString) pinParams.get(new UnsignedInteger(0x05));
ByteString PinHashEnc = (ByteString) pinParams.get(new UnsignedInteger(0x06));
Ctap2ResponseCode code2 = pinRequestHandler.changePin(hostCoseKey2, PinHashEnc.getBytes(), newPinEnc2.getBytes(), pinAuth2.getBytes());
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(asPayload(code2, Collections.emptyList()))
.build();
case 0x05: // get pin token
Map hostCoseKey3 = (Map) pinParams.get(new UnsignedInteger(0x03));
ByteString pinHashEnc2 = (ByteString) pinParams.get(new UnsignedInteger(0x06));
Result<List<DataItem>, Ctap2ResponseCode> pinToken = pinRequestHandler.getPinToken(hostCoseKey3, pinHashEnc2.getBytes());
byte[] pinTokenPayload = pinToken.handleError(err -> asPayload(err, Collections.emptyList()))
.elseGet(val -> asPayload(Ctap2ResponseCode.OK, val));
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(pinTokenPayload)
.build();
}
break;
case RESET:
case GET_NEXT_ASSERTION:
default:
return RawMessage.error(channelId, HidError.INVALID_CMD);
}
} catch (CborException e) {
log.error("Unable to deserialize CBOR", e);
}
return failure(channelId, Ctap2ResponseCode.INVALID_CBOR);
} | public RawMessage handleCborRequest(ChannelId channelId, byte[] payload) {
if (payload.length == 0) {
return RawMessage.error(channelId, HidError.INVALID_LEN);
}
Ctap2Method method = Ctap2Method.from(payload[0]);
byte[] params = new byte[payload.length - 1];
System.arraycopy(payload, 1, params, 0, payload.length - 1);
try {
log.debug("Received CBOR request with method {} and body {}", method, CborDecoder.decode(params));
} catch (CborException e) {
log.error("Unable to deserialize CBOR parameters", e);
return failure(channelId, Ctap2ResponseCode.INVALID_CBOR);
}
try {
switch (method) {
case GET_ASSERTION:
GetAssertionRequest gar = GetAssertionRequest.fromBytes(params);
byte[] assertion = authenticator.getAssertion(gar)
.handleError(err -> asPayload(err, Collections.emptyList()))
.elseGet(value -> asPayload(Ctap2ResponseCode.OK, value.asCborMap()));
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(assertion)
.build();
case GET_INFO:
GetInfoResponse response = authenticator.getInfo();
log.trace("Authenticator supports options {}", response);
byte[] info = asPayload(Ctap2ResponseCode.OK, response.asCborMap());
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(info)
.build();
case MAKE_CREDENTIAL:
MakeCredentialRequest request = MakeCredentialRequest.fromBytes(params);
Result<MakeCredentialResponse, Ctap2ResponseCode> result = authenticator.makeCredential(request.clientDataHash(),
request.relayingPartyId(), request.userId(), request.supportedAlgorithmIds(), request.pinAuth());
byte[] credential = result
.handleError(err -> asPayload(err, Collections.emptyList()))
.elseGet(value -> asPayload(Ctap2ResponseCode.OK, value.asCborMap()));
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(credential)
.build();
case CLIENT_PIN:
Map pinParams = (Map) CborDecoder.decode(params).get(0);
UnsignedInteger pinProtocol = (UnsignedInteger) pinParams.get(new UnsignedInteger(0x01));
UnsignedInteger subCommand = (UnsignedInteger) pinParams.get(new UnsignedInteger(0x02));
switch(subCommand.getValue().intValue()) {
case 0x01:
List<DataItem> retries = pinRequestHandler.getRetries();
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(asPayload(Ctap2ResponseCode.OK, retries))
.build();
case 0x02:
List<DataItem> key = pinRequestHandler.getKeyAgreementKey();
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(asPayload(Ctap2ResponseCode.OK, key))
.build();
case 0x03:
Map hostCoseKey = (Map) pinParams.get(new UnsignedInteger(0x03));
ByteString pinAuth = (ByteString) pinParams.get(new UnsignedInteger(0x04));
ByteString newPinEnc = (ByteString) pinParams.get(new UnsignedInteger(0x05));
Ctap2ResponseCode code = pinRequestHandler.setPin(hostCoseKey, newPinEnc.getBytes(), pinAuth.getBytes());
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(asPayload(code, Collections.emptyList()))
.build();
case 0x04:
Map hostCoseKey2 = (Map) pinParams.get(new UnsignedInteger(0x03));
ByteString pinAuth2 = (ByteString) pinParams.get(new UnsignedInteger(0x04));
ByteString newPinEnc2 = (ByteString) pinParams.get(new UnsignedInteger(0x05));
ByteString PinHashEnc = (ByteString) pinParams.get(new UnsignedInteger(0x06));
Ctap2ResponseCode code2 = pinRequestHandler.changePin(hostCoseKey2, PinHashEnc.getBytes(), newPinEnc2.getBytes(), pinAuth2.getBytes());
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(asPayload(code2, Collections.emptyList()))
.build();
case 0x05:
Map hostCoseKey3 = (Map) pinParams.get(new UnsignedInteger(0x03));
ByteString pinHashEnc2 = (ByteString) pinParams.get(new UnsignedInteger(0x06));
Result<List<DataItem>, Ctap2ResponseCode> pinToken = pinRequestHandler.getPinToken(hostCoseKey3, pinHashEnc2.getBytes());
byte[] pinTokenPayload = pinToken.handleError(err -> asPayload(err, Collections.emptyList()))
.elseGet(val -> asPayload(Ctap2ResponseCode.OK, val));
return ImmutableRawMessage.builder()
.channelId(channelId)
.command(HidCommand.CBOR)
.payload(pinTokenPayload)
.build();
}
break;
case RESET:
case GET_NEXT_ASSERTION:
default:
return RawMessage.error(channelId, HidError.INVALID_CMD);
}
} catch (CborException e) {
log.error("Unable to deserialize CBOR", e);
}
return failure(channelId, Ctap2ResponseCode.INVALID_CBOR);
} | public rawmessage handlecborrequest(channelid channelid, byte[] payload) { if (payload.length == 0) { return rawmessage.error(channelid, hiderror.invalid_len); } ctap2method method = ctap2method.from(payload[0]); byte[] params = new byte[payload.length - 1]; system.arraycopy(payload, 1, params, 0, payload.length - 1); try { log.debug("received cbor request with method {} and body {}", method, cbordecoder.decode(params)); } catch (cborexception e) { log.error("unable to deserialize cbor parameters", e); return failure(channelid, ctap2responsecode.invalid_cbor); } try { switch (method) { case get_assertion: getassertionrequest gar = getassertionrequest.frombytes(params); byte[] assertion = authenticator.getassertion(gar) .handleerror(err -> aspayload(err, collections.emptylist())) .elseget(value -> aspayload(ctap2responsecode.ok, value.ascbormap())); return immutablerawmessage.builder() .channelid(channelid) .command(hidcommand.cbor) .payload(assertion) .build(); case get_info: getinforesponse response = authenticator.getinfo(); log.trace("authenticator supports options {}", response); byte[] info = aspayload(ctap2responsecode.ok, response.ascbormap()); return immutablerawmessage.builder() .channelid(channelid) .command(hidcommand.cbor) .payload(info) .build(); case make_credential: makecredentialrequest request = makecredentialrequest.frombytes(params); result<makecredentialresponse, ctap2responsecode> result = authenticator.makecredential(request.clientdatahash(), request.relayingpartyid(), request.userid(), request.supportedalgorithmids(), request.pinauth()); byte[] credential = result .handleerror(err -> aspayload(err, collections.emptylist())) .elseget(value -> aspayload(ctap2responsecode.ok, value.ascbormap())); return immutablerawmessage.builder() .channelid(channelid) .command(hidcommand.cbor) .payload(credential) .build(); case client_pin: map pinparams = (map) cbordecoder.decode(params).get(0); unsignedinteger pinprotocol = (unsignedinteger) 
pinparams.get(new unsignedinteger(0x01)); unsignedinteger subcommand = (unsignedinteger) pinparams.get(new unsignedinteger(0x02)); switch(subcommand.getvalue().intvalue()) { case 0x01: list<dataitem> retries = pinrequesthandler.getretries(); return immutablerawmessage.builder() .channelid(channelid) .command(hidcommand.cbor) .payload(aspayload(ctap2responsecode.ok, retries)) .build(); case 0x02: list<dataitem> key = pinrequesthandler.getkeyagreementkey(); return immutablerawmessage.builder() .channelid(channelid) .command(hidcommand.cbor) .payload(aspayload(ctap2responsecode.ok, key)) .build(); case 0x03: map hostcosekey = (map) pinparams.get(new unsignedinteger(0x03)); bytestring pinauth = (bytestring) pinparams.get(new unsignedinteger(0x04)); bytestring newpinenc = (bytestring) pinparams.get(new unsignedinteger(0x05)); ctap2responsecode code = pinrequesthandler.setpin(hostcosekey, newpinenc.getbytes(), pinauth.getbytes()); return immutablerawmessage.builder() .channelid(channelid) .command(hidcommand.cbor) .payload(aspayload(code, collections.emptylist())) .build(); case 0x04: map hostcosekey2 = (map) pinparams.get(new unsignedinteger(0x03)); bytestring pinauth2 = (bytestring) pinparams.get(new unsignedinteger(0x04)); bytestring newpinenc2 = (bytestring) pinparams.get(new unsignedinteger(0x05)); bytestring pinhashenc = (bytestring) pinparams.get(new unsignedinteger(0x06)); ctap2responsecode code2 = pinrequesthandler.changepin(hostcosekey2, pinhashenc.getbytes(), newpinenc2.getbytes(), pinauth2.getbytes()); return immutablerawmessage.builder() .channelid(channelid) .command(hidcommand.cbor) .payload(aspayload(code2, collections.emptylist())) .build(); case 0x05: map hostcosekey3 = (map) pinparams.get(new unsignedinteger(0x03)); bytestring pinhashenc2 = (bytestring) pinparams.get(new unsignedinteger(0x06)); result<list<dataitem>, ctap2responsecode> pintoken = pinrequesthandler.getpintoken(hostcosekey3, pinhashenc2.getbytes()); byte[] pintokenpayload = 
pintoken.handleerror(err -> aspayload(err, collections.emptylist())) .elseget(val -> aspayload(ctap2responsecode.ok, val)); return immutablerawmessage.builder() .channelid(channelid) .command(hidcommand.cbor) .payload(pintokenpayload) .build(); } break; case reset: case get_next_assertion: default: return rawmessage.error(channelid, hiderror.invalid_cmd); } } catch (cborexception e) { log.error("unable to deserialize cbor", e); } return failure(channelid, ctap2responsecode.invalid_cbor); } | mphi-rc/pi-zero-security-key | [
1,
0,
0,
0
] |
22,750 | @Test
public void acceptedRequestIsFullyApplied() throws EngineException
{
RegistrationContext defContext = new RegistrationContext(false, TriggeringMode.manualAtLogin);
initAndCreateForm(false, null);
RegistrationRequest request = getRequest();
String id3 = registrationsMan.submitRegistrationRequest(request, defContext);
registrationsMan.processRegistrationRequest(id3, null,
RegistrationRequestAction.accept, "a2", "p2");
RegistrationRequestState fromDb = registrationsMan.getRegistrationRequests().get(0);
assertEquals(request, fromDb.getRequest());
assertEquals(2, fromDb.getAdminComments().size());
assertEquals("p2", fromDb.getAdminComments().get(1).getContents());
assertEquals("a2", fromDb.getAdminComments().get(0).getContents());
assertEquals(RegistrationRequestStatus.accepted, fromDb.getStatus());
assertEquals(id3, fromDb.getRequestId());
assertNotNull(fromDb.getTimestamp());
Entity added = idsMan.getEntity(new EntityParam(new IdentityTaV(X500Identity.ID, "CN=registration test")));
assertEquals(EntityState.valid, added.getState());
assertEquals(EngineInitialization.DEFAULT_CREDENTIAL_REQUIREMENT,
added.getCredentialInfo().getCredentialRequirementId());
assertThat(fromDb.getCreatedEntityId(), is(added.getId()));
CredentialPublicInformation cpi = added.getCredentialInfo().getCredentialsState().get(
EngineInitialization.DEFAULT_CREDENTIAL);
assertEquals(LocalCredentialState.correct, cpi.getState());
EntityParam addedP = new EntityParam(added.getId());
Collection<String> groups = idsMan.getGroups(addedP).keySet();
assertTrue(groups.contains("/"));
assertTrue(groups.contains("/A"));
assertTrue(groups.contains("/B"));
Collection<AttributesClass> acs = acMan.getEntityAttributeClasses(addedP, "/");
assertEquals(1, acs.size());
assertEquals(InitializerCommon.NAMING_AC, acs.iterator().next().getName());
Collection<AttributeExt> attrs = attrsMan.getAttributes(addedP, "/", "cn");
assertEquals(1, attrs.size());
assertEquals("val", attrs.iterator().next().getValues().get(0));
attrs = attrsMan.getAttributes(addedP, "/", "email");
assertEquals(1, attrs.size());
String value = attrs.iterator().next().getValues().get(0);
VerifiableEmail ve = new VerifiableEmail(JsonUtil.parse(value)); //FIXME - this is likely wrong
assertEquals("[email protected]", ve.getValue());
assertEquals(false, ve.getConfirmationInfo().isConfirmed());
} | @Test
public void acceptedRequestIsFullyApplied() throws EngineException
{
RegistrationContext defContext = new RegistrationContext(false, TriggeringMode.manualAtLogin);
initAndCreateForm(false, null);
RegistrationRequest request = getRequest();
String id3 = registrationsMan.submitRegistrationRequest(request, defContext);
registrationsMan.processRegistrationRequest(id3, null,
RegistrationRequestAction.accept, "a2", "p2");
RegistrationRequestState fromDb = registrationsMan.getRegistrationRequests().get(0);
assertEquals(request, fromDb.getRequest());
assertEquals(2, fromDb.getAdminComments().size());
assertEquals("p2", fromDb.getAdminComments().get(1).getContents());
assertEquals("a2", fromDb.getAdminComments().get(0).getContents());
assertEquals(RegistrationRequestStatus.accepted, fromDb.getStatus());
assertEquals(id3, fromDb.getRequestId());
assertNotNull(fromDb.getTimestamp());
Entity added = idsMan.getEntity(new EntityParam(new IdentityTaV(X500Identity.ID, "CN=registration test")));
assertEquals(EntityState.valid, added.getState());
assertEquals(EngineInitialization.DEFAULT_CREDENTIAL_REQUIREMENT,
added.getCredentialInfo().getCredentialRequirementId());
assertThat(fromDb.getCreatedEntityId(), is(added.getId()));
CredentialPublicInformation cpi = added.getCredentialInfo().getCredentialsState().get(
EngineInitialization.DEFAULT_CREDENTIAL);
assertEquals(LocalCredentialState.correct, cpi.getState());
EntityParam addedP = new EntityParam(added.getId());
Collection<String> groups = idsMan.getGroups(addedP).keySet();
assertTrue(groups.contains("/"));
assertTrue(groups.contains("/A"));
assertTrue(groups.contains("/B"));
Collection<AttributesClass> acs = acMan.getEntityAttributeClasses(addedP, "/");
assertEquals(1, acs.size());
assertEquals(InitializerCommon.NAMING_AC, acs.iterator().next().getName());
Collection<AttributeExt> attrs = attrsMan.getAttributes(addedP, "/", "cn");
assertEquals(1, attrs.size());
assertEquals("val", attrs.iterator().next().getValues().get(0));
attrs = attrsMan.getAttributes(addedP, "/", "email");
assertEquals(1, attrs.size());
String value = attrs.iterator().next().getValues().get(0);
VerifiableEmail ve = new VerifiableEmail(JsonUtil.parse(value));
assertEquals("[email protected]", ve.getValue());
assertEquals(false, ve.getConfirmationInfo().isConfirmed());
} | @test public void acceptedrequestisfullyapplied() throws engineexception { registrationcontext defcontext = new registrationcontext(false, triggeringmode.manualatlogin); initandcreateform(false, null); registrationrequest request = getrequest(); string id3 = registrationsman.submitregistrationrequest(request, defcontext); registrationsman.processregistrationrequest(id3, null, registrationrequestaction.accept, "a2", "p2"); registrationrequeststate fromdb = registrationsman.getregistrationrequests().get(0); assertequals(request, fromdb.getrequest()); assertequals(2, fromdb.getadmincomments().size()); assertequals("p2", fromdb.getadmincomments().get(1).getcontents()); assertequals("a2", fromdb.getadmincomments().get(0).getcontents()); assertequals(registrationrequeststatus.accepted, fromdb.getstatus()); assertequals(id3, fromdb.getrequestid()); assertnotnull(fromdb.gettimestamp()); entity added = idsman.getentity(new entityparam(new identitytav(x500identity.id, "cn=registration test"))); assertequals(entitystate.valid, added.getstate()); assertequals(engineinitialization.default_credential_requirement, added.getcredentialinfo().getcredentialrequirementid()); assertthat(fromdb.getcreatedentityid(), is(added.getid())); credentialpublicinformation cpi = added.getcredentialinfo().getcredentialsstate().get( engineinitialization.default_credential); assertequals(localcredentialstate.correct, cpi.getstate()); entityparam addedp = new entityparam(added.getid()); collection<string> groups = idsman.getgroups(addedp).keyset(); asserttrue(groups.contains("/")); asserttrue(groups.contains("/a")); asserttrue(groups.contains("/b")); collection<attributesclass> acs = acman.getentityattributeclasses(addedp, "/"); assertequals(1, acs.size()); assertequals(initializercommon.naming_ac, acs.iterator().next().getname()); collection<attributeext> attrs = attrsman.getattributes(addedp, "/", "cn"); assertequals(1, attrs.size()); assertequals("val", 
attrs.iterator().next().getvalues().get(0)); attrs = attrsman.getattributes(addedp, "/", "email"); assertequals(1, attrs.size()); string value = attrs.iterator().next().getvalues().get(0); verifiableemail ve = new verifiableemail(jsonutil.parse(value)); assertequals("[email protected]", ve.getvalue()); assertequals(false, ve.getconfirmationinfo().isconfirmed()); } | olivier-maury/unity | [
0,
0,
1,
0
] |
23,007 | public static boolean isAlreadySubtitled( Downloadable videoDownloadable, Language subtitlesLanguage ) throws IOException, InterruptedException {
List<DownloadableFile> allFiles = DownloadableManager.getInstance().getAllFiles( videoDownloadable.getId() );
Path mainVideoFilePath;
String filename;
Optional<Path> optPath = selectMainVideoFile( allFiles );
if (optPath.isPresent()) {
mainVideoFilePath = optPath.get();
filename = mainVideoFilePath.getFileName().toString();
} else {
ErrorManager.getInstance().reportError(String.format("No video file found for %s", videoDownloadable.toString()));
return false;
}
if (subtitlesLanguage == null) {
return true;
}
if (subtitlesLanguage.getSubTokens() != null) {
// Test if the filename contains an indication of the subtitles (VOSTFR, ...)
for (String subToken : subtitlesLanguage.getSubTokens()) {
if ( StringUtils.containsIgnoreCase( filename, subToken) ) {
return true;
}
}
}
VideoMetaData metaData = getInstance().getMetaData(videoDownloadable, mainVideoFilePath);
if (metaData.getSubtitleLanguages() != null) {
for (Locale locale : metaData.getSubtitleLanguages()) {
if (locale.getLanguage().equals( subtitlesLanguage.getLocale().getLanguage() )) {
return true;
}
}
}
List<DownloadableFile> subtitleFiles =
allFiles.stream()
.filter( file -> SubtitlesFileFilter.getInstance().accept( file.getFilePath() ) )
.collect( Collectors.toList() );
String filenameWithoutExtension = filename;
if ( filenameWithoutExtension.lastIndexOf('.') > 0 ) {
filenameWithoutExtension = filenameWithoutExtension.substring( 0, filenameWithoutExtension.lastIndexOf('.'));
}
String targetFileNameRegExp = filenameWithoutExtension + "." + subtitlesLanguage.getShortName() + "\\.srt";
for (DownloadableFile subTitleFile : subtitleFiles) {
String subtitleFileName = subTitleFile.getFilePath().getFileName().toString();
if (RegExp.matches(subtitleFileName, targetFileNameRegExp )) {
return true;
}
}
// FIXME : this last test will accept any subtitle file without checking the language
if (!subtitleFiles.isEmpty()) {
return true;
}
return false;
} | public static boolean isAlreadySubtitled( Downloadable videoDownloadable, Language subtitlesLanguage ) throws IOException, InterruptedException {
List<DownloadableFile> allFiles = DownloadableManager.getInstance().getAllFiles( videoDownloadable.getId() );
Path mainVideoFilePath;
String filename;
Optional<Path> optPath = selectMainVideoFile( allFiles );
if (optPath.isPresent()) {
mainVideoFilePath = optPath.get();
filename = mainVideoFilePath.getFileName().toString();
} else {
ErrorManager.getInstance().reportError(String.format("No video file found for %s", videoDownloadable.toString()));
return false;
}
if (subtitlesLanguage == null) {
return true;
}
if (subtitlesLanguage.getSubTokens() != null) {
for (String subToken : subtitlesLanguage.getSubTokens()) {
if ( StringUtils.containsIgnoreCase( filename, subToken) ) {
return true;
}
}
}
VideoMetaData metaData = getInstance().getMetaData(videoDownloadable, mainVideoFilePath);
if (metaData.getSubtitleLanguages() != null) {
for (Locale locale : metaData.getSubtitleLanguages()) {
if (locale.getLanguage().equals( subtitlesLanguage.getLocale().getLanguage() )) {
return true;
}
}
}
List<DownloadableFile> subtitleFiles =
allFiles.stream()
.filter( file -> SubtitlesFileFilter.getInstance().accept( file.getFilePath() ) )
.collect( Collectors.toList() );
String filenameWithoutExtension = filename;
if ( filenameWithoutExtension.lastIndexOf('.') > 0 ) {
filenameWithoutExtension = filenameWithoutExtension.substring( 0, filenameWithoutExtension.lastIndexOf('.'));
}
String targetFileNameRegExp = filenameWithoutExtension + "." + subtitlesLanguage.getShortName() + "\\.srt";
for (DownloadableFile subTitleFile : subtitleFiles) {
String subtitleFileName = subTitleFile.getFilePath().getFileName().toString();
if (RegExp.matches(subtitleFileName, targetFileNameRegExp )) {
return true;
}
}
if (!subtitleFiles.isEmpty()) {
return true;
}
return false;
} | public static boolean isalreadysubtitled( downloadable videodownloadable, language subtitleslanguage ) throws ioexception, interruptedexception { list<downloadablefile> allfiles = downloadablemanager.getinstance().getallfiles( videodownloadable.getid() ); path mainvideofilepath; string filename; optional<path> optpath = selectmainvideofile( allfiles ); if (optpath.ispresent()) { mainvideofilepath = optpath.get(); filename = mainvideofilepath.getfilename().tostring(); } else { errormanager.getinstance().reporterror(string.format("no video file found for %s", videodownloadable.tostring())); return false; } if (subtitleslanguage == null) { return true; } if (subtitleslanguage.getsubtokens() != null) { for (string subtoken : subtitleslanguage.getsubtokens()) { if ( stringutils.containsignorecase( filename, subtoken) ) { return true; } } } videometadata metadata = getinstance().getmetadata(videodownloadable, mainvideofilepath); if (metadata.getsubtitlelanguages() != null) { for (locale locale : metadata.getsubtitlelanguages()) { if (locale.getlanguage().equals( subtitleslanguage.getlocale().getlanguage() )) { return true; } } } list<downloadablefile> subtitlefiles = allfiles.stream() .filter( file -> subtitlesfilefilter.getinstance().accept( file.getfilepath() ) ) .collect( collectors.tolist() ); string filenamewithoutextension = filename; if ( filenamewithoutextension.lastindexof('.') > 0 ) { filenamewithoutextension = filenamewithoutextension.substring( 0, filenamewithoutextension.lastindexof('.')); } string targetfilenameregexp = filenamewithoutextension + "." + subtitleslanguage.getshortname() + "\\.srt"; for (downloadablefile subtitlefile : subtitlefiles) { string subtitlefilename = subtitlefile.getfilepath().getfilename().tostring(); if (regexp.matches(subtitlefilename, targetfilenameregexp )) { return true; } } if (!subtitlefiles.isempty()) { return true; } return false; } | mozvip/dynamo | [
0,
0,
1,
0
] |
14,893 | public void addBlock(Set<CFANode> nodes, CFANode blockHead) {
Set<ReferencedVariable> referencedVariables = collectReferencedVariables(nodes);
Set<CFANode> callNodes = collectCallNodes(nodes);
Set<CFANode> returnNodes = collectReturnNodes(nodes);
Set<FunctionEntryNode> innerFunctionCalls = collectInnerFunctionCalls(nodes);
if (callNodes.isEmpty()) {
/* What shall we do with function, which is not called from anywhere?
* There are problems with them at partitioning building stage
*/
return;
}
CFANode registerNode = null;
for (CFANode node : callNodes) {
registerNode = node;
if (node instanceof FunctionEntryNode) {
break;
}
}
if (registerNode == null) {
//It means, that there is no entry in this block. Don't add it
return;
}
referencedVariablesMap.put(registerNode, referencedVariables);
callNodesMap.put(registerNode, callNodes);
returnNodesMap.put(registerNode, returnNodes);
innerFunctionCallsMap.put(registerNode, innerFunctionCalls);
blockNodesMap.put(registerNode, nodes);
} | public void addBlock(Set<CFANode> nodes, CFANode blockHead) {
Set<ReferencedVariable> referencedVariables = collectReferencedVariables(nodes);
Set<CFANode> callNodes = collectCallNodes(nodes);
Set<CFANode> returnNodes = collectReturnNodes(nodes);
Set<FunctionEntryNode> innerFunctionCalls = collectInnerFunctionCalls(nodes);
if (callNodes.isEmpty()) {
return;
}
CFANode registerNode = null;
for (CFANode node : callNodes) {
registerNode = node;
if (node instanceof FunctionEntryNode) {
break;
}
}
if (registerNode == null) {
return;
}
referencedVariablesMap.put(registerNode, referencedVariables);
callNodesMap.put(registerNode, callNodes);
returnNodesMap.put(registerNode, returnNodes);
innerFunctionCallsMap.put(registerNode, innerFunctionCalls);
blockNodesMap.put(registerNode, nodes);
} | public void addblock(set<cfanode> nodes, cfanode blockhead) { set<referencedvariable> referencedvariables = collectreferencedvariables(nodes); set<cfanode> callnodes = collectcallnodes(nodes); set<cfanode> returnnodes = collectreturnnodes(nodes); set<functionentrynode> innerfunctioncalls = collectinnerfunctioncalls(nodes); if (callnodes.isempty()) { return; } cfanode registernode = null; for (cfanode node : callnodes) { registernode = node; if (node instanceof functionentrynode) { break; } } if (registernode == null) { return; } referencedvariablesmap.put(registernode, referencedvariables); callnodesmap.put(registernode, callnodes); returnnodesmap.put(registernode, returnnodes); innerfunctioncallsmap.put(registernode, innerfunctioncalls); blocknodesmap.put(registernode, nodes); } | prokopk1n/cpachecker-1 | [
0,
1,
0,
0
] |
23,121 | @Test
public void isJson_shouldReturnTrue_whenStringIsJsonArray() throws JSONException {
assertTrue(validateTor.isJSON("[]"));
assertTrue(validateTor.isJSON("[{\"id\":1}]"));
} | @Test
public void isJson_shouldReturnTrue_whenStringIsJsonArray() throws JSONException {
assertTrue(validateTor.isJSON("[]"));
assertTrue(validateTor.isJSON("[{\"id\":1}]"));
} | @test public void isjson_shouldreturntrue_whenstringisjsonarray() throws jsonexception { asserttrue(validatetor.isjson("[]")); asserttrue(validatetor.isjson("[{\"id\":1}]")); } | nisrulz/validatetor | [
0,
0,
1,
0
] |
23,144 | private static FieldImpl makePointObs(FlatField timeStep, DateTime dt)
throws VisADException, RemoteException {
if (timeStep == null) {
return null;
}
SampledSet domain = getSpatialDomain(timeStep);
int numPoints = domain.getLength();
Integer1DSet points = new Integer1DSet(RealType.getRealType("index"),
numPoints);
TupleType tt = getParamType(timeStep);
TupleType rangeType = new TupleType(new MathType[] {
RealTupleType.LatitudeLongitudeAltitude,
RealType.Time,
tt });
FieldImpl ff = new FieldImpl(
new FunctionType(
((SetType) points.getType()).getDomain(),
rangeType), points);
float[][] samples = timeStep.getFloats(false);
float[][] geoVals = getEarthLocationPoints((GriddedSet) domain);
boolean isLatLon = isLatLonOrder(domain);
int latIndex = isLatLon
? 0
: 1;
int lonIndex = isLatLon
? 1
: 0;
boolean haveAlt = geoVals.length > 2;
for (int i = 0; i < numPoints; i++) {
float lat = geoVals[latIndex][i];
float lon = geoVals[lonIndex][i];
float alt = haveAlt
? geoVals[2][i]
: 0;
if ((lat == lat) && (lon == lon)) {
if ( !(alt == alt)) {
alt = 0;
}
EarthLocation el = new EarthLocationLite(lat, lon, alt);
// TODO: make this more efficient
PointObTuple pot = new PointObTuple(el, dt,
timeStep.getSample(i), rangeType);
ff.setSample(i, pot, false, false);
}
}
return ff;
} | private static FieldImpl makePointObs(FlatField timeStep, DateTime dt)
throws VisADException, RemoteException {
if (timeStep == null) {
return null;
}
SampledSet domain = getSpatialDomain(timeStep);
int numPoints = domain.getLength();
Integer1DSet points = new Integer1DSet(RealType.getRealType("index"),
numPoints);
TupleType tt = getParamType(timeStep);
TupleType rangeType = new TupleType(new MathType[] {
RealTupleType.LatitudeLongitudeAltitude,
RealType.Time,
tt });
FieldImpl ff = new FieldImpl(
new FunctionType(
((SetType) points.getType()).getDomain(),
rangeType), points);
float[][] samples = timeStep.getFloats(false);
float[][] geoVals = getEarthLocationPoints((GriddedSet) domain);
boolean isLatLon = isLatLonOrder(domain);
int latIndex = isLatLon
? 0
: 1;
int lonIndex = isLatLon
? 1
: 0;
boolean haveAlt = geoVals.length > 2;
for (int i = 0; i < numPoints; i++) {
float lat = geoVals[latIndex][i];
float lon = geoVals[lonIndex][i];
float alt = haveAlt
? geoVals[2][i]
: 0;
if ((lat == lat) && (lon == lon)) {
if ( !(alt == alt)) {
alt = 0;
}
EarthLocation el = new EarthLocationLite(lat, lon, alt);
PointObTuple pot = new PointObTuple(el, dt,
timeStep.getSample(i), rangeType);
ff.setSample(i, pot, false, false);
}
}
return ff;
} | private static fieldimpl makepointobs(flatfield timestep, datetime dt) throws visadexception, remoteexception { if (timestep == null) { return null; } sampledset domain = getspatialdomain(timestep); int numpoints = domain.getlength(); integer1dset points = new integer1dset(realtype.getrealtype("index"), numpoints); tupletype tt = getparamtype(timestep); tupletype rangetype = new tupletype(new mathtype[] { realtupletype.latitudelongitudealtitude, realtype.time, tt }); fieldimpl ff = new fieldimpl( new functiontype( ((settype) points.gettype()).getdomain(), rangetype), points); float[][] samples = timestep.getfloats(false); float[][] geovals = getearthlocationpoints((griddedset) domain); boolean islatlon = islatlonorder(domain); int latindex = islatlon ? 0 : 1; int lonindex = islatlon ? 1 : 0; boolean havealt = geovals.length > 2; for (int i = 0; i < numpoints; i++) { float lat = geovals[latindex][i]; float lon = geovals[lonindex][i]; float alt = havealt ? geovals[2][i] : 0; if ((lat == lat) && (lon == lon)) { if ( !(alt == alt)) { alt = 0; } earthlocation el = new earthlocationlite(lat, lon, alt); pointobtuple pot = new pointobtuple(el, dt, timestep.getsample(i), rangetype); ff.setsample(i, pot, false, false); } } return ff; } | oxelson/IDV | [
1,
0,
0,
0
] |
23,145 | private static FieldImpl makePointObs(FlatField timeStep, DateTime dt, String function)
throws VisADException, RemoteException {
final boolean doMax = function.equals(FUNC_MAX);
final boolean doMin = function.equals(FUNC_MIN);
if (timeStep == null) {
return null;
}
SampledSet domain = getSpatialDomain(timeStep);
int numPoints = domain.getLength();
Integer1DSet points = new Integer1DSet(RealType.getRealType("index"),
numPoints);
TupleType tt = getParamType(timeStep);
TupleType rangeType = new TupleType(new MathType[] {
RealTupleType.LatitudeLongitudeAltitude,
RealType.Time,
tt });
FieldImpl ff = new FieldImpl(
new FunctionType(
((SetType) points.getType()).getDomain(),
rangeType), points);
float[][] samples = timeStep.getFloats(false);
float[][] geoVals = getEarthLocationPoints((GriddedSet) domain);
boolean isLatLon = isLatLonOrder(domain);
int latIndex = isLatLon
? 0
: 1;
int lonIndex = isLatLon
? 1
: 0;
boolean haveAlt = geoVals.length > 2;
float pMin = Float.POSITIVE_INFINITY;
float pMax = Float.NEGATIVE_INFINITY;
int index = 0;
for (int i = 0; i < numPoints; i++) {
float lat = geoVals[latIndex][i];
float lon = geoVals[lonIndex][i];
float alt = haveAlt
? geoVals[2][i]
: 0;
if ((lat == lat) && (lon == lon)) {
if ( !(alt == alt)) {
alt = 0;
}
if(doMax && (float)timeStep.getValues(i)[0] >= pMax){
pMax = (float)timeStep.getValues(i)[0];
index = i;
} else if(doMin && (float)timeStep.getValues(i)[0] < pMin){
pMin = (float)timeStep.getValues(i)[0];
index = i;
}
}
}
float alt0 = haveAlt
? geoVals[2][index]
: 0;
EarthLocation el0 = new EarthLocationLite(geoVals[latIndex][index], geoVals[lonIndex][index], alt0);
// TODO: make this more efficient
PointObTuple pot = new PointObTuple(el0, dt,
timeStep.getSample(index), rangeType);
ff.setSample(0, pot, false, false);
return ff;
} | private static FieldImpl makePointObs(FlatField timeStep, DateTime dt, String function)
throws VisADException, RemoteException {
final boolean doMax = function.equals(FUNC_MAX);
final boolean doMin = function.equals(FUNC_MIN);
if (timeStep == null) {
return null;
}
SampledSet domain = getSpatialDomain(timeStep);
int numPoints = domain.getLength();
Integer1DSet points = new Integer1DSet(RealType.getRealType("index"),
numPoints);
TupleType tt = getParamType(timeStep);
TupleType rangeType = new TupleType(new MathType[] {
RealTupleType.LatitudeLongitudeAltitude,
RealType.Time,
tt });
FieldImpl ff = new FieldImpl(
new FunctionType(
((SetType) points.getType()).getDomain(),
rangeType), points);
float[][] samples = timeStep.getFloats(false);
float[][] geoVals = getEarthLocationPoints((GriddedSet) domain);
boolean isLatLon = isLatLonOrder(domain);
int latIndex = isLatLon
? 0
: 1;
int lonIndex = isLatLon
? 1
: 0;
boolean haveAlt = geoVals.length > 2;
float pMin = Float.POSITIVE_INFINITY;
float pMax = Float.NEGATIVE_INFINITY;
int index = 0;
for (int i = 0; i < numPoints; i++) {
float lat = geoVals[latIndex][i];
float lon = geoVals[lonIndex][i];
float alt = haveAlt
? geoVals[2][i]
: 0;
if ((lat == lat) && (lon == lon)) {
if ( !(alt == alt)) {
alt = 0;
}
if(doMax && (float)timeStep.getValues(i)[0] >= pMax){
pMax = (float)timeStep.getValues(i)[0];
index = i;
} else if(doMin && (float)timeStep.getValues(i)[0] < pMin){
pMin = (float)timeStep.getValues(i)[0];
index = i;
}
}
}
float alt0 = haveAlt
? geoVals[2][index]
: 0;
EarthLocation el0 = new EarthLocationLite(geoVals[latIndex][index], geoVals[lonIndex][index], alt0);
PointObTuple pot = new PointObTuple(el0, dt,
timeStep.getSample(index), rangeType);
ff.setSample(0, pot, false, false);
return ff;
} | private static fieldimpl makepointobs(flatfield timestep, datetime dt, string function) throws visadexception, remoteexception { final boolean domax = function.equals(func_max); final boolean domin = function.equals(func_min); if (timestep == null) { return null; } sampledset domain = getspatialdomain(timestep); int numpoints = domain.getlength(); integer1dset points = new integer1dset(realtype.getrealtype("index"), numpoints); tupletype tt = getparamtype(timestep); tupletype rangetype = new tupletype(new mathtype[] { realtupletype.latitudelongitudealtitude, realtype.time, tt }); fieldimpl ff = new fieldimpl( new functiontype( ((settype) points.gettype()).getdomain(), rangetype), points); float[][] samples = timestep.getfloats(false); float[][] geovals = getearthlocationpoints((griddedset) domain); boolean islatlon = islatlonorder(domain); int latindex = islatlon ? 0 : 1; int lonindex = islatlon ? 1 : 0; boolean havealt = geovals.length > 2; float pmin = float.positive_infinity; float pmax = float.negative_infinity; int index = 0; for (int i = 0; i < numpoints; i++) { float lat = geovals[latindex][i]; float lon = geovals[lonindex][i]; float alt = havealt ? geovals[2][i] : 0; if ((lat == lat) && (lon == lon)) { if ( !(alt == alt)) { alt = 0; } if(domax && (float)timestep.getvalues(i)[0] >= pmax){ pmax = (float)timestep.getvalues(i)[0]; index = i; } else if(domin && (float)timestep.getvalues(i)[0] < pmin){ pmin = (float)timestep.getvalues(i)[0]; index = i; } } } float alt0 = havealt ? geovals[2][index] : 0; earthlocation el0 = new earthlocationlite(geovals[latindex][index], geovals[lonindex][index], alt0); pointobtuple pot = new pointobtuple(el0, dt, timestep.getsample(index), rangetype); ff.setsample(0, pot, false, false); return ff; } | oxelson/IDV | [
1,
0,
0,
0
] |
23,146 | private static FieldImpl makePointObs(FlatField timeStep, DateTime dt, double min, double max)
throws VisADException, RemoteException {
if (timeStep == null) {
return null;
}
SampledSet domain = getSpatialDomain(timeStep);
int numPoints = domain.getLength();
Integer1DSet points = new Integer1DSet(RealType.getRealType("index"),
numPoints);
TupleType tt = getParamType(timeStep);
TupleType rangeType = new TupleType(new MathType[] {
RealTupleType.LatitudeLongitudeAltitude,
RealType.Time,
tt });
FieldImpl ff = new FieldImpl(
new FunctionType(
((SetType) points.getType()).getDomain(),
rangeType), points);
float[][] samples = timeStep.getFloats(false);
float[][] geoVals = getEarthLocationPoints((GriddedSet) domain);
boolean isLatLon = isLatLonOrder(domain);
int latIndex = isLatLon
? 0
: 1;
int lonIndex = isLatLon
? 1
: 0;
boolean haveAlt = geoVals.length > 2;
for (int i = 0; i < numPoints; i++) {
float lat = geoVals[latIndex][i];
float lon = geoVals[lonIndex][i];
float alt = haveAlt
? geoVals[2][i]
: 0;
if ((lat == lat) && (lon == lon)) {
if ( !(alt == alt)) {
alt = 0;
}
if(timeStep.getValues(i)[0] >= min && timeStep.getValues(i)[0] < max) {
EarthLocation el = new EarthLocationLite(lat, lon, alt);
// TODO: make this more efficient
PointObTuple pot = new PointObTuple(el, dt,
timeStep.getSample(i), rangeType);
ff.setSample(i, pot, false, false);
}
}
}
return ff;
} | private static FieldImpl makePointObs(FlatField timeStep, DateTime dt, double min, double max)
throws VisADException, RemoteException {
if (timeStep == null) {
return null;
}
SampledSet domain = getSpatialDomain(timeStep);
int numPoints = domain.getLength();
Integer1DSet points = new Integer1DSet(RealType.getRealType("index"),
numPoints);
TupleType tt = getParamType(timeStep);
TupleType rangeType = new TupleType(new MathType[] {
RealTupleType.LatitudeLongitudeAltitude,
RealType.Time,
tt });
FieldImpl ff = new FieldImpl(
new FunctionType(
((SetType) points.getType()).getDomain(),
rangeType), points);
float[][] samples = timeStep.getFloats(false);
float[][] geoVals = getEarthLocationPoints((GriddedSet) domain);
boolean isLatLon = isLatLonOrder(domain);
int latIndex = isLatLon
? 0
: 1;
int lonIndex = isLatLon
? 1
: 0;
boolean haveAlt = geoVals.length > 2;
for (int i = 0; i < numPoints; i++) {
float lat = geoVals[latIndex][i];
float lon = geoVals[lonIndex][i];
float alt = haveAlt
? geoVals[2][i]
: 0;
if ((lat == lat) && (lon == lon)) {
if ( !(alt == alt)) {
alt = 0;
}
if(timeStep.getValues(i)[0] >= min && timeStep.getValues(i)[0] < max) {
EarthLocation el = new EarthLocationLite(lat, lon, alt);
PointObTuple pot = new PointObTuple(el, dt,
timeStep.getSample(i), rangeType);
ff.setSample(i, pot, false, false);
}
}
}
return ff;
} | private static fieldimpl makepointobs(flatfield timestep, datetime dt, double min, double max) throws visadexception, remoteexception { if (timestep == null) { return null; } sampledset domain = getspatialdomain(timestep); int numpoints = domain.getlength(); integer1dset points = new integer1dset(realtype.getrealtype("index"), numpoints); tupletype tt = getparamtype(timestep); tupletype rangetype = new tupletype(new mathtype[] { realtupletype.latitudelongitudealtitude, realtype.time, tt }); fieldimpl ff = new fieldimpl( new functiontype( ((settype) points.gettype()).getdomain(), rangetype), points); float[][] samples = timestep.getfloats(false); float[][] geovals = getearthlocationpoints((griddedset) domain); boolean islatlon = islatlonorder(domain); int latindex = islatlon ? 0 : 1; int lonindex = islatlon ? 1 : 0; boolean havealt = geovals.length > 2; for (int i = 0; i < numpoints; i++) { float lat = geovals[latindex][i]; float lon = geovals[lonindex][i]; float alt = havealt ? geovals[2][i] : 0; if ((lat == lat) && (lon == lon)) { if ( !(alt == alt)) { alt = 0; } if(timestep.getvalues(i)[0] >= min && timestep.getvalues(i)[0] < max) { earthlocation el = new earthlocationlite(lat, lon, alt); pointobtuple pot = new pointobtuple(el, dt, timestep.getsample(i), rangetype); ff.setsample(i, pot, false, false); } } } return ff; } | oxelson/IDV | [
1,
0,
0,
0
] |
14,974 | @Nullable
@Deprecated
public static Character max(@Nullable CharSequence charSequence) {
return maxOrNull(charSequence);
} | @Nullable
@Deprecated
public static Character max(@Nullable CharSequence charSequence) {
return maxOrNull(charSequence);
} | @nullable @deprecated public static character max(@nullable charsequence charsequence) { return maxornull(charsequence); } | panpf/tools4j | [
0,
0,
0,
0
] |
14,975 | @Nullable
@Deprecated
public static <R extends Comparable<R>> Character maxBy(@Nullable CharSequence charSequence, @NotNull Transformer<Character, R> selector) {
return maxByOrNull(charSequence, selector);
} | @Nullable
@Deprecated
public static <R extends Comparable<R>> Character maxBy(@Nullable CharSequence charSequence, @NotNull Transformer<Character, R> selector) {
return maxByOrNull(charSequence, selector);
} | @nullable @deprecated public static <r extends comparable<r>> character maxby(@nullable charsequence charsequence, @notnull transformer<character, r> selector) { return maxbyornull(charsequence, selector); } | panpf/tools4j | [
0,
0,
0,
0
] |
14,976 | @Nullable
@Deprecated
public static Character min(@Nullable CharSequence charSequence) {
return minOrNull(charSequence);
} | @Nullable
@Deprecated
public static Character min(@Nullable CharSequence charSequence) {
return minOrNull(charSequence);
} | @nullable @deprecated public static character min(@nullable charsequence charsequence) { return minornull(charsequence); } | panpf/tools4j | [
0,
0,
0,
0
] |
14,977 | @Nullable
@Deprecated
public static <R extends Comparable<R>> Character minBy(@Nullable CharSequence charSequence, @NotNull Transformer<Character, R> selector) {
return minByOrNull(charSequence, selector);
} | @Nullable
@Deprecated
public static <R extends Comparable<R>> Character minBy(@Nullable CharSequence charSequence, @NotNull Transformer<Character, R> selector) {
return minByOrNull(charSequence, selector);
} | @nullable @deprecated public static <r extends comparable<r>> character minby(@nullable charsequence charsequence, @notnull transformer<character, r> selector) { return minbyornull(charsequence, selector); } | panpf/tools4j | [
0,
0,
0,
0
] |
23,182 | public static void main(String[] args) {
// TODO code application logic her
// Hola Onasis
int num = 0;
// Holaaaaa
// QQUE ONDASS
switch(num){
case 1:
case 2:
default:
}
//
} | public static void main(String[] args) {
int num = 0;
switch(num){
case 1:
case 2:
default:
}
} | public static void main(string[] args) { int num = 0; switch(num){ case 1: case 2: default: } } | monicastle/Prueba | [
0,
1,
0,
0
] |
31,571 | private Object convertNullValue(RdsTableColumnDetails theColumnDetail) {
if (theColumnDetail.isNullable()) {
return null;
}
// For integral number type things, need to convert "null" into 0.
if (theColumnDetail.isIntegralType()) {
return new Integer(0);
}
// For decimal number type things, need to convert "null" into 0.0.
if (theColumnDetail.isDecimalType()) {
return new Double(0.0);
}
return null; // What can we do in this case :(
} | private Object convertNullValue(RdsTableColumnDetails theColumnDetail) {
if (theColumnDetail.isNullable()) {
return null;
}
if (theColumnDetail.isIntegralType()) {
return new Integer(0);
}
if (theColumnDetail.isDecimalType()) {
return new Double(0.0);
}
return null;
} | private object convertnullvalue(rdstablecolumndetails thecolumndetail) { if (thecolumndetail.isnullable()) { return null; } if (thecolumndetail.isintegraltype()) { return new integer(0); } if (thecolumndetail.isdecimaltype()) { return new double(0.0); } return null; } | ogreflow/ogre | [
1,
0,
0,
0
] |
23,738 | @Override
public void validateStatusOnUpdate(Post post) {
// TODO Implement a method
} | @Override
public void validateStatusOnUpdate(Post post) {
} | @override public void validatestatusonupdate(post post) { } | reckue/note-api | [
0,
1,
0,
0
] |
32,092 | private boolean validParameters(List<String> parameters) {
// TODO: throw a parsing error exception on the YAML parser when params size is less than 2.
return CollectionUtils.isNotEmpty(parameters) && parameters.size() >= 2;
} | private boolean validParameters(List<String> parameters) {
return CollectionUtils.isNotEmpty(parameters) && parameters.size() >= 2;
} | private boolean validparameters(list<string> parameters) { return collectionutils.isnotempty(parameters) && parameters.size() >= 2; } | ngouagna/alien4cloud-cloudify2-provider | [
0,
1,
0,
0
] |
15,833 | private String get_location ()
{
final StackTraceElement[] stackTrace = new Throwable ().getStackTrace ();
for (int i = 2 /* skip this+serviceChanged */; i < stackTrace.length; i++)
{
StackTraceElement ste = stackTrace [i];
String class_name = ste.getClassName ();
if (!class_name.startsWith ("org.apache.felix.framework.") // Skip framework (todo: add more fws)
&& !class_name.equals (this.getClass ().getName ())) // Skip ourselves
{
return (ste.toString ());
}
}
return ("StackTraceElement:Unknown");
} | private String get_location ()
{
final StackTraceElement[] stackTrace = new Throwable ().getStackTrace ();
for (int i = 2; i < stackTrace.length; i++)
{
StackTraceElement ste = stackTrace [i];
String class_name = ste.getClassName ();
if (!class_name.startsWith ("org.apache.felix.framework.")
&& !class_name.equals (this.getClass ().getName ()))
{
return (ste.toString ());
}
}
return ("StackTraceElement:Unknown");
} | private string get_location () { final stacktraceelement[] stacktrace = new throwable ().getstacktrace (); for (int i = 2; i < stacktrace.length; i++) { stacktraceelement ste = stacktrace [i]; string class_name = ste.getclassname (); if (!class_name.startswith ("org.apache.felix.framework.") && !class_name.equals (this.getclass ().getname ())) { return (ste.tostring ()); } } return ("stacktraceelement:unknown"); } | neoautus/lucidj-core | [
1,
0,
0,
0
] |
7,735 | public static void main(String[] args) {
// TODO(pdex): why are we making our own threadpool?
final List<ListeningExecutorService> l =
Spez.ServicePoolGenerator(THREAD_POOL, "Spanner Tailer Event Worker");
final SpannerTailer tailer = new SpannerTailer(THREAD_POOL, 200000000);
// final EventPublisher publisher = new EventPublisher(PROJECT_NAME, TOPIC_NAME);
final ThreadLocal<EventPublisher> publisher =
ThreadLocal.withInitial(
() -> {
return new EventPublisher(PROJECT_NAME, TOPIC_NAME);
});
final ExecutorService workStealingPool = Executors.newWorkStealingPool();
final ListeningExecutorService forkJoinPool =
MoreExecutors.listeningDecorator(workStealingPool);
final Map<String, String> metadata = new HashMap<>();
final CountDownLatch doneSignal = new CountDownLatch(1);
final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
// Populate CDC Metadata
metadata.put("SrcDatabase", DB_NAME);
metadata.put("SrcTablename", TABLE_NAME);
metadata.put("DstTopic", TOPIC_NAME);
final ListenableFuture<SchemaSet> schemaSetFuture =
tailer.getSchema(PROJECT_NAME, INSTANCE_NAME, DB_NAME, TABLE_NAME);
Futures.addCallback(
schemaSetFuture,
new FutureCallback<SchemaSet>() {
@Override
public void onSuccess(SchemaSet schemaSet) {
log.info("Successfully Processed the Table Schema. Starting the poller now ...");
if (DISRUPTOR) {
DisruptorHandler handler =
new DisruptorHandler(schemaSet, publisher, metadata, l.get(0));
handler.start();
tailer.setRingBuffer(handler.getRingBuffer());
ScheduledFuture<?> result =
tailer.start(
2,
500,
PROJECT_NAME,
INSTANCE_NAME,
DB_NAME,
TABLE_NAME,
"lpts_table",
schemaSet.tsColName(),
"2000");
doneSignal.countDown();
} else {
WorkStealingHandler handler =
new WorkStealingHandler(scheduler, schemaSet, publisher, metadata);
tailer.start(
handler,
schemaSet.tsColName(),
l.size(),
THREAD_POOL,
500,
PROJECT_NAME,
INSTANCE_NAME,
DB_NAME,
TABLE_NAME,
"lpts_table",
"2000",
500,
500);
scheduler.scheduleAtFixedRate(
() -> {
handler.logStats();
tailer.logStats();
},
30,
30,
TimeUnit.SECONDS);
doneSignal.countDown();
}
}
@Override
public void onFailure(Throwable t) {
log.error("Unable to process schema", t);
System.exit(-1);
}
},
l.get(l.size() % THREAD_POOL));
try {
log.debug("Dumping all known Loggers");
LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
java.util.Iterator<ch.qos.logback.classic.Logger> it = lc.getLoggerList().iterator();
while (it.hasNext()) {
ch.qos.logback.classic.Logger thisLog = it.next();
log.debug("name: {} status: {}", thisLog.getName(), thisLog.getLevel());
}
log.info("waiting for doneSignal");
doneSignal.await();
} catch (InterruptedException e) {
e.printStackTrace();
}
} | public static void main(String[] args) {
final List<ListeningExecutorService> l =
Spez.ServicePoolGenerator(THREAD_POOL, "Spanner Tailer Event Worker");
final SpannerTailer tailer = new SpannerTailer(THREAD_POOL, 200000000);
final ThreadLocal<EventPublisher> publisher =
ThreadLocal.withInitial(
() -> {
return new EventPublisher(PROJECT_NAME, TOPIC_NAME);
});
final ExecutorService workStealingPool = Executors.newWorkStealingPool();
final ListeningExecutorService forkJoinPool =
MoreExecutors.listeningDecorator(workStealingPool);
final Map<String, String> metadata = new HashMap<>();
final CountDownLatch doneSignal = new CountDownLatch(1);
final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
metadata.put("SrcDatabase", DB_NAME);
metadata.put("SrcTablename", TABLE_NAME);
metadata.put("DstTopic", TOPIC_NAME);
final ListenableFuture<SchemaSet> schemaSetFuture =
tailer.getSchema(PROJECT_NAME, INSTANCE_NAME, DB_NAME, TABLE_NAME);
Futures.addCallback(
schemaSetFuture,
new FutureCallback<SchemaSet>() {
@Override
public void onSuccess(SchemaSet schemaSet) {
log.info("Successfully Processed the Table Schema. Starting the poller now ...");
if (DISRUPTOR) {
DisruptorHandler handler =
new DisruptorHandler(schemaSet, publisher, metadata, l.get(0));
handler.start();
tailer.setRingBuffer(handler.getRingBuffer());
ScheduledFuture<?> result =
tailer.start(
2,
500,
PROJECT_NAME,
INSTANCE_NAME,
DB_NAME,
TABLE_NAME,
"lpts_table",
schemaSet.tsColName(),
"2000");
doneSignal.countDown();
} else {
WorkStealingHandler handler =
new WorkStealingHandler(scheduler, schemaSet, publisher, metadata);
tailer.start(
handler,
schemaSet.tsColName(),
l.size(),
THREAD_POOL,
500,
PROJECT_NAME,
INSTANCE_NAME,
DB_NAME,
TABLE_NAME,
"lpts_table",
"2000",
500,
500);
scheduler.scheduleAtFixedRate(
() -> {
handler.logStats();
tailer.logStats();
},
30,
30,
TimeUnit.SECONDS);
doneSignal.countDown();
}
}
@Override
public void onFailure(Throwable t) {
log.error("Unable to process schema", t);
System.exit(-1);
}
},
l.get(l.size() % THREAD_POOL));
try {
log.debug("Dumping all known Loggers");
LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
java.util.Iterator<ch.qos.logback.classic.Logger> it = lc.getLoggerList().iterator();
while (it.hasNext()) {
ch.qos.logback.classic.Logger thisLog = it.next();
log.debug("name: {} status: {}", thisLog.getName(), thisLog.getLevel());
}
log.info("waiting for doneSignal");
doneSignal.await();
} catch (InterruptedException e) {
e.printStackTrace();
}
} | public static void main(string[] args) { final list<listeningexecutorservice> l = spez.servicepoolgenerator(thread_pool, "spanner tailer event worker"); final spannertailer tailer = new spannertailer(thread_pool, 200000000); final threadlocal<eventpublisher> publisher = threadlocal.withinitial( () -> { return new eventpublisher(project_name, topic_name); }); final executorservice workstealingpool = executors.newworkstealingpool(); final listeningexecutorservice forkjoinpool = moreexecutors.listeningdecorator(workstealingpool); final map<string, string> metadata = new hashmap<>(); final countdownlatch donesignal = new countdownlatch(1); final scheduledexecutorservice scheduler = executors.newscheduledthreadpool(1); metadata.put("srcdatabase", db_name); metadata.put("srctablename", table_name); metadata.put("dsttopic", topic_name); final listenablefuture<schemaset> schemasetfuture = tailer.getschema(project_name, instance_name, db_name, table_name); futures.addcallback( schemasetfuture, new futurecallback<schemaset>() { @override public void onsuccess(schemaset schemaset) { log.info("successfully processed the table schema. 
starting the poller now ..."); if (disruptor) { disruptorhandler handler = new disruptorhandler(schemaset, publisher, metadata, l.get(0)); handler.start(); tailer.setringbuffer(handler.getringbuffer()); scheduledfuture<?> result = tailer.start( 2, 500, project_name, instance_name, db_name, table_name, "lpts_table", schemaset.tscolname(), "2000"); donesignal.countdown(); } else { workstealinghandler handler = new workstealinghandler(scheduler, schemaset, publisher, metadata); tailer.start( handler, schemaset.tscolname(), l.size(), thread_pool, 500, project_name, instance_name, db_name, table_name, "lpts_table", "2000", 500, 500); scheduler.scheduleatfixedrate( () -> { handler.logstats(); tailer.logstats(); }, 30, 30, timeunit.seconds); donesignal.countdown(); } } @override public void onfailure(throwable t) { log.error("unable to process schema", t); system.exit(-1); } }, l.get(l.size() % thread_pool)); try { log.debug("dumping all known loggers"); loggercontext lc = (loggercontext) loggerfactory.getiloggerfactory(); java.util.iterator<ch.qos.logback.classic.logger> it = lc.getloggerlist().iterator(); while (it.hasnext()) { ch.qos.logback.classic.logger thislog = it.next(); log.debug("name: {} status: {}", thislog.getname(), thislog.getlevel()); } log.info("waiting for donesignal"); donesignal.await(); } catch (interruptedexception e) { e.printstacktrace(); } } | olavloite/spez2 | [
1,
0,
0,
0
] |
24,154 | @Test
public void trouble_maker() {
Mono<String> trouble = null; //todo: change this line
StepVerifier.create(trouble)
.expectError(IllegalStateException.class)
.verify();
} | @Test
public void trouble_maker() {
Mono<String> trouble = null;
StepVerifier.create(trouble)
.expectError(IllegalStateException.class)
.verify();
} | @test public void trouble_maker() { mono<string> trouble = null; stepverifier.create(trouble) .expecterror(illegalstateexception.class) .verify(); } | nicolasbelfis/practical-reactor | [
1,
0,
0,
0
] |
24,160 | @Test
public void repeat() {
AtomicInteger counter = new AtomicInteger(0);
Flux<Integer> repeated = null; //todo: change this line
System.out.println("Repeat: ");
StepVerifier.create(repeated.doOnNext(System.out::println))
.expectNext(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
.verifyComplete();
} | @Test
public void repeat() {
AtomicInteger counter = new AtomicInteger(0);
Flux<Integer> repeated = null;
System.out.println("Repeat: ");
StepVerifier.create(repeated.doOnNext(System.out::println))
.expectNext(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
.verifyComplete();
} | @test public void repeat() { atomicinteger counter = new atomicinteger(0); flux<integer> repeated = null; system.out.println("repeat: "); stepverifier.create(repeated.doonnext(system.out::println)) .expectnext(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) .verifycomplete(); } | nicolasbelfis/practical-reactor | [
1,
0,
0,
0
] |
7,823 | private static double EvalSegmentedFn(final cmsToneCurve g, double R)
{
int i;
for (i = g.nSegments-1; i >= 0 ; --i)
{
// Check for domain
if ((R > g.Segments[i].x0) && (R <= g.Segments[i].x1))
{
// Type == 0 means segment is sampled
if (g.Segments[i].Type == 0)
{
float R1 = (float)(R - g.Segments[i].x0);
float[] Out = new float[1];
// Setup the table (TODO: clean that)
g.SegInterp[i].Table = g.Segments[i].SampledPoints;
g.SegInterp[i].Interpolation.getFloat().run(new float[]{R1}, Out, g.SegInterp[i]);
return Out[0];
}
else
{
return g.Evals[i].run(g.Segments[i].Type, g.Segments[i].Params, R);
}
}
}
return MINUS_INF;
} | private static double EvalSegmentedFn(final cmsToneCurve g, double R)
{
int i;
for (i = g.nSegments-1; i >= 0 ; --i)
{
if ((R > g.Segments[i].x0) && (R <= g.Segments[i].x1))
{
if (g.Segments[i].Type == 0)
{
float R1 = (float)(R - g.Segments[i].x0);
float[] Out = new float[1];
g.SegInterp[i].Table = g.Segments[i].SampledPoints;
g.SegInterp[i].Interpolation.getFloat().run(new float[]{R1}, Out, g.SegInterp[i]);
return Out[0];
}
else
{
return g.Evals[i].run(g.Segments[i].Type, g.Segments[i].Params, R);
}
}
}
return MINUS_INF;
} | private static double evalsegmentedfn(final cmstonecurve g, double r) { int i; for (i = g.nsegments-1; i >= 0 ; --i) { if ((r > g.segments[i].x0) && (r <= g.segments[i].x1)) { if (g.segments[i].type == 0) { float r1 = (float)(r - g.segments[i].x0); float[] out = new float[1]; g.seginterp[i].table = g.segments[i].sampledpoints; g.seginterp[i].interpolation.getfloat().run(new float[]{r1}, out, g.seginterp[i]); return out[0]; } else { return g.evals[i].run(g.segments[i].type, g.segments[i].params, r); } } } return minus_inf; } | rcmaniac25/little-cms-for-blackberry | [
1,
0,
0,
0
] |
16,061 | @Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData,
final ExecutionContext exec) throws Exception {
DataColumnSpec[] allColSpecs = new DataColumnSpec[1];
allColSpecs[0] =
new DataColumnSpecCreator("url", StringCell.TYPE).createSpec();
DataTableSpec outputSpec = new DataTableSpec(allColSpecs);
// System.out.println("calling execute"+resultUrl.getStringValue());
// the execution context will provide us with storage capacity, in this
// case a data container to which we will add rows sequentially
// Note, this container can also handle arbitrary big data tables, it
// will buffer to disc if necessary.
BufferedDataContainer container = exec.createDataContainer(outputSpec);
DataCell[] cells = new DataCell[1];
String urlTemplate = resultUrl.getStringValue();
Iterator<DataRow> varIt = inData[0].iterator();
DataTableSpec dts = inData[0].getDataTableSpec();
String[] colNames = dts.getColumnNames();
if(varIt.hasNext()){
//while(varIt.hasNext()){
DataRow current = varIt.next();
Iterator<DataCell> cellIt = current.iterator();
int colCount = 0;
while(cellIt.hasNext()){
DataCell currentCell= cellIt.next();
String curVar = colNames[colCount];
String curVal = currentCell.toString();
//System.out.println("curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
if(urlTemplate.indexOf(curVar)==-1){
System.out.println("1curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
urlTemplate = urlTemplate+"&"+curVar+"="+URLEncoder.encode(currentCell.toString(),"UTF-8");
System.out.println("2curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
}else{
System.out.println("3curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
String configValue = "";
if(urlTemplate.indexOf("&"+curVar)!=-1){
configValue = urlTemplate.substring(urlTemplate.indexOf("&"+curVar)+curVar.length()+2,urlTemplate.indexOf("]",urlTemplate.indexOf(curVar)));
}else if(urlTemplate.indexOf("?"+curVar)!=-1){
configValue = urlTemplate.substring(urlTemplate.indexOf("?"+curVar)+curVar.length()+2,urlTemplate.indexOf("]",urlTemplate.indexOf(curVar)));
}
System.out.println("4curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
if(configValue.endsWith("&")){
configValue = configValue.substring(0,configValue.length()-1); //get rid of the &
}
if(colCount==0){
urlTemplate +="&";
}
System.out.println("5curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
if(!configValue.equals("")){
System.out.println("6curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
System.out.println("config var: "+curVar+" with configValue:"+ configValue+ ", is overwritten with value: "+currentCell.toString() );
urlTemplate = urlTemplate.replaceAll(Pattern.quote("["+curVar+"="+configValue+"&]"), curVar+"="+URLEncoder.encode(currentCell.toString(),"UTF-8")+"&");
urlTemplate = urlTemplate.replaceAll(Pattern.quote("["+curVar+"="+configValue+"]"), curVar+"="+URLEncoder.encode(currentCell.toString(),"UTF-8"));
}else{
//System.out.println("now we are here");
System.out.println("7curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
if(urlTemplate.endsWith("&")||urlTemplate.endsWith("?")){
//urlTemplate = urlTemplate.substring(0,urlTemplate.length()-1);
urlTemplate = urlTemplate+curVar+"="+URLEncoder.encode(curVal,"UTF-8");
}else{
urlTemplate = "&"+urlTemplate+curVar+"="+URLEncoder.encode(curVal,"UTF-8");
}
//urlTemplate = urlTemplate.replaceAll(Pattern.quote("["+curVar+"="+configValue+"&]"), curVar+"="+URLEncoder.encode(currentCell.toString(),"UTF-8")+"&");
//urlTemplate = urlTemplate.replaceAll(Pattern.quote("["+curVar+"="+configValue+"]"), curVar+"="+URLEncoder.encode(currentCell.toString(),"UTF-8"));
//System.out.println("urlTemplate before:"+doubleStr+", and after:"+urlTemplate);
}
}
colCount++;
}
}
System.out.println("yy:"+urlTemplate);
//urlTemplate =urlTemplate.replaceAll("\\[.*?\\]", "");
urlTemplate =urlTemplate.replaceAll("\\[", "");
urlTemplate =urlTemplate.replaceAll("\\]", "");
urlTemplate =urlTemplate.replaceAll("%28", "(");
urlTemplate =urlTemplate.replaceAll("%29", ")");
//urlTemplate =urlTemplate.replaceAll("%5B", "["); //strange that this is needed....API issue?
//urlTemplate =urlTemplate.replaceAll("%5D", "]");
System.out.println("yy:"+urlTemplate);
if(urlTemplate.endsWith("&")){
urlTemplate = urlTemplate.substring(0,urlTemplate.length()-1);
}
//urlTemplate = urlTemplate.substring(0, urlTemplate.length()-1);
//System.out.println("urlTemplate became "+urlTemplate);
cells[0] = new StringCell(urlTemplate);
DataRow row = new DefaultRow("aboutCell", cells);
container.addRowToTable(row);
container.close();
BufferedDataTable out = container.getTable();
// TODO: Return a BufferedDataTable for each output port
return new BufferedDataTable[]{out};
} | @Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData,
final ExecutionContext exec) throws Exception {
DataColumnSpec[] allColSpecs = new DataColumnSpec[1];
allColSpecs[0] =
new DataColumnSpecCreator("url", StringCell.TYPE).createSpec();
DataTableSpec outputSpec = new DataTableSpec(allColSpecs);
BufferedDataContainer container = exec.createDataContainer(outputSpec);
DataCell[] cells = new DataCell[1];
String urlTemplate = resultUrl.getStringValue();
Iterator<DataRow> varIt = inData[0].iterator();
DataTableSpec dts = inData[0].getDataTableSpec();
String[] colNames = dts.getColumnNames();
if(varIt.hasNext()){
DataRow current = varIt.next();
Iterator<DataCell> cellIt = current.iterator();
int colCount = 0;
while(cellIt.hasNext()){
DataCell currentCell= cellIt.next();
String curVar = colNames[colCount];
String curVal = currentCell.toString();
if(urlTemplate.indexOf(curVar)==-1){
System.out.println("1curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
urlTemplate = urlTemplate+"&"+curVar+"="+URLEncoder.encode(currentCell.toString(),"UTF-8");
System.out.println("2curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
}else{
System.out.println("3curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
String configValue = "";
if(urlTemplate.indexOf("&"+curVar)!=-1){
configValue = urlTemplate.substring(urlTemplate.indexOf("&"+curVar)+curVar.length()+2,urlTemplate.indexOf("]",urlTemplate.indexOf(curVar)));
}else if(urlTemplate.indexOf("?"+curVar)!=-1){
configValue = urlTemplate.substring(urlTemplate.indexOf("?"+curVar)+curVar.length()+2,urlTemplate.indexOf("]",urlTemplate.indexOf(curVar)));
}
System.out.println("4curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
if(configValue.endsWith("&")){
configValue = configValue.substring(0,configValue.length()-1);
}
if(colCount==0){
urlTemplate +="&";
}
System.out.println("5curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
if(!configValue.equals("")){
System.out.println("6curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
System.out.println("config var: "+curVar+" with configValue:"+ configValue+ ", is overwritten with value: "+currentCell.toString() );
urlTemplate = urlTemplate.replaceAll(Pattern.quote("["+curVar+"="+configValue+"&]"), curVar+"="+URLEncoder.encode(currentCell.toString(),"UTF-8")+"&");
urlTemplate = urlTemplate.replaceAll(Pattern.quote("["+curVar+"="+configValue+"]"), curVar+"="+URLEncoder.encode(currentCell.toString(),"UTF-8"));
}else{
System.out.println("7curvar:"+curVar+", curval:"+curVal+", urlTemplate:"+urlTemplate);
if(urlTemplate.endsWith("&")||urlTemplate.endsWith("?")){
urlTemplate = urlTemplate+curVar+"="+URLEncoder.encode(curVal,"UTF-8");
}else{
urlTemplate = "&"+urlTemplate+curVar+"="+URLEncoder.encode(curVal,"UTF-8");
}
}
}
colCount++;
}
}
System.out.println("yy:"+urlTemplate);
urlTemplate =urlTemplate.replaceAll("\\[", "");
urlTemplate =urlTemplate.replaceAll("\\]", "");
urlTemplate =urlTemplate.replaceAll("%28", "(");
urlTemplate =urlTemplate.replaceAll("%29", ")");
System.out.println("yy:"+urlTemplate);
if(urlTemplate.endsWith("&")){
urlTemplate = urlTemplate.substring(0,urlTemplate.length()-1);
}
cells[0] = new StringCell(urlTemplate);
DataRow row = new DefaultRow("aboutCell", cells);
container.addRowToTable(row);
container.close();
BufferedDataTable out = container.getTable();
return new BufferedDataTable[]{out};
} | @override protected buffereddatatable[] execute(final buffereddatatable[] indata, final executioncontext exec) throws exception { datacolumnspec[] allcolspecs = new datacolumnspec[1]; allcolspecs[0] = new datacolumnspeccreator("url", stringcell.type).createspec(); datatablespec outputspec = new datatablespec(allcolspecs); buffereddatacontainer container = exec.createdatacontainer(outputspec); datacell[] cells = new datacell[1]; string urltemplate = resulturl.getstringvalue(); iterator<datarow> varit = indata[0].iterator(); datatablespec dts = indata[0].getdatatablespec(); string[] colnames = dts.getcolumnnames(); if(varit.hasnext()){ datarow current = varit.next(); iterator<datacell> cellit = current.iterator(); int colcount = 0; while(cellit.hasnext()){ datacell currentcell= cellit.next(); string curvar = colnames[colcount]; string curval = currentcell.tostring(); if(urltemplate.indexof(curvar)==-1){ system.out.println("1curvar:"+curvar+", curval:"+curval+", urltemplate:"+urltemplate); urltemplate = urltemplate+"&"+curvar+"="+urlencoder.encode(currentcell.tostring(),"utf-8"); system.out.println("2curvar:"+curvar+", curval:"+curval+", urltemplate:"+urltemplate); }else{ system.out.println("3curvar:"+curvar+", curval:"+curval+", urltemplate:"+urltemplate); string configvalue = ""; if(urltemplate.indexof("&"+curvar)!=-1){ configvalue = urltemplate.substring(urltemplate.indexof("&"+curvar)+curvar.length()+2,urltemplate.indexof("]",urltemplate.indexof(curvar))); }else if(urltemplate.indexof("?"+curvar)!=-1){ configvalue = urltemplate.substring(urltemplate.indexof("?"+curvar)+curvar.length()+2,urltemplate.indexof("]",urltemplate.indexof(curvar))); } system.out.println("4curvar:"+curvar+", curval:"+curval+", urltemplate:"+urltemplate); if(configvalue.endswith("&")){ configvalue = configvalue.substring(0,configvalue.length()-1); } if(colcount==0){ urltemplate +="&"; } system.out.println("5curvar:"+curvar+", curval:"+curval+", urltemplate:"+urltemplate); 
if(!configvalue.equals("")){ system.out.println("6curvar:"+curvar+", curval:"+curval+", urltemplate:"+urltemplate); system.out.println("config var: "+curvar+" with configvalue:"+ configvalue+ ", is overwritten with value: "+currentcell.tostring() ); urltemplate = urltemplate.replaceall(pattern.quote("["+curvar+"="+configvalue+"&]"), curvar+"="+urlencoder.encode(currentcell.tostring(),"utf-8")+"&"); urltemplate = urltemplate.replaceall(pattern.quote("["+curvar+"="+configvalue+"]"), curvar+"="+urlencoder.encode(currentcell.tostring(),"utf-8")); }else{ system.out.println("7curvar:"+curvar+", curval:"+curval+", urltemplate:"+urltemplate); if(urltemplate.endswith("&")||urltemplate.endswith("?")){ urltemplate = urltemplate+curvar+"="+urlencoder.encode(curval,"utf-8"); }else{ urltemplate = "&"+urltemplate+curvar+"="+urlencoder.encode(curval,"utf-8"); } } } colcount++; } } system.out.println("yy:"+urltemplate); urltemplate =urltemplate.replaceall("\\[", ""); urltemplate =urltemplate.replaceall("\\]", ""); urltemplate =urltemplate.replaceall("%28", "("); urltemplate =urltemplate.replaceall("%29", ")"); system.out.println("yy:"+urltemplate); if(urltemplate.endswith("&")){ urltemplate = urltemplate.substring(0,urltemplate.length()-1); } cells[0] = new stringcell(urltemplate); datarow row = new defaultrow("aboutcell", cells); container.addrowtotable(row); container.close(); buffereddatatable out = container.gettable(); return new buffereddatatable[]{out}; } | openphacts/OPS-Knime | [
0,
1,
1,
0
] |
16,088 | private void parseNamespaceOrTypes() throws FeatureNotSupportedException {
while (!curtok.equals(EOF)) {
// todo: account for assembly attributes
parsePossibleAttributes(true);
if (curAttributes.size() > 0) {
for (AttributeNode an : curAttributes) {
cu.attributes.add(an);
}
curAttributes.clear();
}
// can be usingDirectives, globalAttribs, or NamespaceMembersDecls
// NamespaceMembersDecls include namespaces, class, struct, interface, enum, delegate
switch (curtok.id) {
case TokenID.Using:
// using directive
parseUsingDirectives();
break;
case TokenID.New:
case TokenID.Public:
case TokenID.Protected:
case TokenID.Partial:
case TokenID.Static:
case TokenID.Internal:
case TokenID.Private:
case TokenID.Abstract:
case TokenID.Sealed:
//parseTypeModifier();
curmods |= modMap.get(curtok.id);
advance();
break;
case TokenID.Namespace:
parseNamespace();
break;
case TokenID.Class:
parseClass();
break;
case TokenID.Struct:
parseStruct();
break;
case TokenID.Interface:
parseInterface();
break;
case TokenID.Enum:
parseEnum();
break;
case TokenID.Delegate:
parseDelegate();
break;
case TokenID.Semi:
advance();
break;
default:
return;
}
}
} | private void parseNamespaceOrTypes() throws FeatureNotSupportedException {
while (!curtok.equals(EOF)) {
parsePossibleAttributes(true);
if (curAttributes.size() > 0) {
for (AttributeNode an : curAttributes) {
cu.attributes.add(an);
}
curAttributes.clear();
}
switch (curtok.id) {
case TokenID.Using:
parseUsingDirectives();
break;
case TokenID.New:
case TokenID.Public:
case TokenID.Protected:
case TokenID.Partial:
case TokenID.Static:
case TokenID.Internal:
case TokenID.Private:
case TokenID.Abstract:
case TokenID.Sealed:
curmods |= modMap.get(curtok.id);
advance();
break;
case TokenID.Namespace:
parseNamespace();
break;
case TokenID.Class:
parseClass();
break;
case TokenID.Struct:
parseStruct();
break;
case TokenID.Interface:
parseInterface();
break;
case TokenID.Enum:
parseEnum();
break;
case TokenID.Delegate:
parseDelegate();
break;
case TokenID.Semi:
advance();
break;
default:
return;
}
}
} | private void parsenamespaceortypes() throws featurenotsupportedexception { while (!curtok.equals(eof)) { parsepossibleattributes(true); if (curattributes.size() > 0) { for (attributenode an : curattributes) { cu.attributes.add(an); } curattributes.clear(); } switch (curtok.id) { case tokenid.using: parseusingdirectives(); break; case tokenid.new: case tokenid.public: case tokenid.protected: case tokenid.partial: case tokenid.static: case tokenid.internal: case tokenid.private: case tokenid.abstract: case tokenid.sealed: curmods |= modmap.get(curtok.id); advance(); break; case tokenid.namespace: parsenamespace(); break; case tokenid.class: parseclass(); break; case tokenid.struct: parsestruct(); break; case tokenid.interface: parseinterface(); break; case tokenid.enum: parseenum(); break; case tokenid.delegate: parsedelegate(); break; case tokenid.semi: advance(); break; default: return; } } } | osoco/comprendiendo-software-creando-herramientas | [
0,
1,
0,
0
] |
16,089 | private PPNode parsePreprocessorDirective() throws FeatureNotSupportedException {
PPNode result = null;
int startLine = lineCount;
inPPDirective = true;
advance(); // over hash
IdentifierExpression ie = parseIdentifierOrKeyword();
String ppKind = ie.Identifier[0];
byte id = PreprocessorID.Empty;
if (preprocessor.containsKey(ppKind)) {
id = preprocessor.get(ppKind);
} else {
ReportError("Preprocessor directive must be valid identifier, rather than \"" + ppKind + "\".");
}
switch (id) {
case PreprocessorID.Define:
// conditional-symbol pp-newline
IdentifierExpression def = parseIdentifierOrKeyword();
if (!ppDefs.containsKey(def.Identifier[0])) {
ppDefs.put(def.Identifier[0], PreprocessorID.Empty);
}
result = new PPDefineNode(def);
break;
case PreprocessorID.Undef:
// conditional-symbol pp-newline
IdentifierExpression undef = parseIdentifierOrKeyword();
if (ppDefs.containsKey(undef.Identifier[0])) {
ppDefs.remove(undef.Identifier[0]);
}
result = new PPDefineNode(undef);
break;
case PreprocessorID.If:
// pp-expression pp-newline conditional-section(opt)
if (curtok.id == TokenID.LParen) {
advance();
}
int startCount = lineCount;
ppCondition = false;
// todo: account for true, false, ||, &&, ==, !=, !
IdentifierExpression ifexpr = parseIdentifierOrKeyword();
if (ppDefs.containsKey(ifexpr.Identifier[0])) {
ppCondition = true;
}
//result = new PPIfNode(ParseExpressionToNewline());
if (curtok.id == TokenID.RParen) {
advance();
}
if (ppCondition == false) {
// skip this block
SkipToElseOrEndIf();
}
break;
case PreprocessorID.Elif:
// pp-expression pp-newline conditional-section(opt)
SkipToEOL(startLine);
break;
case PreprocessorID.Else:
// pp-newline conditional-section(opt)
if (ppCondition == true) {
// skip this block
SkipToElseOrEndIf();
}
break;
case PreprocessorID.Endif:
// pp-newline
result = new PPEndIfNode();
ppCondition = false;
break;
case PreprocessorID.Line:
// line-indicator pp-newline
SkipToEOL(startLine);
break;
case PreprocessorID.Error:
// pp-message
SkipToEOL(startLine);
break;
case PreprocessorID.Warning:
// pp-message
SkipToEOL(startLine);
break;
case PreprocessorID.Region:
// pp-message
SkipToEOL(startLine);
break;
case PreprocessorID.Endregion:
// pp-message
SkipToEOL(startLine);
break;
case PreprocessorID.Pragma:
// pp-message
SkipToEOL(startLine);
break;
default:
break;
}
inPPDirective = false;
return result;
} | private PPNode parsePreprocessorDirective() throws FeatureNotSupportedException {
PPNode result = null;
int startLine = lineCount;
inPPDirective = true;
advance();
IdentifierExpression ie = parseIdentifierOrKeyword();
String ppKind = ie.Identifier[0];
byte id = PreprocessorID.Empty;
if (preprocessor.containsKey(ppKind)) {
id = preprocessor.get(ppKind);
} else {
ReportError("Preprocessor directive must be valid identifier, rather than \"" + ppKind + "\".");
}
switch (id) {
case PreprocessorID.Define:
IdentifierExpression def = parseIdentifierOrKeyword();
if (!ppDefs.containsKey(def.Identifier[0])) {
ppDefs.put(def.Identifier[0], PreprocessorID.Empty);
}
result = new PPDefineNode(def);
break;
case PreprocessorID.Undef:
IdentifierExpression undef = parseIdentifierOrKeyword();
if (ppDefs.containsKey(undef.Identifier[0])) {
ppDefs.remove(undef.Identifier[0]);
}
result = new PPDefineNode(undef);
break;
case PreprocessorID.If:
if (curtok.id == TokenID.LParen) {
advance();
}
int startCount = lineCount;
ppCondition = false;
IdentifierExpression ifexpr = parseIdentifierOrKeyword();
if (ppDefs.containsKey(ifexpr.Identifier[0])) {
ppCondition = true;
}
if (curtok.id == TokenID.RParen) {
advance();
}
if (ppCondition == false) {
SkipToElseOrEndIf();
}
break;
case PreprocessorID.Elif:
SkipToEOL(startLine);
break;
case PreprocessorID.Else:
if (ppCondition == true) {
SkipToElseOrEndIf();
}
break;
case PreprocessorID.Endif:
result = new PPEndIfNode();
ppCondition = false;
break;
case PreprocessorID.Line:
SkipToEOL(startLine);
break;
case PreprocessorID.Error:
SkipToEOL(startLine);
break;
case PreprocessorID.Warning:
SkipToEOL(startLine);
break;
case PreprocessorID.Region:
SkipToEOL(startLine);
break;
case PreprocessorID.Endregion:
SkipToEOL(startLine);
break;
case PreprocessorID.Pragma:
SkipToEOL(startLine);
break;
default:
break;
}
inPPDirective = false;
return result;
} | private ppnode parsepreprocessordirective() throws featurenotsupportedexception { ppnode result = null; int startline = linecount; inppdirective = true; advance(); identifierexpression ie = parseidentifierorkeyword(); string ppkind = ie.identifier[0]; byte id = preprocessorid.empty; if (preprocessor.containskey(ppkind)) { id = preprocessor.get(ppkind); } else { reporterror("preprocessor directive must be valid identifier, rather than \"" + ppkind + "\"."); } switch (id) { case preprocessorid.define: identifierexpression def = parseidentifierorkeyword(); if (!ppdefs.containskey(def.identifier[0])) { ppdefs.put(def.identifier[0], preprocessorid.empty); } result = new ppdefinenode(def); break; case preprocessorid.undef: identifierexpression undef = parseidentifierorkeyword(); if (ppdefs.containskey(undef.identifier[0])) { ppdefs.remove(undef.identifier[0]); } result = new ppdefinenode(undef); break; case preprocessorid.if: if (curtok.id == tokenid.lparen) { advance(); } int startcount = linecount; ppcondition = false; identifierexpression ifexpr = parseidentifierorkeyword(); if (ppdefs.containskey(ifexpr.identifier[0])) { ppcondition = true; } if (curtok.id == tokenid.rparen) { advance(); } if (ppcondition == false) { skiptoelseorendif(); } break; case preprocessorid.elif: skiptoeol(startline); break; case preprocessorid.else: if (ppcondition == true) { skiptoelseorendif(); } break; case preprocessorid.endif: result = new ppendifnode(); ppcondition = false; break; case preprocessorid.line: skiptoeol(startline); break; case preprocessorid.error: skiptoeol(startline); break; case preprocessorid.warning: skiptoeol(startline); break; case preprocessorid.region: skiptoeol(startline); break; case preprocessorid.endregion: skiptoeol(startline); break; case preprocessorid.pragma: skiptoeol(startline); break; default: break; } inppdirective = false; return result; } | osoco/comprendiendo-software-creando-herramientas | [
0,
1,
0,
0
] |
16,146 | public void create(File dstFolder, List<ExcelTable> tables, ExcelSproutOptions options) {
Random rnd = new Random(options.getRandomSeed());
//there are many tables
//we have to decide: what table to what sheet, and what sheet to what workbook
//we should not repeat tables about the same class, so there are possible groups
//maybe a workbook should be full to have the best expected.ttl from the original model
//here we calculate: what table to what workbook
List<List<ExcelTable>> workbookClusters = getWorkbookClusters(tables, rnd);
//now we have to decide which table will be in which sheet
//the cleanest way is to have a table per sheet
//TODO make this variable so more messy version are possible
List<List<List<ExcelTable>>> workbookSheetTables = new ArrayList<>();
for (List<ExcelTable> workbookTables : workbookClusters) {
List<List<ExcelTable>> sheetTables = new ArrayList<>();
for (ExcelTable tbl : workbookTables) {
List<ExcelTable> sheet = Arrays.asList(tbl);
sheetTables.add(sheet);
}
workbookSheetTables.add(sheetTables);
}
int maxDigits = String.valueOf(workbookSheetTables.size() - 1).length();
Map<List<ExcelTable>, ExcelGeneratorSheetConfig> sheetConfigMap = new HashMap<>();
//now we use the ExcelGenerator to generate the workbooks
ExcelGenerator excelGenerator = new ExcelGenerator();
for (int i = 0; i < workbookSheetTables.size(); i++) {
List<List<ExcelTable>> sheets = workbookSheetTables.get(i);
//create a config for this workbook
ExcelGeneratorWorkbookConfig workbookConf = new ExcelGeneratorWorkbookConfig();
//TODO configurable
workbookConf.setFileName("workbook.xlsx");
for (List<ExcelTable> sheet : sheets) {
ExcelGeneratorSheetConfig sheetConf = new ExcelGeneratorSheetConfig();
sheetConfigMap.put(sheet, sheetConf);
StringBuilder sheetNameSB = new StringBuilder();
//TODO a second table in the sheet means we maybe have to move the offset
// so that it will not overlap
for (int k = 0; k < sheet.size(); k++) {
ExcelTable excelTable = sheet.get(k);
ExcelGeneratorTableConfig tableConf = new ExcelGeneratorTableConfig();
//TODO maybe make a getSingleOrDefault method
Point offset = (Point) excelTable.getSetup().getOrDefault("offset", new Point(0, 0));
tableConf.setOffset(offset);
//draw the ExcelCell matrix from ExcelTable
tableConf.setStaticCellDrawer(d -> {
//it uses the tableConf offset
d.exceltable(excelTable, options);
});
sheetConf.getTableConfigs().add(tableConf);
//TODO if only one table with one class: add provenance sheetname -> insts a class. (for all insts)
ClassConfig classConfig = excelTable.getSetup().getOrThrow("classes", ClassConfig.class);
if (classConfig.hasLabel()) {
sheetNameSB.append(classConfig.getLabel());
} else {
throw new RuntimeException("ClassConfig should give a label to name the sheet");
}
//in one sheet multiple tables could be existing
if (k != sheet.size() - 1) {
sheetNameSB.append(" & ");
}
}
//sheet name comes from table content
sheetConf.setSheetName(sheetNameSB.toString());
workbookConf.getSheetConfigs().add(sheetConf);
}//per sheet
ExcelGeneratorResult result = excelGenerator.generate(null, workbookConf);
//System.out.println("save workbook " + i);
//no extra folder when only one workbook
File workbookFolder = workbookSheetTables.size() == 1 ? dstFolder : new File(dstFolder, String.format("%0" + maxDigits + "d", i));
result.saveExcel(workbookFolder);
//write provenance =================================================
Model expectedModel = null;
Model provenanceModel = null;
CSVPrinter provenanceCSV = null;
if (options.isWriteExpectedModel()) {
expectedModel = ModelFactory.createDefaultModel();
expectedModel.setNsPrefixes(options.getPrefixMapping());
expectedModel.setNsPrefixes(PrefixMapping.Standard);
}
if (options.isWriteProvenanceModel()) {
provenanceModel = ModelFactory.createDefaultModel();
provenanceModel.setNsPrefixes(options.getPrefixMapping());
provenanceModel.setNsPrefix("prov", PROV.NS);
provenanceModel.setNsPrefix("csvw", CSVW.NS);
provenanceModel.setNsPrefix("ss", SS.NS);
provenanceModel.setNsPrefixes(PrefixMapping.Standard);
}
if (options.isWriteProvenanceCSV()) {
try {
provenanceCSV = CSVFormat.DEFAULT.print(
new OutputStreamWriter(
new GZIPOutputStream(
new FileOutputStream(
new File(workbookFolder, "provenance.csv.gz")
))));
csvProvenanceHeader(provenanceCSV);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
//used in rdfProvenance for fast lookup reified statements
Map<Statement, Resource> stmt2res = new HashMap<>();
//need here sheetname for provenance
for (List<ExcelTable> sheet : sheets) {
ExcelGeneratorSheetConfig sheetConfig = sheetConfigMap.get(sheet);
for (ExcelTable table : sheet) {
for (Entry<ExcelCell, Provenance> cell2prov : table.getCellProvMap().entrySet()) {
ExcelCell cell = cell2prov.getKey();
Provenance prov = cell2prov.getValue();
if (cell.getAddress() == null) {
//this was a temporary cell created for a merge
//in TableGenerator putMultipleObjects method
continue;
}
if (prov.getStatements().isEmpty()) {
//no provenance information for this cell
continue;
}
if (options.isWriteExpectedModel()) {
expectedModel.add(prov.getStatements());
}
if (options.isWriteProvenanceModel()) {
rdfProvenance(cell, sheetConfig.getSheetName(), prov, stmt2res, provenanceModel);
}
if (options.isWriteProvenanceCSV()) {
csvProvenance(cell, sheetConfig.getSheetName(), prov, provenanceCSV, provenanceModel);
}
}
}
}
//write to files
if (options.isWriteExpectedModel()) {
File file = new File(workbookFolder, "expected.ttl.gz");
try (OutputStream os = file.getName().endsWith("gz") ? new GZIPOutputStream(new FileOutputStream(file)) : new FileOutputStream(file)) {
expectedModel.write(os, "TTL");
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
if (options.isWriteProvenanceModel()) {
File file = new File(workbookFolder, "provenance.ttl.gz");
try (OutputStream os = file.getName().endsWith("gz") ? new GZIPOutputStream(new FileOutputStream(file)) : new FileOutputStream(file)) {
provenanceModel.write(os, "TTL");
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
if (options.isWriteProvenanceCSV()) {
try {
provenanceCSV.close();
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
if (options.isWriteGenerationSummaryJson()) {
//key is sheet name
JSONObject perSheetPatternUsage = new JSONObject();
JsonUtility.forceLinkedHashMap(perSheetPatternUsage);
for (List<ExcelTable> sheet : sheets) {
ExcelGeneratorSheetConfig sheetConfig = sheetConfigMap.get(sheet);
//count how often
Map<String, Map<Object, Integer>> pattern2value2count = new HashMap<>();
for (ExcelTable tbl : sheet) {
for (Entry<ExcelCell, Provenance> entry : tbl.getCellProvMap().entrySet()) {
//skip the ones with no prov and no address (temp cells)
if (entry.getKey().getAddress() == null || entry.getValue().getStatements().isEmpty()) {
continue;
}
for (Entry<String, Object> e : entry.getValue().getUsedPatterns().entrySet()) {
Object val = e.getValue();
if(val instanceof JSONArray) {
//because json array hash is always different
val = val.toString();
}
Map<Object, Integer> value2count = pattern2value2count.computeIfAbsent(e.getKey(), k -> new HashMap<>());
value2count.put(val, value2count.getOrDefault(val, 0) + 1);
}
}
}
JSONObject patternUsage = new JSONObject();
JsonUtility.forceLinkedHashMap(patternUsage);
List<Entry<String, Map<Object, Integer>>> pattern2value2countList = new ArrayList<>(pattern2value2count.entrySet());
pattern2value2countList.sort((a,b) -> a.getKey().compareTo(b.getKey()));
for (Entry<String, Map<Object, Integer>> pattern2value2countEntry : pattern2value2countList) {
JSONArray array = new JSONArray();
for (Entry<Object, Integer> e : pattern2value2countEntry.getValue().entrySet()) {
JSONObject v2c = new JSONObject();
JsonUtility.forceLinkedHashMap(v2c);
v2c.put("value", e.getKey());
v2c.put("count", e.getValue());
array.put(v2c);
}
patternUsage.put(pattern2value2countEntry.getKey(), array);
}
perSheetPatternUsage.put(sheetConfig.getSheetName(), patternUsage);
}
options.getGenerationSummary().put("patternUsagePerSheet", perSheetPatternUsage);
File file = new File(workbookFolder, "summary.json");
try {
FileUtils.writeStringToFile(file, options.getGenerationSummary().toString(2), StandardCharsets.UTF_8);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
}//per workbook
} | public void create(File dstFolder, List<ExcelTable> tables, ExcelSproutOptions options) {
Random rnd = new Random(options.getRandomSeed());
List<List<ExcelTable>> workbookClusters = getWorkbookClusters(tables, rnd);
List<List<List<ExcelTable>>> workbookSheetTables = new ArrayList<>();
for (List<ExcelTable> workbookTables : workbookClusters) {
List<List<ExcelTable>> sheetTables = new ArrayList<>();
for (ExcelTable tbl : workbookTables) {
List<ExcelTable> sheet = Arrays.asList(tbl);
sheetTables.add(sheet);
}
workbookSheetTables.add(sheetTables);
}
int maxDigits = String.valueOf(workbookSheetTables.size() - 1).length();
Map<List<ExcelTable>, ExcelGeneratorSheetConfig> sheetConfigMap = new HashMap<>();
ExcelGenerator excelGenerator = new ExcelGenerator();
for (int i = 0; i < workbookSheetTables.size(); i++) {
List<List<ExcelTable>> sheets = workbookSheetTables.get(i);
ExcelGeneratorWorkbookConfig workbookConf = new ExcelGeneratorWorkbookConfig();
workbookConf.setFileName("workbook.xlsx");
for (List<ExcelTable> sheet : sheets) {
ExcelGeneratorSheetConfig sheetConf = new ExcelGeneratorSheetConfig();
sheetConfigMap.put(sheet, sheetConf);
StringBuilder sheetNameSB = new StringBuilder();
for (int k = 0; k < sheet.size(); k++) {
ExcelTable excelTable = sheet.get(k);
ExcelGeneratorTableConfig tableConf = new ExcelGeneratorTableConfig();
Point offset = (Point) excelTable.getSetup().getOrDefault("offset", new Point(0, 0));
tableConf.setOffset(offset);
tableConf.setStaticCellDrawer(d -> {
d.exceltable(excelTable, options);
});
sheetConf.getTableConfigs().add(tableConf);
ClassConfig classConfig = excelTable.getSetup().getOrThrow("classes", ClassConfig.class);
if (classConfig.hasLabel()) {
sheetNameSB.append(classConfig.getLabel());
} else {
throw new RuntimeException("ClassConfig should give a label to name the sheet");
}
if (k != sheet.size() - 1) {
sheetNameSB.append(" & ");
}
}
sheetConf.setSheetName(sheetNameSB.toString());
workbookConf.getSheetConfigs().add(sheetConf);
ExcelGeneratorResult result = excelGenerator.generate(null, workbookConf);
File workbookFolder = workbookSheetTables.size() == 1 ? dstFolder : new File(dstFolder, String.format("%0" + maxDigits + "d", i));
result.saveExcel(workbookFolder);
Model expectedModel = null;
Model provenanceModel = null;
CSVPrinter provenanceCSV = null;
if (options.isWriteExpectedModel()) {
expectedModel = ModelFactory.createDefaultModel();
expectedModel.setNsPrefixes(options.getPrefixMapping());
expectedModel.setNsPrefixes(PrefixMapping.Standard);
}
if (options.isWriteProvenanceModel()) {
provenanceModel = ModelFactory.createDefaultModel();
provenanceModel.setNsPrefixes(options.getPrefixMapping());
provenanceModel.setNsPrefix("prov", PROV.NS);
provenanceModel.setNsPrefix("csvw", CSVW.NS);
provenanceModel.setNsPrefix("ss", SS.NS);
provenanceModel.setNsPrefixes(PrefixMapping.Standard);
}
if (options.isWriteProvenanceCSV()) {
try {
provenanceCSV = CSVFormat.DEFAULT.print(
new OutputStreamWriter(
new GZIPOutputStream(
new FileOutputStream(
new File(workbookFolder, "provenance.csv.gz")
))));
csvProvenanceHeader(provenanceCSV);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
Map<Statement, Resource> stmt2res = new HashMap<>();
for (List<ExcelTable> sheet : sheets) {
ExcelGeneratorSheetConfig sheetConfig = sheetConfigMap.get(sheet);
for (ExcelTable table : sheet) {
for (Entry<ExcelCell, Provenance> cell2prov : table.getCellProvMap().entrySet()) {
ExcelCell cell = cell2prov.getKey();
Provenance prov = cell2prov.getValue();
if (cell.getAddress() == null) {
continue;
}
if (prov.getStatements().isEmpty()) {
continue;
}
if (options.isWriteExpectedModel()) {
expectedModel.add(prov.getStatements());
}
if (options.isWriteProvenanceModel()) {
rdfProvenance(cell, sheetConfig.getSheetName(), prov, stmt2res, provenanceModel);
}
if (options.isWriteProvenanceCSV()) {
csvProvenance(cell, sheetConfig.getSheetName(), prov, provenanceCSV, provenanceModel);
}
}
}
}
if (options.isWriteExpectedModel()) {
File file = new File(workbookFolder, "expected.ttl.gz");
try (OutputStream os = file.getName().endsWith("gz") ? new GZIPOutputStream(new FileOutputStream(file)) : new FileOutputStream(file)) {
expectedModel.write(os, "TTL");
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
if (options.isWriteProvenanceModel()) {
File file = new File(workbookFolder, "provenance.ttl.gz");
try (OutputStream os = file.getName().endsWith("gz") ? new GZIPOutputStream(new FileOutputStream(file)) : new FileOutputStream(file)) {
provenanceModel.write(os, "TTL");
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
if (options.isWriteProvenanceCSV()) {
try {
provenanceCSV.close();
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
if (options.isWriteGenerationSummaryJson()) {
JSONObject perSheetPatternUsage = new JSONObject();
JsonUtility.forceLinkedHashMap(perSheetPatternUsage);
for (List<ExcelTable> sheet : sheets) {
ExcelGeneratorSheetConfig sheetConfig = sheetConfigMap.get(sheet);
Map<String, Map<Object, Integer>> pattern2value2count = new HashMap<>();
for (ExcelTable tbl : sheet) {
for (Entry<ExcelCell, Provenance> entry : tbl.getCellProvMap().entrySet()) {
if (entry.getKey().getAddress() == null || entry.getValue().getStatements().isEmpty()) {
continue;
}
for (Entry<String, Object> e : entry.getValue().getUsedPatterns().entrySet()) {
Object val = e.getValue();
if(val instanceof JSONArray) {
val = val.toString();
}
Map<Object, Integer> value2count = pattern2value2count.computeIfAbsent(e.getKey(), k -> new HashMap<>());
value2count.put(val, value2count.getOrDefault(val, 0) + 1);
}
}
}
JSONObject patternUsage = new JSONObject();
JsonUtility.forceLinkedHashMap(patternUsage);
List<Entry<String, Map<Object, Integer>>> pattern2value2countList = new ArrayList<>(pattern2value2count.entrySet());
pattern2value2countList.sort((a,b) -> a.getKey().compareTo(b.getKey()));
for (Entry<String, Map<Object, Integer>> pattern2value2countEntry : pattern2value2countList) {
JSONArray array = new JSONArray();
for (Entry<Object, Integer> e : pattern2value2countEntry.getValue().entrySet()) {
JSONObject v2c = new JSONObject();
JsonUtility.forceLinkedHashMap(v2c);
v2c.put("value", e.getKey());
v2c.put("count", e.getValue());
array.put(v2c);
}
patternUsage.put(pattern2value2countEntry.getKey(), array);
}
perSheetPatternUsage.put(sheetConfig.getSheetName(), patternUsage);
}
options.getGenerationSummary().put("patternUsagePerSheet", perSheetPatternUsage);
File file = new File(workbookFolder, "summary.json");
try {
FileUtils.writeStringToFile(file, options.getGenerationSummary().toString(2), StandardCharsets.UTF_8);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
} | public void create(file dstfolder, list<exceltable> tables, excelsproutoptions options) { random rnd = new random(options.getrandomseed()); list<list<exceltable>> workbookclusters = getworkbookclusters(tables, rnd); list<list<list<exceltable>>> workbooksheettables = new arraylist<>(); for (list<exceltable> workbooktables : workbookclusters) { list<list<exceltable>> sheettables = new arraylist<>(); for (exceltable tbl : workbooktables) { list<exceltable> sheet = arrays.aslist(tbl); sheettables.add(sheet); } workbooksheettables.add(sheettables); } int maxdigits = string.valueof(workbooksheettables.size() - 1).length(); map<list<exceltable>, excelgeneratorsheetconfig> sheetconfigmap = new hashmap<>(); excelgenerator excelgenerator = new excelgenerator(); for (int i = 0; i < workbooksheettables.size(); i++) { list<list<exceltable>> sheets = workbooksheettables.get(i); excelgeneratorworkbookconfig workbookconf = new excelgeneratorworkbookconfig(); workbookconf.setfilename("workbook.xlsx"); for (list<exceltable> sheet : sheets) { excelgeneratorsheetconfig sheetconf = new excelgeneratorsheetconfig(); sheetconfigmap.put(sheet, sheetconf); stringbuilder sheetnamesb = new stringbuilder(); for (int k = 0; k < sheet.size(); k++) { exceltable exceltable = sheet.get(k); excelgeneratortableconfig tableconf = new excelgeneratortableconfig(); point offset = (point) exceltable.getsetup().getordefault("offset", new point(0, 0)); tableconf.setoffset(offset); tableconf.setstaticcelldrawer(d -> { d.exceltable(exceltable, options); }); sheetconf.gettableconfigs().add(tableconf); classconfig classconfig = exceltable.getsetup().getorthrow("classes", classconfig.class); if (classconfig.haslabel()) { sheetnamesb.append(classconfig.getlabel()); } else { throw new runtimeexception("classconfig should give a label to name the sheet"); } if (k != sheet.size() - 1) { sheetnamesb.append(" & "); } } sheetconf.setsheetname(sheetnamesb.tostring()); workbookconf.getsheetconfigs().add(sheetconf); 
excelgeneratorresult result = excelgenerator.generate(null, workbookconf); file workbookfolder = workbooksheettables.size() == 1 ? dstfolder : new file(dstfolder, string.format("%0" + maxdigits + "d", i)); result.saveexcel(workbookfolder); model expectedmodel = null; model provenancemodel = null; csvprinter provenancecsv = null; if (options.iswriteexpectedmodel()) { expectedmodel = modelfactory.createdefaultmodel(); expectedmodel.setnsprefixes(options.getprefixmapping()); expectedmodel.setnsprefixes(prefixmapping.standard); } if (options.iswriteprovenancemodel()) { provenancemodel = modelfactory.createdefaultmodel(); provenancemodel.setnsprefixes(options.getprefixmapping()); provenancemodel.setnsprefix("prov", prov.ns); provenancemodel.setnsprefix("csvw", csvw.ns); provenancemodel.setnsprefix("ss", ss.ns); provenancemodel.setnsprefixes(prefixmapping.standard); } if (options.iswriteprovenancecsv()) { try { provenancecsv = csvformat.default.print( new outputstreamwriter( new gzipoutputstream( new fileoutputstream( new file(workbookfolder, "provenance.csv.gz") )))); csvprovenanceheader(provenancecsv); } catch (ioexception ex) { throw new runtimeexception(ex); } } map<statement, resource> stmt2res = new hashmap<>(); for (list<exceltable> sheet : sheets) { excelgeneratorsheetconfig sheetconfig = sheetconfigmap.get(sheet); for (exceltable table : sheet) { for (entry<excelcell, provenance> cell2prov : table.getcellprovmap().entryset()) { excelcell cell = cell2prov.getkey(); provenance prov = cell2prov.getvalue(); if (cell.getaddress() == null) { continue; } if (prov.getstatements().isempty()) { continue; } if (options.iswriteexpectedmodel()) { expectedmodel.add(prov.getstatements()); } if (options.iswriteprovenancemodel()) { rdfprovenance(cell, sheetconfig.getsheetname(), prov, stmt2res, provenancemodel); } if (options.iswriteprovenancecsv()) { csvprovenance(cell, sheetconfig.getsheetname(), prov, provenancecsv, provenancemodel); } } } } if 
(options.iswriteexpectedmodel()) { file file = new file(workbookfolder, "expected.ttl.gz"); try (outputstream os = file.getname().endswith("gz") ? new gzipoutputstream(new fileoutputstream(file)) : new fileoutputstream(file)) { expectedmodel.write(os, "ttl"); } catch (ioexception ex) { throw new runtimeexception(ex); } } if (options.iswriteprovenancemodel()) { file file = new file(workbookfolder, "provenance.ttl.gz"); try (outputstream os = file.getname().endswith("gz") ? new gzipoutputstream(new fileoutputstream(file)) : new fileoutputstream(file)) { provenancemodel.write(os, "ttl"); } catch (ioexception ex) { throw new runtimeexception(ex); } } if (options.iswriteprovenancecsv()) { try { provenancecsv.close(); } catch (ioexception ex) { throw new runtimeexception(ex); } } if (options.iswritegenerationsummaryjson()) { jsonobject persheetpatternusage = new jsonobject(); jsonutility.forcelinkedhashmap(persheetpatternusage); for (list<exceltable> sheet : sheets) { excelgeneratorsheetconfig sheetconfig = sheetconfigmap.get(sheet); map<string, map<object, integer>> pattern2value2count = new hashmap<>(); for (exceltable tbl : sheet) { for (entry<excelcell, provenance> entry : tbl.getcellprovmap().entryset()) { if (entry.getkey().getaddress() == null || entry.getvalue().getstatements().isempty()) { continue; } for (entry<string, object> e : entry.getvalue().getusedpatterns().entryset()) { object val = e.getvalue(); if(val instanceof jsonarray) { val = val.tostring(); } map<object, integer> value2count = pattern2value2count.computeifabsent(e.getkey(), k -> new hashmap<>()); value2count.put(val, value2count.getordefault(val, 0) + 1); } } } jsonobject patternusage = new jsonobject(); jsonutility.forcelinkedhashmap(patternusage); list<entry<string, map<object, integer>>> pattern2value2countlist = new arraylist<>(pattern2value2count.entryset()); pattern2value2countlist.sort((a,b) -> a.getkey().compareto(b.getkey())); for (entry<string, map<object, integer>> 
pattern2value2countentry : pattern2value2countlist) { jsonarray array = new jsonarray(); for (entry<object, integer> e : pattern2value2countentry.getvalue().entryset()) { jsonobject v2c = new jsonobject(); jsonutility.forcelinkedhashmap(v2c); v2c.put("value", e.getkey()); v2c.put("count", e.getvalue()); array.put(v2c); } patternusage.put(pattern2value2countentry.getkey(), array); } persheetpatternusage.put(sheetconfig.getsheetname(), patternusage); } options.getgenerationsummary().put("patternusagepersheet", persheetpatternusage); file file = new file(workbookfolder, "summary.json"); try { fileutils.writestringtofile(file, options.getgenerationsummary().tostring(2), standardcharsets.utf_8); } catch (ioexception ex) { throw new runtimeexception(ex); } } } | mschroeder-github/datasprout | [
1,
1,
0,
0
] |
16,159 | @Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ParameterizedTypeImpl that = (ParameterizedTypeImpl) o;
// Probably incorrect - comparing Object[] arrays with Arrays.equals
if (!Arrays.equals(actualTypeArguments, that.actualTypeArguments)) return false;
if (ownerType != null ? !ownerType.equals(that.ownerType) : that.ownerType != null) return false;
return rawType != null ? rawType.equals(that.rawType) : that.rawType == null;
} | @Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ParameterizedTypeImpl that = (ParameterizedTypeImpl) o;
if (!Arrays.equals(actualTypeArguments, that.actualTypeArguments)) return false;
if (ownerType != null ? !ownerType.equals(that.ownerType) : that.ownerType != null) return false;
return rawType != null ? rawType.equals(that.rawType) : that.rawType == null;
} | @override public boolean equals(object o) { if (this == o) return true; if (o == null || getclass() != o.getclass()) return false; parameterizedtypeimpl that = (parameterizedtypeimpl) o; if (!arrays.equals(actualtypearguments, that.actualtypearguments)) return false; if (ownertype != null ? !ownertype.equals(that.ownertype) : that.ownertype != null) return false; return rawtype != null ? rawtype.equals(that.rawtype) : that.rawtype == null; } | osglworks/java-di | [
0,
0,
1,
0
] |
8,062 | @Override // ObjectEventListener
public Object addingObject (Object obj, int index)
{
ObjectRenderer renderer = rendererFactory.newRenderer (obj);
// TODO: VAADIN SESSION HANDLING
layout.addComponent (renderer, index);
active_renderers.put (get_object_hash (obj), renderer);
log.info ("<<RENDERER>> addingObject() layout height = {} {}", layout.getHeight (), layout.getHeightUnits ().toString ());
log.info ("Add new renderer {}: obj={} or={} /// active_renderers={}", this, get_object_hash (obj), renderer, active_renderers);
return (obj);
} | @Override public Object addingObject (Object obj, int index)
{
ObjectRenderer renderer = rendererFactory.newRenderer (obj);
layout.addComponent (renderer, index);
active_renderers.put (get_object_hash (obj), renderer);
log.info ("<<RENDERER>> addingObject() layout height = {} {}", layout.getHeight (), layout.getHeightUnits ().toString ());
log.info ("Add new renderer {}: obj={} or={} /// active_renderers={}", this, get_object_hash (obj), renderer, active_renderers);
return (obj);
} | @override public object addingobject (object obj, int index) { objectrenderer renderer = rendererfactory.newrenderer (obj); layout.addcomponent (renderer, index); active_renderers.put (get_object_hash (obj), renderer); log.info ("<<renderer>> addingobject() layout height = {} {}", layout.getheight (), layout.getheightunits ().tostring ()); log.info ("add new renderer {}: obj={} or={} /// active_renderers={}", this, get_object_hash (obj), renderer, active_renderers); return (obj); } | neoautus/lucidj | [
0,
1,
0,
0
] |
8,063 | @Override // ObjectEventListener
public void removingObject (Object obj, int index)
{
String hash = get_object_hash (obj);
ObjectRenderer renderer = active_renderers.get (hash);
log.info ("removingObject: obj={} or={} layout={} /// active_renderers={}", hash, renderer, layout, active_renderers);
// Only deal with valid renderers
if (renderer != null)
{
// TODO: VAADIN SESSION HANDLING
layout.removeComponent (renderer);
log.info ("<<RENDERER>> removingObject() layout height = {} {}", layout.getHeight (), layout.getHeightUnits ().toString ());
}
active_renderers.remove (hash);
} | @Override public void removingObject (Object obj, int index)
{
String hash = get_object_hash (obj);
ObjectRenderer renderer = active_renderers.get (hash);
log.info ("removingObject: obj={} or={} layout={} /// active_renderers={}", hash, renderer, layout, active_renderers);
if (renderer != null)
{
layout.removeComponent (renderer);
log.info ("<<RENDERER>> removingObject() layout height = {} {}", layout.getHeight (), layout.getHeightUnits ().toString ());
}
active_renderers.remove (hash);
} | @override public void removingobject (object obj, int index) { string hash = get_object_hash (obj); objectrenderer renderer = active_renderers.get (hash); log.info ("removingobject: obj={} or={} layout={} /// active_renderers={}", hash, renderer, layout, active_renderers); if (renderer != null) { layout.removecomponent (renderer); log.info ("<<renderer>> removingobject() layout height = {} {}", layout.getheight (), layout.getheightunits ().tostring ()); } active_renderers.remove (hash); } | neoautus/lucidj | [
0,
1,
0,
0
] |
161 | protected final void addSequences(List<String> seqNames, Long seqDefaultValue, boolean ignoreFailure) throws SQLException{
// ToDo: rewrite function to use one SQL call per select/insert for all items
for (String seqName: seqNames){
addSequence(seqName, seqDefaultValue, ignoreFailure);
}
} | protected final void addSequences(List<String> seqNames, Long seqDefaultValue, boolean ignoreFailure) throws SQLException{
for (String seqName: seqNames){
addSequence(seqName, seqDefaultValue, ignoreFailure);
}
} | protected final void addsequences(list<string> seqnames, long seqdefaultvalue, boolean ignorefailure) throws sqlexception{ for (string seqname: seqnames){ addsequence(seqname, seqdefaultvalue, ignorefailure); } } | shavi71/ambari | [
1,
0,
0,
0
] |
24,756 | public static void addChatMessageFixed(ICommandSender sender, IChatComponent message) {
if (sender == null || message == null) return;
if (sender instanceof EntityPlayerMP) {
if (((EntityPlayerMP)sender).playerNetServerHandler != null) {
sender.addChatMessage(message);
} else {
//TODO Find a way to re-send the message.
}
} else {
sender.addChatMessage(message);
}
} | public static void addChatMessageFixed(ICommandSender sender, IChatComponent message) {
if (sender == null || message == null) return;
if (sender instanceof EntityPlayerMP) {
if (((EntityPlayerMP)sender).playerNetServerHandler != null) {
sender.addChatMessage(message);
} else {
}
} else {
sender.addChatMessage(message);
}
} | public static void addchatmessagefixed(icommandsender sender, ichatcomponent message) { if (sender == null || message == null) return; if (sender instanceof entityplayermp) { if (((entityplayermp)sender).playernetserverhandler != null) { sender.addchatmessage(message); } else { } } else { sender.addchatmessage(message); } } | repo-alt/MyEssentials-Core | [
0,
1,
0,
0
] |
33,024 | public Node addNode(String s) throws ItemExistsException, PathNotFoundException, VersionException, ConstraintViolationException, LockException, RepositoryException {
return null;
} | public Node addNode(String s) throws ItemExistsException, PathNotFoundException, VersionException, ConstraintViolationException, LockException, RepositoryException {
return null;
} | public node addnode(string s) throws itemexistsexception, pathnotfoundexception, versionexception, constraintviolationexception, lockexception, repositoryexception { return null; } | sohaniwso2/carbon-registry-1 | [
0,
1,
0,
0
] |
33,065 | private List<DataPoint> getIntersectedSetUsingQuadTree(List<DataPoint> dataset, Rectangle rect) {
throw new UnsupportedOperationException("Not Yet Implemented");
} | private List<DataPoint> getIntersectedSetUsingQuadTree(List<DataPoint> dataset, Rectangle rect) {
throw new UnsupportedOperationException("Not Yet Implemented");
} | private list<datapoint> getintersectedsetusingquadtree(list<datapoint> dataset, rectangle rect) { throw new unsupportedoperationexception("not yet implemented"); } | smozely/QuadTree | [
0,
1,
0,
0
] |
8,490 | private static String getSampleWithLineBreak(){
StringBuilder sb = new StringBuilder();
sb.append("[");
for(int i = 1; i < 4; i++){
sb.append(getTypicalElement(i));
sb.append(",");
}
sb.append("{\"id\" : 4," +
"\"author\" : \"With line\\n break\"," + //FIXME this line break is doubled - is this correct??
"\"title\" : \"Book title 4\"," +
"\"publish_date\" : \"2010-05-26\"" +
"},");
sb.append(getTypicalElement(5));
sb.append(",");
sb.append(getTypicalElement(6));
sb.append("]");
return sb.toString();
} | private static String getSampleWithLineBreak(){
StringBuilder sb = new StringBuilder();
sb.append("[");
for(int i = 1; i < 4; i++){
sb.append(getTypicalElement(i));
sb.append(",");
}
sb.append("{\"id\" : 4," +
"\"author\" : \"With line\\n break\"," +
"\"title\" : \"Book title 4\"," +
"\"publish_date\" : \"2010-05-26\"" +
"},");
sb.append(getTypicalElement(5));
sb.append(",");
sb.append(getTypicalElement(6));
sb.append("]");
return sb.toString();
} | private static string getsamplewithlinebreak(){ stringbuilder sb = new stringbuilder(); sb.append("["); for(int i = 1; i < 4; i++){ sb.append(gettypicalelement(i)); sb.append(","); } sb.append("{\"id\" : 4," + "\"author\" : \"with line\\n break\"," + "\"title\" : \"book title 4\"," + "\"publish_date\" : \"2010-05-26\"" + "},"); sb.append(gettypicalelement(5)); sb.append(","); sb.append(gettypicalelement(6)); sb.append("]"); return sb.tostring(); } | sbalineni19/OpenRefine | [
0,
0,
1,
0
] |
24,911 | @ParameterizedTest
@MethodSource("data") // bug in junit csvsource parsing - it does a trim, using a manual parsing as workaround
void parse(final String csv) {
final String[] parts = csv.split(",");
parse(parser, parts[0], parseInt(parts[1]), parseBoolean(parts[2]), null, parseInt(parts[3]));
} | @ParameterizedTest
@MethodSource("data")
void parse(final String csv) {
final String[] parts = csv.split(",");
parse(parser, parts[0], parseInt(parts[1]), parseBoolean(parts[2]), null, parseInt(parts[3]));
} | @parameterizedtest @methodsource("data") void parse(final string csv) { final string[] parts = csv.split(","); parse(parser, parts[0], parseint(parts[1]), parseboolean(parts[2]), null, parseint(parts[3])); } | rmannibucau/access-log-stats | [
0,
0,
1,
0
] |
380 | @Override
public boolean accept(File pathname)
{
// TODO: More checking here - actually parse out tile coord
return pathname.isFile() && pathname.getName().endsWith(".tile");
} | @Override
public boolean accept(File pathname)
{
return pathname.isFile() && pathname.getName().endsWith(".tile");
} | @override public boolean accept(file pathname) { return pathname.isfile() && pathname.getname().endswith(".tile"); } | snuk182/tectonicus | [
0,
1,
0,
0
] |
386 | public static <T> MetricStorage doubleAsynchronousAccumulator(
View view,
InstrumentDescriptor instrument,
Consumer<ObservableDoubleMeasurement> metricUpdater) {
final MetricDescriptor metricDescriptor = MetricDescriptor.create(view, instrument);
Aggregator<T> aggregator =
view.getAggregation().createAggregator(instrument, ExemplarFilter.neverSample());
final AsyncAccumulator<T> measurementAccumulator = new AsyncAccumulator<>(instrument);
if (Aggregator.empty() == aggregator) {
return empty();
}
final AttributesProcessor attributesProcessor = view.getAttributesProcessor();
// TODO: Find a way to grab the measurement JUST ONCE for all async metrics.
final ObservableDoubleMeasurement result =
new ObservableDoubleMeasurement() {
@Override
public void observe(double value, Attributes attributes) {
T accumulation =
aggregator.accumulateDoubleMeasurement(value, attributes, Context.current());
if (accumulation != null) {
measurementAccumulator.record(
attributesProcessor.process(attributes, Context.current()), accumulation);
}
}
@Override
public void observe(double value) {
observe(value, Attributes.empty());
}
};
return new AsynchronousMetricStorage<>(
metricDescriptor, aggregator, measurementAccumulator, () -> metricUpdater.accept(result));
} | public static <T> MetricStorage doubleAsynchronousAccumulator(
View view,
InstrumentDescriptor instrument,
Consumer<ObservableDoubleMeasurement> metricUpdater) {
final MetricDescriptor metricDescriptor = MetricDescriptor.create(view, instrument);
Aggregator<T> aggregator =
view.getAggregation().createAggregator(instrument, ExemplarFilter.neverSample());
final AsyncAccumulator<T> measurementAccumulator = new AsyncAccumulator<>(instrument);
if (Aggregator.empty() == aggregator) {
return empty();
}
final AttributesProcessor attributesProcessor = view.getAttributesProcessor();
final ObservableDoubleMeasurement result =
new ObservableDoubleMeasurement() {
@Override
public void observe(double value, Attributes attributes) {
T accumulation =
aggregator.accumulateDoubleMeasurement(value, attributes, Context.current());
if (accumulation != null) {
measurementAccumulator.record(
attributesProcessor.process(attributes, Context.current()), accumulation);
}
}
@Override
public void observe(double value) {
observe(value, Attributes.empty());
}
};
return new AsynchronousMetricStorage<>(
metricDescriptor, aggregator, measurementAccumulator, () -> metricUpdater.accept(result));
} | public static <t> metricstorage doubleasynchronousaccumulator( view view, instrumentdescriptor instrument, consumer<observabledoublemeasurement> metricupdater) { final metricdescriptor metricdescriptor = metricdescriptor.create(view, instrument); aggregator<t> aggregator = view.getaggregation().createaggregator(instrument, exemplarfilter.neversample()); final asyncaccumulator<t> measurementaccumulator = new asyncaccumulator<>(instrument); if (aggregator.empty() == aggregator) { return empty(); } final attributesprocessor attributesprocessor = view.getattributesprocessor(); final observabledoublemeasurement result = new observabledoublemeasurement() { @override public void observe(double value, attributes attributes) { t accumulation = aggregator.accumulatedoublemeasurement(value, attributes, context.current()); if (accumulation != null) { measurementaccumulator.record( attributesprocessor.process(attributes, context.current()), accumulation); } } @override public void observe(double value) { observe(value, attributes.empty()); } }; return new asynchronousmetricstorage<>( metricdescriptor, aggregator, measurementaccumulator, () -> metricupdater.accept(result)); } | svalaskevicius/opentelemetry-java | [
1,
0,
0,
0
] |
387 | public static <T> MetricStorage longAsynchronousAccumulator(
View view,
InstrumentDescriptor instrument,
Consumer<ObservableLongMeasurement> metricUpdater) {
final MetricDescriptor metricDescriptor = MetricDescriptor.create(view, instrument);
Aggregator<T> aggregator =
view.getAggregation().createAggregator(instrument, ExemplarFilter.neverSample());
final AsyncAccumulator<T> measurementAccumulator = new AsyncAccumulator<>(instrument);
final AttributesProcessor attributesProcessor = view.getAttributesProcessor();
// TODO: Find a way to grab the measurement JUST ONCE for all async metrics.
final ObservableLongMeasurement result =
new ObservableLongMeasurement() {
@Override
public void observe(long value, Attributes attributes) {
T accumulation =
aggregator.accumulateLongMeasurement(value, attributes, Context.current());
if (accumulation != null) {
measurementAccumulator.record(
attributesProcessor.process(attributes, Context.current()), accumulation);
}
}
@Override
public void observe(long value) {
observe(value, Attributes.empty());
}
};
return new AsynchronousMetricStorage<>(
metricDescriptor, aggregator, measurementAccumulator, () -> metricUpdater.accept(result));
} | public static <T> MetricStorage longAsynchronousAccumulator(
View view,
InstrumentDescriptor instrument,
Consumer<ObservableLongMeasurement> metricUpdater) {
final MetricDescriptor metricDescriptor = MetricDescriptor.create(view, instrument);
Aggregator<T> aggregator =
view.getAggregation().createAggregator(instrument, ExemplarFilter.neverSample());
final AsyncAccumulator<T> measurementAccumulator = new AsyncAccumulator<>(instrument);
final AttributesProcessor attributesProcessor = view.getAttributesProcessor();
final ObservableLongMeasurement result =
new ObservableLongMeasurement() {
@Override
public void observe(long value, Attributes attributes) {
T accumulation =
aggregator.accumulateLongMeasurement(value, attributes, Context.current());
if (accumulation != null) {
measurementAccumulator.record(
attributesProcessor.process(attributes, Context.current()), accumulation);
}
}
@Override
public void observe(long value) {
observe(value, Attributes.empty());
}
};
return new AsynchronousMetricStorage<>(
metricDescriptor, aggregator, measurementAccumulator, () -> metricUpdater.accept(result));
} | public static <t> metricstorage longasynchronousaccumulator( view view, instrumentdescriptor instrument, consumer<observablelongmeasurement> metricupdater) { final metricdescriptor metricdescriptor = metricdescriptor.create(view, instrument); aggregator<t> aggregator = view.getaggregation().createaggregator(instrument, exemplarfilter.neversample()); final asyncaccumulator<t> measurementaccumulator = new asyncaccumulator<>(instrument); final attributesprocessor attributesprocessor = view.getattributesprocessor(); final observablelongmeasurement result = new observablelongmeasurement() { @override public void observe(long value, attributes attributes) { t accumulation = aggregator.accumulatelongmeasurement(value, attributes, context.current()); if (accumulation != null) { measurementaccumulator.record( attributesprocessor.process(attributes, context.current()), accumulation); } } @override public void observe(long value) { observe(value, attributes.empty()); } }; return new asynchronousmetricstorage<>( metricdescriptor, aggregator, measurementaccumulator, () -> metricupdater.accept(result)); } | svalaskevicius/opentelemetry-java | [
1,
0,
0,
0
] |
395 | @Override
public Path getPath(URI uri) {
FileSystem fileSystem = getFileSystem(uri);
/**
* TODO: set as a list. one s3FileSystem by region
*/
return fileSystem.getPath(uri.getPath());
} | @Override
public Path getPath(URI uri) {
FileSystem fileSystem = getFileSystem(uri);
return fileSystem.getPath(uri.getPath());
} | @override public path getpath(uri uri) { filesystem filesystem = getfilesystem(uri); return filesystem.getpath(uri.getpath()); } | skashin/Amazon-S3-FileSystem-NIO2 | [
1,
1,
0,
0
] |
24,980 | protected void runInContextInternal() {
if (s_logger.isInfoEnabled()) {
s_logger.info("starting usage job...");
}
// how about we update the job exec time when the job starts???
long execTime = _jobExecTime.getTimeInMillis();
long now = System.currentTimeMillis() + 2000; // 2 second buffer since jobs can run a little early (though usually just by milliseconds)
if (execTime < now) {
// if exec time is in the past, calculate the next time the job will execute...if this is a one-off job that is a result
// of scheduleParse() then don't update the next exec time...
_jobExecTime.add(Calendar.MINUTE, _aggregationDuration);
}
UsageJobVO job = _usageJobDao.isOwner(_hostname, _pid);
if (job != null) {
// FIXME: we really need to do a better job of not missing any events...so we should some how
// keep track of the last time usage was run, then go from there...
// For executing the job, we treat hourly and daily as special time ranges, using the previous full hour or the previous
// full day. Otherwise we just subtract off the aggregation range from the current time and use that as start date with
// current time as end date.
Calendar cal = Calendar.getInstance(_usageTimezone);
cal.setTime(new Date());
long startDate = 0;
long endDate = 0;
if (_aggregationDuration == DAILY_TIME) {
cal.roll(Calendar.DAY_OF_YEAR, false);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
startDate = cal.getTime().getTime();
cal.roll(Calendar.DAY_OF_YEAR, true);
cal.add(Calendar.MILLISECOND, -1);
endDate = cal.getTime().getTime();
} else if (_aggregationDuration == HOURLY_TIME) {
cal.roll(Calendar.HOUR_OF_DAY, false);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
startDate = cal.getTime().getTime();
cal.roll(Calendar.HOUR_OF_DAY, true);
cal.add(Calendar.MILLISECOND, -1);
endDate = cal.getTime().getTime();
} else {
endDate = cal.getTime().getTime(); // current time
cal.add(Calendar.MINUTE, -1 * _aggregationDuration);
startDate = cal.getTime().getTime();
}
parse(job, startDate, endDate);
if (_runQuota){
try {
_quotaManager.calculateQuotaUsage();
}
catch (Exception e){
s_logger.error("Exception received while calculating quota", e);
}
try {
_quotaStatement.sendStatement();
} catch (Exception e) {
s_logger.error("Exception received while sending statements", e);
}
try {
_alertManager.checkAndSendQuotaAlertEmails();
} catch (Exception e) {
s_logger.error("Exception received while sending alerts", e);
}
}
} else {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Not owner of usage job, skipping...");
}
}
if (s_logger.isInfoEnabled()) {
s_logger.info("usage job complete");
}
} | protected void runInContextInternal() {
if (s_logger.isInfoEnabled()) {
s_logger.info("starting usage job...");
}
long execTime = _jobExecTime.getTimeInMillis();
long now = System.currentTimeMillis() + 2000;
if (execTime < now) {
_jobExecTime.add(Calendar.MINUTE, _aggregationDuration);
}
UsageJobVO job = _usageJobDao.isOwner(_hostname, _pid);
if (job != null) {
Calendar cal = Calendar.getInstance(_usageTimezone);
cal.setTime(new Date());
long startDate = 0;
long endDate = 0;
if (_aggregationDuration == DAILY_TIME) {
cal.roll(Calendar.DAY_OF_YEAR, false);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
startDate = cal.getTime().getTime();
cal.roll(Calendar.DAY_OF_YEAR, true);
cal.add(Calendar.MILLISECOND, -1);
endDate = cal.getTime().getTime();
} else if (_aggregationDuration == HOURLY_TIME) {
cal.roll(Calendar.HOUR_OF_DAY, false);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
startDate = cal.getTime().getTime();
cal.roll(Calendar.HOUR_OF_DAY, true);
cal.add(Calendar.MILLISECOND, -1);
endDate = cal.getTime().getTime();
} else {
endDate = cal.getTime().getTime();
cal.add(Calendar.MINUTE, -1 * _aggregationDuration);
startDate = cal.getTime().getTime();
}
parse(job, startDate, endDate);
if (_runQuota){
try {
_quotaManager.calculateQuotaUsage();
}
catch (Exception e){
s_logger.error("Exception received while calculating quota", e);
}
try {
_quotaStatement.sendStatement();
} catch (Exception e) {
s_logger.error("Exception received while sending statements", e);
}
try {
_alertManager.checkAndSendQuotaAlertEmails();
} catch (Exception e) {
s_logger.error("Exception received while sending alerts", e);
}
}
} else {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Not owner of usage job, skipping...");
}
}
if (s_logger.isInfoEnabled()) {
s_logger.info("usage job complete");
}
} | protected void runincontextinternal() { if (s_logger.isinfoenabled()) { s_logger.info("starting usage job..."); } long exectime = _jobexectime.gettimeinmillis(); long now = system.currenttimemillis() + 2000; if (exectime < now) { _jobexectime.add(calendar.minute, _aggregationduration); } usagejobvo job = _usagejobdao.isowner(_hostname, _pid); if (job != null) { calendar cal = calendar.getinstance(_usagetimezone); cal.settime(new date()); long startdate = 0; long enddate = 0; if (_aggregationduration == daily_time) { cal.roll(calendar.day_of_year, false); cal.set(calendar.hour_of_day, 0); cal.set(calendar.minute, 0); cal.set(calendar.second, 0); cal.set(calendar.millisecond, 0); startdate = cal.gettime().gettime(); cal.roll(calendar.day_of_year, true); cal.add(calendar.millisecond, -1); enddate = cal.gettime().gettime(); } else if (_aggregationduration == hourly_time) { cal.roll(calendar.hour_of_day, false); cal.set(calendar.minute, 0); cal.set(calendar.second, 0); cal.set(calendar.millisecond, 0); startdate = cal.gettime().gettime(); cal.roll(calendar.hour_of_day, true); cal.add(calendar.millisecond, -1); enddate = cal.gettime().gettime(); } else { enddate = cal.gettime().gettime(); cal.add(calendar.minute, -1 * _aggregationduration); startdate = cal.gettime().gettime(); } parse(job, startdate, enddate); if (_runquota){ try { _quotamanager.calculatequotausage(); } catch (exception e){ s_logger.error("exception received while calculating quota", e); } try { _quotastatement.sendstatement(); } catch (exception e) { s_logger.error("exception received while sending statements", e); } try { _alertmanager.checkandsendquotaalertemails(); } catch (exception e) { s_logger.error("exception received while sending alerts", e); } } } else { if (s_logger.isdebugenabled()) { s_logger.debug("not owner of usage job, skipping..."); } } if (s_logger.isinfoenabled()) { s_logger.info("usage job complete"); } } | serbaut/cloudstack | [
1,
0,
1,
0
] |
24,981 | @Override
public void parse(UsageJobVO job, long startDateMillis, long endDateMillis) {
// TODO: Shouldn't we also allow parsing by the type of usage?
boolean success = false;
long timeStart = System.currentTimeMillis();
try {
if ((endDateMillis == 0) || (endDateMillis > timeStart)) {
endDateMillis = timeStart;
}
long lastSuccess = _usageJobDao.getLastJobSuccessDateMillis();
if (lastSuccess != 0) {
startDateMillis = lastSuccess + 1; // 1 millisecond after
}
if (startDateMillis >= endDateMillis) {
if (s_logger.isInfoEnabled()) {
s_logger.info("not parsing usage records since start time mills (" + startDateMillis + ") is on or after end time millis (" + endDateMillis + ")");
}
TransactionLegacy jobUpdateTxn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
try {
jobUpdateTxn.start();
// everything seemed to work...set endDate as the last success date
_usageJobDao.updateJobSuccess(job.getId(), startDateMillis, endDateMillis, System.currentTimeMillis() - timeStart, success);
// create a new job if this is a recurring job
if (job.getJobType() == UsageJobVO.JOB_TYPE_RECURRING) {
_usageJobDao.createNewJob(_hostname, _pid, UsageJobVO.JOB_TYPE_RECURRING);
}
jobUpdateTxn.commit();
} finally {
jobUpdateTxn.close();
}
return;
}
Date startDate = new Date(startDateMillis);
Date endDate = new Date(endDateMillis);
if (s_logger.isInfoEnabled()) {
s_logger.info("Parsing usage records between " + startDate + " and " + endDate);
}
List<AccountVO> accounts = null;
List<UserStatisticsVO> userStats = null;
Map<String, UsageNetworkVO> networkStats = null;
List<VmDiskStatisticsVO> vmDiskStats = null;
Map<String, UsageVmDiskVO> vmDiskUsages = null;
TransactionLegacy userTxn = TransactionLegacy.open(TransactionLegacy.CLOUD_DB);
try {
Long limit = Long.valueOf(500);
Long offset = Long.valueOf(0);
Long lastAccountId = _usageDao.getLastAccountId();
if (lastAccountId == null) {
lastAccountId = Long.valueOf(0);
}
do {
Filter filter = new Filter(AccountVO.class, "id", true, offset, limit);
accounts = _accountDao.findActiveAccounts(lastAccountId, filter);
if ((accounts != null) && !accounts.isEmpty()) {
// now update the accounts in the cloud_usage db
_usageDao.updateAccounts(accounts);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((accounts != null) && !accounts.isEmpty());
// reset offset
offset = Long.valueOf(0);
do {
Filter filter = new Filter(AccountVO.class, "id", true, offset, limit);
accounts = _accountDao.findRecentlyDeletedAccounts(lastAccountId, startDate, filter);
if ((accounts != null) && !accounts.isEmpty()) {
// now update the accounts in the cloud_usage db
_usageDao.updateAccounts(accounts);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((accounts != null) && !accounts.isEmpty());
// reset offset
offset = Long.valueOf(0);
do {
Filter filter = new Filter(AccountVO.class, "id", true, offset, limit);
accounts = _accountDao.findNewAccounts(lastAccountId, filter);
if ((accounts != null) && !accounts.isEmpty()) {
// now copy the accounts to cloud_usage db
_usageDao.saveAccounts(accounts);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((accounts != null) && !accounts.isEmpty());
// reset offset
offset = Long.valueOf(0);
// get all the user stats to create usage records for the network usage
Long lastUserStatsId = _usageDao.getLastUserStatsId();
if (lastUserStatsId == null) {
lastUserStatsId = Long.valueOf(0);
}
SearchCriteria<UserStatisticsVO> sc2 = _userStatsDao.createSearchCriteria();
sc2.addAnd("id", SearchCriteria.Op.LTEQ, lastUserStatsId);
do {
Filter filter = new Filter(UserStatisticsVO.class, "id", true, offset, limit);
userStats = _userStatsDao.search(sc2, filter);
if ((userStats != null) && !userStats.isEmpty()) {
// now copy the accounts to cloud_usage db
_usageDao.updateUserStats(userStats);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((userStats != null) && !userStats.isEmpty());
// reset offset
offset = Long.valueOf(0);
sc2 = _userStatsDao.createSearchCriteria();
sc2.addAnd("id", SearchCriteria.Op.GT, lastUserStatsId);
do {
Filter filter = new Filter(UserStatisticsVO.class, "id", true, offset, limit);
userStats = _userStatsDao.search(sc2, filter);
if ((userStats != null) && !userStats.isEmpty()) {
// now copy the accounts to cloud_usage db
_usageDao.saveUserStats(userStats);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((userStats != null) && !userStats.isEmpty());
// reset offset
offset = Long.valueOf(0);
// get all the vm network stats to create usage_VM_network records for the vm network usage
Long lastVmDiskStatsId = _usageDao.getLastVmDiskStatsId();
if (lastVmDiskStatsId == null) {
lastVmDiskStatsId = Long.valueOf(0);
}
SearchCriteria<VmDiskStatisticsVO> sc4 = _vmDiskStatsDao.createSearchCriteria();
sc4.addAnd("id", SearchCriteria.Op.LTEQ, lastVmDiskStatsId);
do {
Filter filter = new Filter(VmDiskStatisticsVO.class, "id", true, offset, limit);
vmDiskStats = _vmDiskStatsDao.search(sc4, filter);
if ((vmDiskStats != null) && !vmDiskStats.isEmpty()) {
// now copy the accounts to cloud_usage db
_usageDao.updateVmDiskStats(vmDiskStats);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((vmDiskStats != null) && !vmDiskStats.isEmpty());
// reset offset
offset = Long.valueOf(0);
sc4 = _vmDiskStatsDao.createSearchCriteria();
sc4.addAnd("id", SearchCriteria.Op.GT, lastVmDiskStatsId);
do {
Filter filter = new Filter(VmDiskStatisticsVO.class, "id", true, offset, limit);
vmDiskStats = _vmDiskStatsDao.search(sc4, filter);
if ((vmDiskStats != null) && !vmDiskStats.isEmpty()) {
// now copy the accounts to cloud_usage db
_usageDao.saveVmDiskStats(vmDiskStats);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((vmDiskStats != null) && !vmDiskStats.isEmpty());
} finally {
userTxn.close();
}
// TODO: Fetch a maximum number of events and process them before moving on to the next range of events
// - get a list of the latest events
// - insert the latest events into the usage.events table
List<UsageEventVO> events = _usageEventDao.getRecentEvents(new Date(endDateMillis));
TransactionLegacy usageTxn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
try {
usageTxn.start();
// make sure start date is before all of our un-processed events (the events are ordered oldest
// to newest, so just test against the first event)
if ((events != null) && (events.size() > 0)) {
Date oldestEventDate = events.get(0).getCreateDate();
if (oldestEventDate.getTime() < startDateMillis) {
startDateMillis = oldestEventDate.getTime();
startDate = new Date(startDateMillis);
}
// - loop over the list of events and create entries in the helper tables
// - create the usage records using the parse methods below
for (UsageEventVO event : events) {
event.setProcessed(true);
_usageEventDao.update(event.getId(), event);
createHelperRecord(event);
}
}
// TODO: Fetch a maximum number of user stats and process them before moving on to the next range of user stats
// get user stats in order to compute network usage
networkStats = _usageNetworkDao.getRecentNetworkStats();
Calendar recentlyDeletedCal = Calendar.getInstance(_usageTimezone);
recentlyDeletedCal.setTimeInMillis(startDateMillis);
recentlyDeletedCal.add(Calendar.MINUTE, -1 * THREE_DAYS_IN_MINUTES);
Date recentlyDeletedDate = recentlyDeletedCal.getTime();
// Keep track of user stats for an account, across all of its public IPs
Map<String, UserStatisticsVO> aggregatedStats = new HashMap<String, UserStatisticsVO>();
int startIndex = 0;
do {
userStats = _userStatsDao.listActiveAndRecentlyDeleted(recentlyDeletedDate, startIndex, 500);
if (userStats != null) {
for (UserStatisticsVO userStat : userStats) {
if (userStat.getDeviceId() != null) {
String hostKey = userStat.getDataCenterId() + "-" + userStat.getAccountId() + "-Host-" + userStat.getDeviceId();
UserStatisticsVO hostAggregatedStat = aggregatedStats.get(hostKey);
if (hostAggregatedStat == null) {
hostAggregatedStat =
new UserStatisticsVO(userStat.getAccountId(), userStat.getDataCenterId(), userStat.getPublicIpAddress(), userStat.getDeviceId(),
userStat.getDeviceType(), userStat.getNetworkId());
}
hostAggregatedStat.setAggBytesSent(hostAggregatedStat.getAggBytesSent() + userStat.getAggBytesSent());
hostAggregatedStat.setAggBytesReceived(hostAggregatedStat.getAggBytesReceived() + userStat.getAggBytesReceived());
aggregatedStats.put(hostKey, hostAggregatedStat);
}
}
}
startIndex += 500;
} while ((userStats != null) && !userStats.isEmpty());
// loop over the user stats, create delta entries in the usage_network helper table
int numAcctsProcessed = 0;
usageNetworks.clear();
for (String key : aggregatedStats.keySet()) {
UsageNetworkVO currentNetworkStats = null;
if (networkStats != null) {
currentNetworkStats = networkStats.get(key);
}
createNetworkHelperEntry(aggregatedStats.get(key), currentNetworkStats, endDateMillis);
numAcctsProcessed++;
}
_usageNetworkDao.saveUsageNetworks(usageNetworks);
if (s_logger.isDebugEnabled()) {
s_logger.debug("created network stats helper entries for " + numAcctsProcessed + " accts");
}
// get vm disk stats in order to compute vm disk usage
vmDiskUsages = _usageVmDiskDao.getRecentVmDiskStats();
// Keep track of user stats for an account, across all of its public IPs
Map<String, VmDiskStatisticsVO> aggregatedDiskStats = new HashMap<String, VmDiskStatisticsVO>();
startIndex = 0;
do {
vmDiskStats = _vmDiskStatsDao.listActiveAndRecentlyDeleted(recentlyDeletedDate, startIndex, 500);
if (vmDiskUsages != null) {
for (VmDiskStatisticsVO vmDiskStat : vmDiskStats) {
if (vmDiskStat.getVmId() != null) {
String hostKey =
vmDiskStat.getDataCenterId() + "-" + vmDiskStat.getAccountId() + "-Vm-" + vmDiskStat.getVmId() + "-Disk-" + vmDiskStat.getVolumeId();
VmDiskStatisticsVO hostAggregatedStat = aggregatedDiskStats.get(hostKey);
if (hostAggregatedStat == null) {
hostAggregatedStat =
new VmDiskStatisticsVO(vmDiskStat.getAccountId(), vmDiskStat.getDataCenterId(), vmDiskStat.getVmId(), vmDiskStat.getVolumeId());
}
hostAggregatedStat.setAggIORead(hostAggregatedStat.getAggIORead() + vmDiskStat.getAggIORead());
hostAggregatedStat.setAggIOWrite(hostAggregatedStat.getAggIOWrite() + vmDiskStat.getAggIOWrite());
hostAggregatedStat.setAggBytesRead(hostAggregatedStat.getAggBytesRead() + vmDiskStat.getAggBytesRead());
hostAggregatedStat.setAggBytesWrite(hostAggregatedStat.getAggBytesWrite() + vmDiskStat.getAggBytesWrite());
aggregatedDiskStats.put(hostKey, hostAggregatedStat);
}
}
}
startIndex += 500;
} while ((userStats != null) && !userStats.isEmpty());
// loop over the user stats, create delta entries in the usage_disk helper table
numAcctsProcessed = 0;
usageVmDisks.clear();
for (String key : aggregatedDiskStats.keySet()) {
UsageVmDiskVO currentVmDiskStats = null;
if (vmDiskStats != null) {
currentVmDiskStats = vmDiskUsages.get(key);
}
createVmDiskHelperEntry(aggregatedDiskStats.get(key), currentVmDiskStats, endDateMillis);
numAcctsProcessed++;
}
_usageVmDiskDao.saveUsageVmDisks(usageVmDisks);
if (s_logger.isDebugEnabled()) {
s_logger.debug("created vm disk stats helper entries for " + numAcctsProcessed + " accts");
}
// commit the helper records, then start a new transaction
usageTxn.commit();
usageTxn.start();
boolean parsed = false;
numAcctsProcessed = 0;
Date currentStartDate = startDate;
Date currentEndDate = endDate;
Date tempDate = endDate;
Calendar aggregateCal = Calendar.getInstance(_usageTimezone);
while ((tempDate.after(startDate)) && ((tempDate.getTime() - startDate.getTime()) > 60000)) {
currentEndDate = tempDate;
aggregateCal.setTime(tempDate);
aggregateCal.add(Calendar.MINUTE, -_aggregationDuration);
tempDate = aggregateCal.getTime();
}
while (!currentEndDate.after(endDate) || (currentEndDate.getTime() - endDate.getTime() < 60000)) {
Long offset = Long.valueOf(0);
Long limit = Long.valueOf(500);
do {
Filter filter = new Filter(AccountVO.class, "id", true, offset, limit);
accounts = _accountDao.listAll(filter);
if ((accounts != null) && !accounts.isEmpty()) {
for (AccountVO account : accounts) {
parsed = parseHelperTables(account, currentStartDate, currentEndDate);
numAcctsProcessed++;
}
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((accounts != null) && !accounts.isEmpty());
if (s_logger.isDebugEnabled()) {
s_logger.debug("processed VM/Network Usage for " + numAcctsProcessed + " ACTIVE accts");
}
numAcctsProcessed = 0;
// reset offset
offset = Long.valueOf(0);
do {
Filter filter = new Filter(AccountVO.class, "id", true, offset, limit);
accounts = _accountDao.findRecentlyDeletedAccounts(null, recentlyDeletedDate, filter);
if ((accounts != null) && !accounts.isEmpty()) {
for (AccountVO account : accounts) {
parsed = parseHelperTables(account, currentStartDate, currentEndDate);
List<Long> publicTemplates = _usageDao.listPublicTemplatesByAccount(account.getId());
for (Long templateId : publicTemplates) {
//mark public templates owned by deleted accounts as deleted
List<UsageStorageVO> storageVOs = _usageStorageDao.listById(account.getId(), templateId, StorageTypes.TEMPLATE);
if (storageVOs.size() > 1) {
s_logger.warn("More that one usage entry for storage: " + templateId + " assigned to account: " + account.getId() +
"; marking them all as deleted...");
}
for (UsageStorageVO storageVO : storageVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting template: " + storageVO.getId() + " from account: " + storageVO.getAccountId());
}
storageVO.setDeleted(account.getRemoved());
_usageStorageDao.update(storageVO);
}
}
numAcctsProcessed++;
}
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((accounts != null) && !accounts.isEmpty());
currentStartDate = new Date(currentEndDate.getTime() + 1);
aggregateCal.setTime(currentEndDate);
aggregateCal.add(Calendar.MINUTE, _aggregationDuration);
currentEndDate = aggregateCal.getTime();
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("processed Usage for " + numAcctsProcessed + " RECENTLY DELETED accts");
}
// FIXME: we don't break the above loop if something fails to parse, so it gets reset every account,
// do we want to break out of processing accounts and rollback if there are errors?
if (!parsed) {
usageTxn.rollback();
} else {
success = true;
}
} catch (Exception ex) {
s_logger.error("Exception in usage manager", ex);
usageTxn.rollback();
} finally {
// everything seemed to work...set endDate as the last success date
_usageJobDao.updateJobSuccess(job.getId(), startDateMillis, endDateMillis, System.currentTimeMillis() - timeStart, success);
// create a new job if this is a recurring job
if (job.getJobType() == UsageJobVO.JOB_TYPE_RECURRING) {
_usageJobDao.createNewJob(_hostname, _pid, UsageJobVO.JOB_TYPE_RECURRING);
}
usageTxn.commit();
usageTxn.close();
// switch back to CLOUD_DB
TransactionLegacy swap = TransactionLegacy.open(TransactionLegacy.CLOUD_DB);
if (!success) {
_alertMgr.sendAlert(AlertManager.AlertType.ALERT_TYPE_USAGE_SERVER_RESULT, 0, new Long(0), "Usage job failed. Job id: " + job.getId(),
"Usage job failed. Job id: " + job.getId());
} else {
_alertMgr.clearAlert(AlertManager.AlertType.ALERT_TYPE_USAGE_SERVER_RESULT, 0, 0);
}
swap.close();
}
} catch (Exception e) {
s_logger.error("Usage Manager error", e);
}
} | @Override
public void parse(UsageJobVO job, long startDateMillis, long endDateMillis) {
boolean success = false;
long timeStart = System.currentTimeMillis();
try {
if ((endDateMillis == 0) || (endDateMillis > timeStart)) {
endDateMillis = timeStart;
}
long lastSuccess = _usageJobDao.getLastJobSuccessDateMillis();
if (lastSuccess != 0) {
startDateMillis = lastSuccess + 1;
}
if (startDateMillis >= endDateMillis) {
if (s_logger.isInfoEnabled()) {
s_logger.info("not parsing usage records since start time mills (" + startDateMillis + ") is on or after end time millis (" + endDateMillis + ")");
}
TransactionLegacy jobUpdateTxn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
try {
jobUpdateTxn.start();
_usageJobDao.updateJobSuccess(job.getId(), startDateMillis, endDateMillis, System.currentTimeMillis() - timeStart, success);
if (job.getJobType() == UsageJobVO.JOB_TYPE_RECURRING) {
_usageJobDao.createNewJob(_hostname, _pid, UsageJobVO.JOB_TYPE_RECURRING);
}
jobUpdateTxn.commit();
} finally {
jobUpdateTxn.close();
}
return;
}
Date startDate = new Date(startDateMillis);
Date endDate = new Date(endDateMillis);
if (s_logger.isInfoEnabled()) {
s_logger.info("Parsing usage records between " + startDate + " and " + endDate);
}
List<AccountVO> accounts = null;
List<UserStatisticsVO> userStats = null;
Map<String, UsageNetworkVO> networkStats = null;
List<VmDiskStatisticsVO> vmDiskStats = null;
Map<String, UsageVmDiskVO> vmDiskUsages = null;
TransactionLegacy userTxn = TransactionLegacy.open(TransactionLegacy.CLOUD_DB);
try {
Long limit = Long.valueOf(500);
Long offset = Long.valueOf(0);
Long lastAccountId = _usageDao.getLastAccountId();
if (lastAccountId == null) {
lastAccountId = Long.valueOf(0);
}
do {
Filter filter = new Filter(AccountVO.class, "id", true, offset, limit);
accounts = _accountDao.findActiveAccounts(lastAccountId, filter);
if ((accounts != null) && !accounts.isEmpty()) {
_usageDao.updateAccounts(accounts);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((accounts != null) && !accounts.isEmpty());
offset = Long.valueOf(0);
do {
Filter filter = new Filter(AccountVO.class, "id", true, offset, limit);
accounts = _accountDao.findRecentlyDeletedAccounts(lastAccountId, startDate, filter);
if ((accounts != null) && !accounts.isEmpty()) {
_usageDao.updateAccounts(accounts);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((accounts != null) && !accounts.isEmpty());
offset = Long.valueOf(0);
do {
Filter filter = new Filter(AccountVO.class, "id", true, offset, limit);
accounts = _accountDao.findNewAccounts(lastAccountId, filter);
if ((accounts != null) && !accounts.isEmpty()) {
_usageDao.saveAccounts(accounts);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((accounts != null) && !accounts.isEmpty());
offset = Long.valueOf(0);
Long lastUserStatsId = _usageDao.getLastUserStatsId();
if (lastUserStatsId == null) {
lastUserStatsId = Long.valueOf(0);
}
SearchCriteria<UserStatisticsVO> sc2 = _userStatsDao.createSearchCriteria();
sc2.addAnd("id", SearchCriteria.Op.LTEQ, lastUserStatsId);
do {
Filter filter = new Filter(UserStatisticsVO.class, "id", true, offset, limit);
userStats = _userStatsDao.search(sc2, filter);
if ((userStats != null) && !userStats.isEmpty()) {
_usageDao.updateUserStats(userStats);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((userStats != null) && !userStats.isEmpty());
offset = Long.valueOf(0);
sc2 = _userStatsDao.createSearchCriteria();
sc2.addAnd("id", SearchCriteria.Op.GT, lastUserStatsId);
do {
Filter filter = new Filter(UserStatisticsVO.class, "id", true, offset, limit);
userStats = _userStatsDao.search(sc2, filter);
if ((userStats != null) && !userStats.isEmpty()) {
_usageDao.saveUserStats(userStats);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((userStats != null) && !userStats.isEmpty());
offset = Long.valueOf(0);
Long lastVmDiskStatsId = _usageDao.getLastVmDiskStatsId();
if (lastVmDiskStatsId == null) {
lastVmDiskStatsId = Long.valueOf(0);
}
SearchCriteria<VmDiskStatisticsVO> sc4 = _vmDiskStatsDao.createSearchCriteria();
sc4.addAnd("id", SearchCriteria.Op.LTEQ, lastVmDiskStatsId);
do {
Filter filter = new Filter(VmDiskStatisticsVO.class, "id", true, offset, limit);
vmDiskStats = _vmDiskStatsDao.search(sc4, filter);
if ((vmDiskStats != null) && !vmDiskStats.isEmpty()) {
_usageDao.updateVmDiskStats(vmDiskStats);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((vmDiskStats != null) && !vmDiskStats.isEmpty());
offset = Long.valueOf(0);
sc4 = _vmDiskStatsDao.createSearchCriteria();
sc4.addAnd("id", SearchCriteria.Op.GT, lastVmDiskStatsId);
do {
Filter filter = new Filter(VmDiskStatisticsVO.class, "id", true, offset, limit);
vmDiskStats = _vmDiskStatsDao.search(sc4, filter);
if ((vmDiskStats != null) && !vmDiskStats.isEmpty()) {
_usageDao.saveVmDiskStats(vmDiskStats);
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((vmDiskStats != null) && !vmDiskStats.isEmpty());
} finally {
userTxn.close();
}
List<UsageEventVO> events = _usageEventDao.getRecentEvents(new Date(endDateMillis));
TransactionLegacy usageTxn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
try {
usageTxn.start();
if ((events != null) && (events.size() > 0)) {
Date oldestEventDate = events.get(0).getCreateDate();
if (oldestEventDate.getTime() < startDateMillis) {
startDateMillis = oldestEventDate.getTime();
startDate = new Date(startDateMillis);
}
for (UsageEventVO event : events) {
event.setProcessed(true);
_usageEventDao.update(event.getId(), event);
createHelperRecord(event);
}
}
networkStats = _usageNetworkDao.getRecentNetworkStats();
Calendar recentlyDeletedCal = Calendar.getInstance(_usageTimezone);
recentlyDeletedCal.setTimeInMillis(startDateMillis);
recentlyDeletedCal.add(Calendar.MINUTE, -1 * THREE_DAYS_IN_MINUTES);
Date recentlyDeletedDate = recentlyDeletedCal.getTime();
Map<String, UserStatisticsVO> aggregatedStats = new HashMap<String, UserStatisticsVO>();
int startIndex = 0;
do {
userStats = _userStatsDao.listActiveAndRecentlyDeleted(recentlyDeletedDate, startIndex, 500);
if (userStats != null) {
for (UserStatisticsVO userStat : userStats) {
if (userStat.getDeviceId() != null) {
String hostKey = userStat.getDataCenterId() + "-" + userStat.getAccountId() + "-Host-" + userStat.getDeviceId();
UserStatisticsVO hostAggregatedStat = aggregatedStats.get(hostKey);
if (hostAggregatedStat == null) {
hostAggregatedStat =
new UserStatisticsVO(userStat.getAccountId(), userStat.getDataCenterId(), userStat.getPublicIpAddress(), userStat.getDeviceId(),
userStat.getDeviceType(), userStat.getNetworkId());
}
hostAggregatedStat.setAggBytesSent(hostAggregatedStat.getAggBytesSent() + userStat.getAggBytesSent());
hostAggregatedStat.setAggBytesReceived(hostAggregatedStat.getAggBytesReceived() + userStat.getAggBytesReceived());
aggregatedStats.put(hostKey, hostAggregatedStat);
}
}
}
startIndex += 500;
} while ((userStats != null) && !userStats.isEmpty());
int numAcctsProcessed = 0;
usageNetworks.clear();
for (String key : aggregatedStats.keySet()) {
UsageNetworkVO currentNetworkStats = null;
if (networkStats != null) {
currentNetworkStats = networkStats.get(key);
}
createNetworkHelperEntry(aggregatedStats.get(key), currentNetworkStats, endDateMillis);
numAcctsProcessed++;
}
_usageNetworkDao.saveUsageNetworks(usageNetworks);
if (s_logger.isDebugEnabled()) {
s_logger.debug("created network stats helper entries for " + numAcctsProcessed + " accts");
}
vmDiskUsages = _usageVmDiskDao.getRecentVmDiskStats();
Map<String, VmDiskStatisticsVO> aggregatedDiskStats = new HashMap<String, VmDiskStatisticsVO>();
startIndex = 0;
do {
vmDiskStats = _vmDiskStatsDao.listActiveAndRecentlyDeleted(recentlyDeletedDate, startIndex, 500);
if (vmDiskUsages != null) {
for (VmDiskStatisticsVO vmDiskStat : vmDiskStats) {
if (vmDiskStat.getVmId() != null) {
String hostKey =
vmDiskStat.getDataCenterId() + "-" + vmDiskStat.getAccountId() + "-Vm-" + vmDiskStat.getVmId() + "-Disk-" + vmDiskStat.getVolumeId();
VmDiskStatisticsVO hostAggregatedStat = aggregatedDiskStats.get(hostKey);
if (hostAggregatedStat == null) {
hostAggregatedStat =
new VmDiskStatisticsVO(vmDiskStat.getAccountId(), vmDiskStat.getDataCenterId(), vmDiskStat.getVmId(), vmDiskStat.getVolumeId());
}
hostAggregatedStat.setAggIORead(hostAggregatedStat.getAggIORead() + vmDiskStat.getAggIORead());
hostAggregatedStat.setAggIOWrite(hostAggregatedStat.getAggIOWrite() + vmDiskStat.getAggIOWrite());
hostAggregatedStat.setAggBytesRead(hostAggregatedStat.getAggBytesRead() + vmDiskStat.getAggBytesRead());
hostAggregatedStat.setAggBytesWrite(hostAggregatedStat.getAggBytesWrite() + vmDiskStat.getAggBytesWrite());
aggregatedDiskStats.put(hostKey, hostAggregatedStat);
}
}
}
startIndex += 500;
} while ((userStats != null) && !userStats.isEmpty());
numAcctsProcessed = 0;
usageVmDisks.clear();
for (String key : aggregatedDiskStats.keySet()) {
UsageVmDiskVO currentVmDiskStats = null;
if (vmDiskStats != null) {
currentVmDiskStats = vmDiskUsages.get(key);
}
createVmDiskHelperEntry(aggregatedDiskStats.get(key), currentVmDiskStats, endDateMillis);
numAcctsProcessed++;
}
_usageVmDiskDao.saveUsageVmDisks(usageVmDisks);
if (s_logger.isDebugEnabled()) {
s_logger.debug("created vm disk stats helper entries for " + numAcctsProcessed + " accts");
}
usageTxn.commit();
usageTxn.start();
boolean parsed = false;
numAcctsProcessed = 0;
Date currentStartDate = startDate;
Date currentEndDate = endDate;
Date tempDate = endDate;
Calendar aggregateCal = Calendar.getInstance(_usageTimezone);
while ((tempDate.after(startDate)) && ((tempDate.getTime() - startDate.getTime()) > 60000)) {
currentEndDate = tempDate;
aggregateCal.setTime(tempDate);
aggregateCal.add(Calendar.MINUTE, -_aggregationDuration);
tempDate = aggregateCal.getTime();
}
while (!currentEndDate.after(endDate) || (currentEndDate.getTime() - endDate.getTime() < 60000)) {
Long offset = Long.valueOf(0);
Long limit = Long.valueOf(500);
do {
Filter filter = new Filter(AccountVO.class, "id", true, offset, limit);
accounts = _accountDao.listAll(filter);
if ((accounts != null) && !accounts.isEmpty()) {
for (AccountVO account : accounts) {
parsed = parseHelperTables(account, currentStartDate, currentEndDate);
numAcctsProcessed++;
}
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((accounts != null) && !accounts.isEmpty());
if (s_logger.isDebugEnabled()) {
s_logger.debug("processed VM/Network Usage for " + numAcctsProcessed + " ACTIVE accts");
}
numAcctsProcessed = 0;
offset = Long.valueOf(0);
do {
Filter filter = new Filter(AccountVO.class, "id", true, offset, limit);
accounts = _accountDao.findRecentlyDeletedAccounts(null, recentlyDeletedDate, filter);
if ((accounts != null) && !accounts.isEmpty()) {
for (AccountVO account : accounts) {
parsed = parseHelperTables(account, currentStartDate, currentEndDate);
List<Long> publicTemplates = _usageDao.listPublicTemplatesByAccount(account.getId());
for (Long templateId : publicTemplates) {
List<UsageStorageVO> storageVOs = _usageStorageDao.listById(account.getId(), templateId, StorageTypes.TEMPLATE);
if (storageVOs.size() > 1) {
s_logger.warn("More that one usage entry for storage: " + templateId + " assigned to account: " + account.getId() +
"; marking them all as deleted...");
}
for (UsageStorageVO storageVO : storageVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting template: " + storageVO.getId() + " from account: " + storageVO.getAccountId());
}
storageVO.setDeleted(account.getRemoved());
_usageStorageDao.update(storageVO);
}
}
numAcctsProcessed++;
}
}
offset = new Long(offset.longValue() + limit.longValue());
} while ((accounts != null) && !accounts.isEmpty());
currentStartDate = new Date(currentEndDate.getTime() + 1);
aggregateCal.setTime(currentEndDate);
aggregateCal.add(Calendar.MINUTE, _aggregationDuration);
currentEndDate = aggregateCal.getTime();
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("processed Usage for " + numAcctsProcessed + " RECENTLY DELETED accts");
}
if (!parsed) {
usageTxn.rollback();
} else {
success = true;
}
} catch (Exception ex) {
s_logger.error("Exception in usage manager", ex);
usageTxn.rollback();
} finally {
_usageJobDao.updateJobSuccess(job.getId(), startDateMillis, endDateMillis, System.currentTimeMillis() - timeStart, success);
if (job.getJobType() == UsageJobVO.JOB_TYPE_RECURRING) {
_usageJobDao.createNewJob(_hostname, _pid, UsageJobVO.JOB_TYPE_RECURRING);
}
usageTxn.commit();
usageTxn.close();
TransactionLegacy swap = TransactionLegacy.open(TransactionLegacy.CLOUD_DB);
if (!success) {
_alertMgr.sendAlert(AlertManager.AlertType.ALERT_TYPE_USAGE_SERVER_RESULT, 0, new Long(0), "Usage job failed. Job id: " + job.getId(),
"Usage job failed. Job id: " + job.getId());
} else {
_alertMgr.clearAlert(AlertManager.AlertType.ALERT_TYPE_USAGE_SERVER_RESULT, 0, 0);
}
swap.close();
}
} catch (Exception e) {
s_logger.error("Usage Manager error", e);
}
} | @override public void parse(usagejobvo job, long startdatemillis, long enddatemillis) { boolean success = false; long timestart = system.currenttimemillis(); try { if ((enddatemillis == 0) || (enddatemillis > timestart)) { enddatemillis = timestart; } long lastsuccess = _usagejobdao.getlastjobsuccessdatemillis(); if (lastsuccess != 0) { startdatemillis = lastsuccess + 1; } if (startdatemillis >= enddatemillis) { if (s_logger.isinfoenabled()) { s_logger.info("not parsing usage records since start time mills (" + startdatemillis + ") is on or after end time millis (" + enddatemillis + ")"); } transactionlegacy jobupdatetxn = transactionlegacy.open(transactionlegacy.usage_db); try { jobupdatetxn.start(); _usagejobdao.updatejobsuccess(job.getid(), startdatemillis, enddatemillis, system.currenttimemillis() - timestart, success); if (job.getjobtype() == usagejobvo.job_type_recurring) { _usagejobdao.createnewjob(_hostname, _pid, usagejobvo.job_type_recurring); } jobupdatetxn.commit(); } finally { jobupdatetxn.close(); } return; } date startdate = new date(startdatemillis); date enddate = new date(enddatemillis); if (s_logger.isinfoenabled()) { s_logger.info("parsing usage records between " + startdate + " and " + enddate); } list<accountvo> accounts = null; list<userstatisticsvo> userstats = null; map<string, usagenetworkvo> networkstats = null; list<vmdiskstatisticsvo> vmdiskstats = null; map<string, usagevmdiskvo> vmdiskusages = null; transactionlegacy usertxn = transactionlegacy.open(transactionlegacy.cloud_db); try { long limit = long.valueof(500); long offset = long.valueof(0); long lastaccountid = _usagedao.getlastaccountid(); if (lastaccountid == null) { lastaccountid = long.valueof(0); } do { filter filter = new filter(accountvo.class, "id", true, offset, limit); accounts = _accountdao.findactiveaccounts(lastaccountid, filter); if ((accounts != null) && !accounts.isempty()) { _usagedao.updateaccounts(accounts); } offset = new long(offset.longvalue() + 
limit.longvalue()); } while ((accounts != null) && !accounts.isempty()); offset = long.valueof(0); do { filter filter = new filter(accountvo.class, "id", true, offset, limit); accounts = _accountdao.findrecentlydeletedaccounts(lastaccountid, startdate, filter); if ((accounts != null) && !accounts.isempty()) { _usagedao.updateaccounts(accounts); } offset = new long(offset.longvalue() + limit.longvalue()); } while ((accounts != null) && !accounts.isempty()); offset = long.valueof(0); do { filter filter = new filter(accountvo.class, "id", true, offset, limit); accounts = _accountdao.findnewaccounts(lastaccountid, filter); if ((accounts != null) && !accounts.isempty()) { _usagedao.saveaccounts(accounts); } offset = new long(offset.longvalue() + limit.longvalue()); } while ((accounts != null) && !accounts.isempty()); offset = long.valueof(0); long lastuserstatsid = _usagedao.getlastuserstatsid(); if (lastuserstatsid == null) { lastuserstatsid = long.valueof(0); } searchcriteria<userstatisticsvo> sc2 = _userstatsdao.createsearchcriteria(); sc2.addand("id", searchcriteria.op.lteq, lastuserstatsid); do { filter filter = new filter(userstatisticsvo.class, "id", true, offset, limit); userstats = _userstatsdao.search(sc2, filter); if ((userstats != null) && !userstats.isempty()) { _usagedao.updateuserstats(userstats); } offset = new long(offset.longvalue() + limit.longvalue()); } while ((userstats != null) && !userstats.isempty()); offset = long.valueof(0); sc2 = _userstatsdao.createsearchcriteria(); sc2.addand("id", searchcriteria.op.gt, lastuserstatsid); do { filter filter = new filter(userstatisticsvo.class, "id", true, offset, limit); userstats = _userstatsdao.search(sc2, filter); if ((userstats != null) && !userstats.isempty()) { _usagedao.saveuserstats(userstats); } offset = new long(offset.longvalue() + limit.longvalue()); } while ((userstats != null) && !userstats.isempty()); offset = long.valueof(0); long lastvmdiskstatsid = _usagedao.getlastvmdiskstatsid(); if 
(lastvmdiskstatsid == null) { lastvmdiskstatsid = long.valueof(0); } searchcriteria<vmdiskstatisticsvo> sc4 = _vmdiskstatsdao.createsearchcriteria(); sc4.addand("id", searchcriteria.op.lteq, lastvmdiskstatsid); do { filter filter = new filter(vmdiskstatisticsvo.class, "id", true, offset, limit); vmdiskstats = _vmdiskstatsdao.search(sc4, filter); if ((vmdiskstats != null) && !vmdiskstats.isempty()) { _usagedao.updatevmdiskstats(vmdiskstats); } offset = new long(offset.longvalue() + limit.longvalue()); } while ((vmdiskstats != null) && !vmdiskstats.isempty()); offset = long.valueof(0); sc4 = _vmdiskstatsdao.createsearchcriteria(); sc4.addand("id", searchcriteria.op.gt, lastvmdiskstatsid); do { filter filter = new filter(vmdiskstatisticsvo.class, "id", true, offset, limit); vmdiskstats = _vmdiskstatsdao.search(sc4, filter); if ((vmdiskstats != null) && !vmdiskstats.isempty()) { _usagedao.savevmdiskstats(vmdiskstats); } offset = new long(offset.longvalue() + limit.longvalue()); } while ((vmdiskstats != null) && !vmdiskstats.isempty()); } finally { usertxn.close(); } list<usageeventvo> events = _usageeventdao.getrecentevents(new date(enddatemillis)); transactionlegacy usagetxn = transactionlegacy.open(transactionlegacy.usage_db); try { usagetxn.start(); if ((events != null) && (events.size() > 0)) { date oldesteventdate = events.get(0).getcreatedate(); if (oldesteventdate.gettime() < startdatemillis) { startdatemillis = oldesteventdate.gettime(); startdate = new date(startdatemillis); } for (usageeventvo event : events) { event.setprocessed(true); _usageeventdao.update(event.getid(), event); createhelperrecord(event); } } networkstats = _usagenetworkdao.getrecentnetworkstats(); calendar recentlydeletedcal = calendar.getinstance(_usagetimezone); recentlydeletedcal.settimeinmillis(startdatemillis); recentlydeletedcal.add(calendar.minute, -1 * three_days_in_minutes); date recentlydeleteddate = recentlydeletedcal.gettime(); map<string, userstatisticsvo> aggregatedstats = 
new hashmap<string, userstatisticsvo>(); int startindex = 0; do { userstats = _userstatsdao.listactiveandrecentlydeleted(recentlydeleteddate, startindex, 500); if (userstats != null) { for (userstatisticsvo userstat : userstats) { if (userstat.getdeviceid() != null) { string hostkey = userstat.getdatacenterid() + "-" + userstat.getaccountid() + "-host-" + userstat.getdeviceid(); userstatisticsvo hostaggregatedstat = aggregatedstats.get(hostkey); if (hostaggregatedstat == null) { hostaggregatedstat = new userstatisticsvo(userstat.getaccountid(), userstat.getdatacenterid(), userstat.getpublicipaddress(), userstat.getdeviceid(), userstat.getdevicetype(), userstat.getnetworkid()); } hostaggregatedstat.setaggbytessent(hostaggregatedstat.getaggbytessent() + userstat.getaggbytessent()); hostaggregatedstat.setaggbytesreceived(hostaggregatedstat.getaggbytesreceived() + userstat.getaggbytesreceived()); aggregatedstats.put(hostkey, hostaggregatedstat); } } } startindex += 500; } while ((userstats != null) && !userstats.isempty()); int numacctsprocessed = 0; usagenetworks.clear(); for (string key : aggregatedstats.keyset()) { usagenetworkvo currentnetworkstats = null; if (networkstats != null) { currentnetworkstats = networkstats.get(key); } createnetworkhelperentry(aggregatedstats.get(key), currentnetworkstats, enddatemillis); numacctsprocessed++; } _usagenetworkdao.saveusagenetworks(usagenetworks); if (s_logger.isdebugenabled()) { s_logger.debug("created network stats helper entries for " + numacctsprocessed + " accts"); } vmdiskusages = _usagevmdiskdao.getrecentvmdiskstats(); map<string, vmdiskstatisticsvo> aggregateddiskstats = new hashmap<string, vmdiskstatisticsvo>(); startindex = 0; do { vmdiskstats = _vmdiskstatsdao.listactiveandrecentlydeleted(recentlydeleteddate, startindex, 500); if (vmdiskusages != null) { for (vmdiskstatisticsvo vmdiskstat : vmdiskstats) { if (vmdiskstat.getvmid() != null) { string hostkey = vmdiskstat.getdatacenterid() + "-" + 
vmdiskstat.getaccountid() + "-vm-" + vmdiskstat.getvmid() + "-disk-" + vmdiskstat.getvolumeid(); vmdiskstatisticsvo hostaggregatedstat = aggregateddiskstats.get(hostkey); if (hostaggregatedstat == null) { hostaggregatedstat = new vmdiskstatisticsvo(vmdiskstat.getaccountid(), vmdiskstat.getdatacenterid(), vmdiskstat.getvmid(), vmdiskstat.getvolumeid()); } hostaggregatedstat.setaggioread(hostaggregatedstat.getaggioread() + vmdiskstat.getaggioread()); hostaggregatedstat.setaggiowrite(hostaggregatedstat.getaggiowrite() + vmdiskstat.getaggiowrite()); hostaggregatedstat.setaggbytesread(hostaggregatedstat.getaggbytesread() + vmdiskstat.getaggbytesread()); hostaggregatedstat.setaggbyteswrite(hostaggregatedstat.getaggbyteswrite() + vmdiskstat.getaggbyteswrite()); aggregateddiskstats.put(hostkey, hostaggregatedstat); } } } startindex += 500; } while ((userstats != null) && !userstats.isempty()); numacctsprocessed = 0; usagevmdisks.clear(); for (string key : aggregateddiskstats.keyset()) { usagevmdiskvo currentvmdiskstats = null; if (vmdiskstats != null) { currentvmdiskstats = vmdiskusages.get(key); } createvmdiskhelperentry(aggregateddiskstats.get(key), currentvmdiskstats, enddatemillis); numacctsprocessed++; } _usagevmdiskdao.saveusagevmdisks(usagevmdisks); if (s_logger.isdebugenabled()) { s_logger.debug("created vm disk stats helper entries for " + numacctsprocessed + " accts"); } usagetxn.commit(); usagetxn.start(); boolean parsed = false; numacctsprocessed = 0; date currentstartdate = startdate; date currentenddate = enddate; date tempdate = enddate; calendar aggregatecal = calendar.getinstance(_usagetimezone); while ((tempdate.after(startdate)) && ((tempdate.gettime() - startdate.gettime()) > 60000)) { currentenddate = tempdate; aggregatecal.settime(tempdate); aggregatecal.add(calendar.minute, -_aggregationduration); tempdate = aggregatecal.gettime(); } while (!currentenddate.after(enddate) || (currentenddate.gettime() - enddate.gettime() < 60000)) { long offset = 
long.valueof(0); long limit = long.valueof(500); do { filter filter = new filter(accountvo.class, "id", true, offset, limit); accounts = _accountdao.listall(filter); if ((accounts != null) && !accounts.isempty()) { for (accountvo account : accounts) { parsed = parsehelpertables(account, currentstartdate, currentenddate); numacctsprocessed++; } } offset = new long(offset.longvalue() + limit.longvalue()); } while ((accounts != null) && !accounts.isempty()); if (s_logger.isdebugenabled()) { s_logger.debug("processed vm/network usage for " + numacctsprocessed + " active accts"); } numacctsprocessed = 0; offset = long.valueof(0); do { filter filter = new filter(accountvo.class, "id", true, offset, limit); accounts = _accountdao.findrecentlydeletedaccounts(null, recentlydeleteddate, filter); if ((accounts != null) && !accounts.isempty()) { for (accountvo account : accounts) { parsed = parsehelpertables(account, currentstartdate, currentenddate); list<long> publictemplates = _usagedao.listpublictemplatesbyaccount(account.getid()); for (long templateid : publictemplates) { list<usagestoragevo> storagevos = _usagestoragedao.listbyid(account.getid(), templateid, storagetypes.template); if (storagevos.size() > 1) { s_logger.warn("more that one usage entry for storage: " + templateid + " assigned to account: " + account.getid() + "; marking them all as deleted..."); } for (usagestoragevo storagevo : storagevos) { if (s_logger.isdebugenabled()) { s_logger.debug("deleting template: " + storagevo.getid() + " from account: " + storagevo.getaccountid()); } storagevo.setdeleted(account.getremoved()); _usagestoragedao.update(storagevo); } } numacctsprocessed++; } } offset = new long(offset.longvalue() + limit.longvalue()); } while ((accounts != null) && !accounts.isempty()); currentstartdate = new date(currentenddate.gettime() + 1); aggregatecal.settime(currentenddate); aggregatecal.add(calendar.minute, _aggregationduration); currentenddate = aggregatecal.gettime(); } if 
(s_logger.isdebugenabled()) { s_logger.debug("processed usage for " + numacctsprocessed + " recently deleted accts"); } if (!parsed) { usagetxn.rollback(); } else { success = true; } } catch (exception ex) { s_logger.error("exception in usage manager", ex); usagetxn.rollback(); } finally { _usagejobdao.updatejobsuccess(job.getid(), startdatemillis, enddatemillis, system.currenttimemillis() - timestart, success); if (job.getjobtype() == usagejobvo.job_type_recurring) { _usagejobdao.createnewjob(_hostname, _pid, usagejobvo.job_type_recurring); } usagetxn.commit(); usagetxn.close(); transactionlegacy swap = transactionlegacy.open(transactionlegacy.cloud_db); if (!success) { _alertmgr.sendalert(alertmanager.alerttype.alert_type_usage_server_result, 0, new long(0), "usage job failed. job id: " + job.getid(), "usage job failed. job id: " + job.getid()); } else { _alertmgr.clearalert(alertmanager.alerttype.alert_type_usage_server_result, 0, 0); } swap.close(); } } catch (exception e) { s_logger.error("usage manager error", e); } } | serbaut/cloudstack | [
1,
1,
0,
0
] |
33,175 | @SuppressWarnings("unchecked")
public static <T> T[] toArray(List<T> list){
return (T[]) list.toArray();
} | @SuppressWarnings("unchecked")
public static <T> T[] toArray(List<T> list){
return (T[]) list.toArray();
} | @suppresswarnings("unchecked") public static <t> t[] toarray(list<t> list){ return (t[]) list.toarray(); } | sisbell/appia | [
1,
0,
0,
0
] |
408 | @SuppressWarnings("nls")
@Override
protected FilterResult doFilterInternal(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
response = new IgnoreContentWrapper(response);
response.addHeader("P3P", "CP=\"CAO PSA OUR\"");
response.setHeader("X-Content-Type-Options", "nosniff");
// Chrome sucks.
// http://dev.equella.com/issues/8025
// http://dev.equella.com/issues/5612
String ua = request.getHeader("User-Agent");
if (ua != null && ua.contains("Chrome")) {
response.addHeader("X-XSS-Protection", "0");
} else {
response.setHeader("X-XSS-Protection", "1; mode=block");
}
if (stsMaxAge != -1) {
response.setHeader(
"Strict-Transport-Security", "max-age=" + stsMaxAge + "; includeSubDomains");
}
return new FilterResult(response);
} | @SuppressWarnings("nls")
@Override
protected FilterResult doFilterInternal(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
response = new IgnoreContentWrapper(response);
response.addHeader("P3P", "CP=\"CAO PSA OUR\"");
response.setHeader("X-Content-Type-Options", "nosniff");
String ua = request.getHeader("User-Agent");
if (ua != null && ua.contains("Chrome")) {
response.addHeader("X-XSS-Protection", "0");
} else {
response.setHeader("X-XSS-Protection", "1; mode=block");
}
if (stsMaxAge != -1) {
response.setHeader(
"Strict-Transport-Security", "max-age=" + stsMaxAge + "; includeSubDomains");
}
return new FilterResult(response);
} | @suppresswarnings("nls") @override protected filterresult dofilterinternal(httpservletrequest request, httpservletresponse response) throws servletexception, ioexception { response = new ignorecontentwrapper(response); response.addheader("p3p", "cp=\"cao psa our\""); response.setheader("x-content-type-options", "nosniff"); string ua = request.getheader("user-agent"); if (ua != null && ua.contains("chrome")) { response.addheader("x-xss-protection", "0"); } else { response.setheader("x-xss-protection", "1; mode=block"); } if (stsmaxage != -1) { response.setheader( "strict-transport-security", "max-age=" + stsmaxage + "; includesubdomains"); } return new filterresult(response); } | rmathis/openEQUELLA | [
0,
0,
1,
0
] |
16,797 | public static Listener createListener(boolean ads,
io.envoyproxy.envoy.api.v2.core.ApiVersion rdsTransportVersion,
io.envoyproxy.envoy.api.v2.core.ApiVersion rdsResourceVersion, String listenerName,
int port, String routeName) {
ConfigSource.Builder configSourceBuilder = ConfigSource.newBuilder()
.setResourceApiVersion(rdsResourceVersion);
ConfigSource rdsSource = ads
? configSourceBuilder
.setAds(AggregatedConfigSource.getDefaultInstance())
.build()
: configSourceBuilder
.setApiConfigSource(ApiConfigSource.newBuilder()
.setApiType(ApiType.GRPC)
.setTransportApiVersion(rdsTransportVersion)
.addGrpcServices(GrpcService.newBuilder()
.setEnvoyGrpc(EnvoyGrpc.newBuilder()
.setClusterName(XDS_CLUSTER))))
.build();
HttpConnectionManager manager = HttpConnectionManager.newBuilder()
.setCodecType(CodecType.AUTO)
.setStatPrefix("http")
.setRds(Rds.newBuilder()
.setConfigSource(rdsSource)
.setRouteConfigName(routeName))
.addHttpFilters(HttpFilter.newBuilder()
.setName(Resources.FILTER_ENVOY_ROUTER))
.build();
return Listener.newBuilder()
.setName(listenerName)
.setAddress(Address.newBuilder()
.setSocketAddress(SocketAddress.newBuilder()
.setAddress(ANY_ADDRESS)
.setPortValue(port)
.setProtocol(Protocol.TCP)))
.addFilterChains(FilterChain.newBuilder()
.addFilters(Filter.newBuilder()
.setName(Resources.FILTER_HTTP_CONNECTION_MANAGER)
.setTypedConfig(Any.pack(manager))))
.build();
} | public static Listener createListener(boolean ads,
io.envoyproxy.envoy.api.v2.core.ApiVersion rdsTransportVersion,
io.envoyproxy.envoy.api.v2.core.ApiVersion rdsResourceVersion, String listenerName,
int port, String routeName) {
ConfigSource.Builder configSourceBuilder = ConfigSource.newBuilder()
.setResourceApiVersion(rdsResourceVersion);
ConfigSource rdsSource = ads
? configSourceBuilder
.setAds(AggregatedConfigSource.getDefaultInstance())
.build()
: configSourceBuilder
.setApiConfigSource(ApiConfigSource.newBuilder()
.setApiType(ApiType.GRPC)
.setTransportApiVersion(rdsTransportVersion)
.addGrpcServices(GrpcService.newBuilder()
.setEnvoyGrpc(EnvoyGrpc.newBuilder()
.setClusterName(XDS_CLUSTER))))
.build();
HttpConnectionManager manager = HttpConnectionManager.newBuilder()
.setCodecType(CodecType.AUTO)
.setStatPrefix("http")
.setRds(Rds.newBuilder()
.setConfigSource(rdsSource)
.setRouteConfigName(routeName))
.addHttpFilters(HttpFilter.newBuilder()
.setName(Resources.FILTER_ENVOY_ROUTER))
.build();
return Listener.newBuilder()
.setName(listenerName)
.setAddress(Address.newBuilder()
.setSocketAddress(SocketAddress.newBuilder()
.setAddress(ANY_ADDRESS)
.setPortValue(port)
.setProtocol(Protocol.TCP)))
.addFilterChains(FilterChain.newBuilder()
.addFilters(Filter.newBuilder()
.setName(Resources.FILTER_HTTP_CONNECTION_MANAGER)
.setTypedConfig(Any.pack(manager))))
.build();
} | public static listener createlistener(boolean ads, io.envoyproxy.envoy.api.v2.core.apiversion rdstransportversion, io.envoyproxy.envoy.api.v2.core.apiversion rdsresourceversion, string listenername, int port, string routename) { configsource.builder configsourcebuilder = configsource.newbuilder() .setresourceapiversion(rdsresourceversion); configsource rdssource = ads ? configsourcebuilder .setads(aggregatedconfigsource.getdefaultinstance()) .build() : configsourcebuilder .setapiconfigsource(apiconfigsource.newbuilder() .setapitype(apitype.grpc) .settransportapiversion(rdstransportversion) .addgrpcservices(grpcservice.newbuilder() .setenvoygrpc(envoygrpc.newbuilder() .setclustername(xds_cluster)))) .build(); httpconnectionmanager manager = httpconnectionmanager.newbuilder() .setcodectype(codectype.auto) .setstatprefix("http") .setrds(rds.newbuilder() .setconfigsource(rdssource) .setrouteconfigname(routename)) .addhttpfilters(httpfilter.newbuilder() .setname(resources.filter_envoy_router)) .build(); return listener.newbuilder() .setname(listenername) .setaddress(address.newbuilder() .setsocketaddress(socketaddress.newbuilder() .setaddress(any_address) .setportvalue(port) .setprotocol(protocol.tcp))) .addfilterchains(filterchain.newbuilder() .addfilters(filter.newbuilder() .setname(resources.filter_http_connection_manager) .settypedconfig(any.pack(manager)))) .build(); } | smtilden/java-control-plane | [
0,
0,
0,
0
] |
24,996 | @Override
public void performInitialization(Object model) {
setFieldBindingObjectPath(getBindingInfo().getBindingObjectPath());
super.performInitialization(model);
if (bindingInfo != null) {
bindingInfo.setDefaults(ViewLifecycle.getActiveLifecycle().getView(), getPropertyName());
}
// TODO: set object path for prototypes equal to the tree group object path?
} | @Override
public void performInitialization(Object model) {
setFieldBindingObjectPath(getBindingInfo().getBindingObjectPath());
super.performInitialization(model);
if (bindingInfo != null) {
bindingInfo.setDefaults(ViewLifecycle.getActiveLifecycle().getView(), getPropertyName());
}
} | @override public void performinitialization(object model) { setfieldbindingobjectpath(getbindinginfo().getbindingobjectpath()); super.performinitialization(model); if (bindinginfo != null) { bindinginfo.setdefaults(viewlifecycle.getactivelifecycle().getview(), getpropertyname()); } } | ricepanda/rice | [
0,
1,
0,
0
] |
25,027 | @Override
public Map<String, String> getOptionList(String id) {
// TODO: Return genre map
return super.getOptionList(id);
} | @Override
public Map<String, String> getOptionList(String id) {
return super.getOptionList(id);
} | @override public map<string, string> getoptionlist(string id) { return super.getoptionlist(id); } | stuckless/sagetv-phoenix-core | [
0,
1,
0,
0
] |
16,873 | private List<InferenceInOutSequence> getInputOutputAssets() throws IOException {
// TODO: Caching, don't read inputs for every inference
List<InferenceInOutSequence> inOutList =
getInputOutputAssets(mContext, mInputOutputAssets, mInputOutputDatasets);
Boolean lastGolden = null;
for (InferenceInOutSequence sequence : inOutList) {
mHasGoldenOutputs = sequence.hasGoldenOutput();
if (lastGolden == null) {
lastGolden = mHasGoldenOutputs;
} else {
if (lastGolden != mHasGoldenOutputs) {
throw new IllegalArgumentException(
"Some inputs for " + mModelName + " have outputs while some don't.");
}
}
}
return inOutList;
} | private List<InferenceInOutSequence> getInputOutputAssets() throws IOException {
List<InferenceInOutSequence> inOutList =
getInputOutputAssets(mContext, mInputOutputAssets, mInputOutputDatasets);
Boolean lastGolden = null;
for (InferenceInOutSequence sequence : inOutList) {
mHasGoldenOutputs = sequence.hasGoldenOutput();
if (lastGolden == null) {
lastGolden = mHasGoldenOutputs;
} else {
if (lastGolden != mHasGoldenOutputs) {
throw new IllegalArgumentException(
"Some inputs for " + mModelName + " have outputs while some don't.");
}
}
}
return inOutList;
} | private list<inferenceinoutsequence> getinputoutputassets() throws ioexception { list<inferenceinoutsequence> inoutlist = getinputoutputassets(mcontext, minputoutputassets, minputoutputdatasets); boolean lastgolden = null; for (inferenceinoutsequence sequence : inoutlist) { mhasgoldenoutputs = sequence.hasgoldenoutput(); if (lastgolden == null) { lastgolden = mhasgoldenoutputs; } else { if (lastgolden != mhasgoldenoutputs) { throw new illegalargumentexception( "some inputs for " + mmodelname + " have outputs while some don't."); } } } return inoutlist; } | riscv-android-src/platform-test-mlts-benchmark | [
0,
1,
0,
0
] |
16,874 | public static List<InferenceInOutSequence> getInputOutputAssets(Context context,
InferenceInOutSequence.FromAssets[] inputOutputAssets,
InferenceInOutSequence.FromDataset[] inputOutputDatasets) throws IOException {
// TODO: Caching, don't read inputs for every inference
List<InferenceInOutSequence> inOutList = new ArrayList<>();
if (inputOutputAssets != null) {
for (InferenceInOutSequence.FromAssets ioAsset : inputOutputAssets) {
inOutList.add(ioAsset.readAssets(context.getAssets()));
}
}
if (inputOutputDatasets != null) {
for (InferenceInOutSequence.FromDataset dataset : inputOutputDatasets) {
inOutList.addAll(dataset.readDataset(context.getAssets(), context.getCacheDir()));
}
}
return inOutList;
} | public static List<InferenceInOutSequence> getInputOutputAssets(Context context,
InferenceInOutSequence.FromAssets[] inputOutputAssets,
InferenceInOutSequence.FromDataset[] inputOutputDatasets) throws IOException {
List<InferenceInOutSequence> inOutList = new ArrayList<>();
if (inputOutputAssets != null) {
for (InferenceInOutSequence.FromAssets ioAsset : inputOutputAssets) {
inOutList.add(ioAsset.readAssets(context.getAssets()));
}
}
if (inputOutputDatasets != null) {
for (InferenceInOutSequence.FromDataset dataset : inputOutputDatasets) {
inOutList.addAll(dataset.readDataset(context.getAssets(), context.getCacheDir()));
}
}
return inOutList;
} | public static list<inferenceinoutsequence> getinputoutputassets(context context, inferenceinoutsequence.fromassets[] inputoutputassets, inferenceinoutsequence.fromdataset[] inputoutputdatasets) throws ioexception { list<inferenceinoutsequence> inoutlist = new arraylist<>(); if (inputoutputassets != null) { for (inferenceinoutsequence.fromassets ioasset : inputoutputassets) { inoutlist.add(ioasset.readassets(context.getassets())); } } if (inputoutputdatasets != null) { for (inferenceinoutsequence.fromdataset dataset : inputoutputdatasets) { inoutlist.addall(dataset.readdataset(context.getassets(), context.getcachedir())); } } return inoutlist; } | riscv-android-src/platform-test-mlts-benchmark | [
0,
1,
0,
0
] |
33,261 | @Test
public void testJMeterJtlFileWithTransactionsAndTransactionsBug() throws Exception {
String[] runArgs = {
"--report.dir",
temporaryFolder.getRoot().getPath(),
"jmeter",
"--report-logline-type",
"transaction",
"-gt",
"-gh",
"-gr",
"-gp",
"-group-by-http-status",
"src/test/resources/jmeter/result_with_url_and_sample_transaction_lines_with_bug.jtl"
};
String result = LogRaterRunTestUtil.getOutputFromLogRater(runArgs);
System.out.printf("[%s]%n", result);
// sampler samples (e.g. http request)
assertFalse("Contains 1 GET Jaw Kat", result.contains("GET Jaw Kat,200,1"));
assertFalse("Contains 1 GET Jaw Yaw", result.contains("GET Jaw Yaw,200,1"));
// transaction controller samples
assertTrue("Contains 1 cog", result.contains("cog,200,1"));
assertTrue("Contains 1 som", result.contains("som,200,1"));
// buggy transaction controller samples, are classified as transactions because URL column is available
assertTrue("Contains 3 ane transactions", result.contains("ane,204,3"));
assertTrue("The files contain 5 lines of which 0 failure lines.", result.contains("total-counter-success-total,5,0"));
} | @Test
public void testJMeterJtlFileWithTransactionsAndTransactionsBug() throws Exception {
String[] runArgs = {
"--report.dir",
temporaryFolder.getRoot().getPath(),
"jmeter",
"--report-logline-type",
"transaction",
"-gt",
"-gh",
"-gr",
"-gp",
"-group-by-http-status",
"src/test/resources/jmeter/result_with_url_and_sample_transaction_lines_with_bug.jtl"
};
String result = LogRaterRunTestUtil.getOutputFromLogRater(runArgs);
System.out.printf("[%s]%n", result);
assertFalse("Contains 1 GET Jaw Kat", result.contains("GET Jaw Kat,200,1"));
assertFalse("Contains 1 GET Jaw Yaw", result.contains("GET Jaw Yaw,200,1"));
assertTrue("Contains 1 cog", result.contains("cog,200,1"));
assertTrue("Contains 1 som", result.contains("som,200,1"));
assertTrue("Contains 3 ane transactions", result.contains("ane,204,3"));
assertTrue("The files contain 5 lines of which 0 failure lines.", result.contains("total-counter-success-total,5,0"));
} | @test public void testjmeterjtlfilewithtransactionsandtransactionsbug() throws exception { string[] runargs = { "--report.dir", temporaryfolder.getroot().getpath(), "jmeter", "--report-logline-type", "transaction", "-gt", "-gh", "-gr", "-gp", "-group-by-http-status", "src/test/resources/jmeter/result_with_url_and_sample_transaction_lines_with_bug.jtl" }; string result = lograterruntestutil.getoutputfromlograter(runargs); system.out.printf("[%s]%n", result); assertfalse("contains 1 get jaw kat", result.contains("get jaw kat,200,1")); assertfalse("contains 1 get jaw yaw", result.contains("get jaw yaw,200,1")); asserttrue("contains 1 cog", result.contains("cog,200,1")); asserttrue("contains 1 som", result.contains("som,200,1")); asserttrue("contains 3 ane transactions", result.contains("ane,204,3")); asserttrue("the files contain 5 lines of which 0 failure lines.", result.contains("total-counter-success-total,5,0")); } | stokpop/lograter | [
0,
0,
0,
0
] |
33,268 | @Override
public CustomProfile extractUserProfile(User user) throws InvalidDefinitionException {
// fetch custom attributes
List<UserAttributes> userAttributes = user.getAttributes().stream()
.filter(ua -> !ua.getIdentifier().startsWith("aac."))
.collect(Collectors.toList());
// fetch identities
Collection<UserIdentity> identities = user.getIdentities();
if (identities.isEmpty()) {
return extract(userAttributes);
}
// TODO decide how to merge identities into a single profile
// for now get first identity, should be last logged in
UserIdentity id = identities.iterator().next();
CustomProfile profile = extract(mergeAttributes(userAttributes, id.getAttributes()));
return profile;
} | @Override
public CustomProfile extractUserProfile(User user) throws InvalidDefinitionException {
List<UserAttributes> userAttributes = user.getAttributes().stream()
.filter(ua -> !ua.getIdentifier().startsWith("aac."))
.collect(Collectors.toList());
Collection<UserIdentity> identities = user.getIdentities();
if (identities.isEmpty()) {
return extract(userAttributes);
}
UserIdentity id = identities.iterator().next();
CustomProfile profile = extract(mergeAttributes(userAttributes, id.getAttributes()));
return profile;
} | @override public customprofile extractuserprofile(user user) throws invaliddefinitionexception { list<userattributes> userattributes = user.getattributes().stream() .filter(ua -> !ua.getidentifier().startswith("aac.")) .collect(collectors.tolist()); collection<useridentity> identities = user.getidentities(); if (identities.isempty()) { return extract(userattributes); } useridentity id = identities.iterator().next(); customprofile profile = extract(mergeattributes(userattributes, id.getattributes())); return profile; } | smartcommunitylab/AAC | [
1,
0,
0,
0
] |
16,896 | public TypeUse getTypeUse(XSSimpleType owner) {
if(typeUse!=null)
return typeUse;
JCodeModel cm = getCodeModel();
JDefinedClass a;
try {
a = cm._class(adapter);
a.hide(); // we assume this is given by the user
a._extends(cm.ref(XmlAdapter.class).narrow(String.class).narrow(
cm.ref(type)));
} catch (JClassAlreadyExistsException e) {
a = e.getExistingClass();
}
// TODO: it's not correct to say that it adapts from String,
// but OTOH I don't think we can compute that.
typeUse = TypeUseFactory.adapt(
CBuiltinLeafInfo.STRING,
new CAdapter(a));
return typeUse;
} | public TypeUse getTypeUse(XSSimpleType owner) {
if(typeUse!=null)
return typeUse;
JCodeModel cm = getCodeModel();
JDefinedClass a;
try {
a = cm._class(adapter);
a.hide();
a._extends(cm.ref(XmlAdapter.class).narrow(String.class).narrow(
cm.ref(type)));
} catch (JClassAlreadyExistsException e) {
a = e.getExistingClass();
}
typeUse = TypeUseFactory.adapt(
CBuiltinLeafInfo.STRING,
new CAdapter(a));
return typeUse;
} | public typeuse gettypeuse(xssimpletype owner) { if(typeuse!=null) return typeuse; jcodemodel cm = getcodemodel(); jdefinedclass a; try { a = cm._class(adapter); a.hide(); a._extends(cm.ref(xmladapter.class).narrow(string.class).narrow( cm.ref(type))); } catch (jclassalreadyexistsexception e) { a = e.getexistingclass(); } typeuse = typeusefactory.adapt( cbuiltinleafinfo.string, new cadapter(a)); return typeuse; } | renshuaibing-aaron/jdk1.8-source-analysis | [
1,
0,
0,
0
] |
8,710 | public EntityIdentifier[] searchForGroups(
final String query,
final SearchMethod method,
@SuppressWarnings("unchecked") final Class leaftype) {
// only search for groups
if (leaftype != IPerson.class) {
return new EntityIdentifier[] {};
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Searching Grouper for groups matching query: " + query);
}
// result groups.
List<EntityIdentifier> groups = new ArrayList<EntityIdentifier>();
try {
// TODO: searches need to be performed against the group display
// name rather than the group key
GcFindGroups groupSearch = new GcFindGroups();
WsQueryFilter filter = new WsQueryFilter();
// is this an exact search or fuzzy
if ((method == SearchMethod.DISCRETE_CI) || (method == SearchMethod.DISCRETE)) {
filter.setQueryFilterType("FIND_BY_GROUP_NAME_EXACT");
} else {
filter.setQueryFilterType("FIND_BY_GROUP_NAME_APPROXIMATE");
}
filter.setGroupName(query);
groupSearch.assignQueryFilter(filter);
WsFindGroupsResults results = groupSearch.execute();
if (results != null && results.getGroupResults() != null) {
for (WsGroup g : results.getGroupResults()) {
if (validKey(g.getName())) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Retrieved group: " + g.getName());
}
groups.add(new EntityIdentifier(g.getName(), IEntityGroup.class));
}
}
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Returning " + groups.size() + " results for query " + query);
}
return groups.toArray(new EntityIdentifier[groups.size()]);
} catch (Exception e) {
LOGGER.warn(
"Exception while attempting to retrieve "
+ "search results for query "
+ query
+ " and entity type "
+ leaftype.getCanonicalName()
+ " : "
+ e.getMessage());
return new EntityIdentifier[] {};
}
} | public EntityIdentifier[] searchForGroups(
final String query,
final SearchMethod method,
@SuppressWarnings("unchecked") final Class leaftype) {
if (leaftype != IPerson.class) {
return new EntityIdentifier[] {};
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Searching Grouper for groups matching query: " + query);
}
List<EntityIdentifier> groups = new ArrayList<EntityIdentifier>();
try {
GcFindGroups groupSearch = new GcFindGroups();
WsQueryFilter filter = new WsQueryFilter();
if ((method == SearchMethod.DISCRETE_CI) || (method == SearchMethod.DISCRETE)) {
filter.setQueryFilterType("FIND_BY_GROUP_NAME_EXACT");
} else {
filter.setQueryFilterType("FIND_BY_GROUP_NAME_APPROXIMATE");
}
filter.setGroupName(query);
groupSearch.assignQueryFilter(filter);
WsFindGroupsResults results = groupSearch.execute();
if (results != null && results.getGroupResults() != null) {
for (WsGroup g : results.getGroupResults()) {
if (validKey(g.getName())) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Retrieved group: " + g.getName());
}
groups.add(new EntityIdentifier(g.getName(), IEntityGroup.class));
}
}
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Returning " + groups.size() + " results for query " + query);
}
return groups.toArray(new EntityIdentifier[groups.size()]);
} catch (Exception e) {
LOGGER.warn(
"Exception while attempting to retrieve "
+ "search results for query "
+ query
+ " and entity type "
+ leaftype.getCanonicalName()
+ " : "
+ e.getMessage());
return new EntityIdentifier[] {};
}
} | public entityidentifier[] searchforgroups( final string query, final searchmethod method, @suppresswarnings("unchecked") final class leaftype) { if (leaftype != iperson.class) { return new entityidentifier[] {}; } if (logger.isdebugenabled()) { logger.debug("searching grouper for groups matching query: " + query); } list<entityidentifier> groups = new arraylist<entityidentifier>(); try { gcfindgroups groupsearch = new gcfindgroups(); wsqueryfilter filter = new wsqueryfilter(); if ((method == searchmethod.discrete_ci) || (method == searchmethod.discrete)) { filter.setqueryfiltertype("find_by_group_name_exact"); } else { filter.setqueryfiltertype("find_by_group_name_approximate"); } filter.setgroupname(query); groupsearch.assignqueryfilter(filter); wsfindgroupsresults results = groupsearch.execute(); if (results != null && results.getgroupresults() != null) { for (wsgroup g : results.getgroupresults()) { if (validkey(g.getname())) { if (logger.istraceenabled()) { logger.trace("retrieved group: " + g.getname()); } groups.add(new entityidentifier(g.getname(), ientitygroup.class)); } } } if (logger.isdebugenabled()) { logger.debug("returning " + groups.size() + " results for query " + query); } return groups.toarray(new entityidentifier[groups.size()]); } catch (exception e) { logger.warn( "exception while attempting to retrieve " + "search results for query " + query + " and entity type " + leaftype.getcanonicalname() + " : " + e.getmessage()); return new entityidentifier[] {}; } } | shreeshreee/Uport | [
1,
0,
0,
0
] |
33,289 | @Override
public void run(RunNotifier runNotifier) {
runNotifier.fireTestRunStarted(topLevelDesc);
for (SelfTest selfTest : selfTests) {
for (SelfTestCase selfTestCase : selfTest.getSelfTestCases()) {
Description currentDesc = createTestCaseDescription(selfTestCase);
runNotifier.fireTestStarted(currentDesc);
try {
selfTest.runTest(selfTestCase);
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException
| AssertionError e) {
runNotifier.fireTestFailure(new Failure(currentDesc, e));
}
runNotifier.fireTestFinished(currentDesc);
}
}
// TODO: Fill in the Result instance appropriately.
runNotifier.fireTestRunFinished(new Result());
} | @Override
public void run(RunNotifier runNotifier) {
runNotifier.fireTestRunStarted(topLevelDesc);
for (SelfTest selfTest : selfTests) {
for (SelfTestCase selfTestCase : selfTest.getSelfTestCases()) {
Description currentDesc = createTestCaseDescription(selfTestCase);
runNotifier.fireTestStarted(currentDesc);
try {
selfTest.runTest(selfTestCase);
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException
| AssertionError e) {
runNotifier.fireTestFailure(new Failure(currentDesc, e));
}
runNotifier.fireTestFinished(currentDesc);
}
}
runNotifier.fireTestRunFinished(new Result());
} | @override public void run(runnotifier runnotifier) { runnotifier.firetestrunstarted(topleveldesc); for (selftest selftest : selftests) { for (selftestcase selftestcase : selftest.getselftestcases()) { description currentdesc = createtestcasedescription(selftestcase); runnotifier.fireteststarted(currentdesc); try { selftest.runtest(selftestcase); } catch (illegalaccessexception | illegalargumentexception | invocationtargetexception | assertionerror e) { runnotifier.firetestfailure(new failure(currentdesc, e)); } runnotifier.firetestfinished(currentdesc); } } runnotifier.firetestrunfinished(new result()); } | scottmcmaster/blogcode | [
0,
1,
0,
0
] |
/**
 * Notifies a specific connected browser that the children of
 * {@code parentId} have changed.
 *
 * @param remoteUserInfo identifies the connected browser to notify
 * @param parentId       media ID whose child list changed
 * @param options        extras forwarded with the change notification
 */
@Override
public void notifyChildrenChanged(final RemoteUserInfo remoteUserInfo,
        final String parentId, final Bundle options) {
    // TODO(Post-P): Need a way to notify to a specific browser in framework.
    // Until then, delegate to the compat implementation.
    notifyChildrenChangedForCompat(remoteUserInfo, parentId, options);
}
/**
 * Notifies a specific connected browser that the children of
 * {@code parentId} have changed.
 *
 * <p>Delegates to the compat implementation, since the framework path has
 * no per-browser notification — presumably a Post-P limitation; confirm.
 *
 * @param remoteUserInfo identifies the connected browser to notify
 * @param parentId       media ID whose child list changed
 * @param options        extras forwarded with the change notification
 */
@Override
public void notifyChildrenChanged(final RemoteUserInfo remoteUserInfo,
        final String parentId, final Bundle options) {
    notifyChildrenChangedForCompat(remoteUserInfo, parentId, options);
}
1,
0,
0,
0
] |