| id (int64, 22–34.9k) | original_code (string, 31–107k chars) | code_wo_comment (string, 29–77.3k chars) | cleancode (string, 25–62.1k chars) | repo (string, 6–65 chars) | label (sequence, length 4) |
---|---|---|---|---|---|
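The column summary above follows a Hugging Face dataset-viewer preview: each row carries a Java snippet (original_code), a comment-stripped variant (code_wo_comment), a lowercased, whitespace-flattened variant (cleancode), the source repository, and a label vector of fixed length 4. As a minimal sketch of how rows with this schema could be inspected, assuming the dataset is hosted on the Hugging Face Hub under a hypothetical ID (the real dataset ID is not stated in this preview):

```python
from datasets import load_dataset  # Hugging Face "datasets" library

# "your-org/java-clean-code" is a placeholder ID, not the real dataset name.
ds = load_dataset("your-org/java-clean-code", split="train")

row = ds[0]
# e.g. 34902, "apache/incubator-datasketches-memory", [1, 0, 0, 0]
print(row["id"], row["repo"], row["label"])
# Per-row string lengths of the three code variants.
print(len(row["original_code"]), len(row["code_wo_comment"]), len(row["cleancode"]))
```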
34,902 | public static int[] parseJavaVersion(final String jdkVer) {
final int p0, p1;
try {
String[] parts = jdkVer.trim().split("[^0-9\\.]");//grab only number groups and "."
parts = parts[0].split("\\."); //split out the number groups
p0 = Integer.parseInt(parts[0]); //the first number group
p1 = (parts.length > 1) ? Integer.parseInt(parts[1]) : 0; //2nd number group, or 0
} catch (final NumberFormatException | ArrayIndexOutOfBoundsException e) {
throw new IllegalArgumentException("Improper Java -version string: " + jdkVer + "\n" + e);
}
//checkJavaVersion(jdkVer, p0, p1); //TODO Optional to omit this.
return new int[] {p0, p1};
} | public static int[] parseJavaVersion(final String jdkVer) {
final int p0, p1;
try {
String[] parts = jdkVer.trim().split("[^0-9\\.]")
parts = parts[0].split("\\.");
p0 = Integer.parseInt(parts[0]);
p1 = (parts.length > 1) ? Integer.parseInt(parts[1]) : 0;
} catch (final NumberFormatException | ArrayIndexOutOfBoundsException e) {
throw new IllegalArgumentException("Improper Java -version string: " + jdkVer + "\n" + e);
}
return new int[] {p0, p1};
} | public static int[] parsejavaversion(final string jdkver) { final int p0, p1; try { string[] parts = jdkver.trim().split("[^0-9\\.]") parts = parts[0].split("\\."); p0 = integer.parseint(parts[0]); p1 = (parts.length > 1) ? integer.parseint(parts[1]) : 0; } catch (final numberformatexception | arrayindexoutofboundsexception e) { throw new illegalargumentexception("improper java -version string: " + jdkver + "\n" + e); } return new int[] {p0, p1}; } | apache/incubator-datasketches-memory | [
1, 0, 0, 0 ] |
34,920 | @Deprecated
public static float getScreenPixelRatio() {
//TODO replace with API from Java 9 once that is released
if (Platform.isMac()) {
Toolkit toolkit = Toolkit.getDefaultToolkit();
Object contentScaleFactor = toolkit.getDesktopProperty("apple.awt.contentScaleFactor");
if (contentScaleFactor instanceof Number) {
return ((Number) contentScaleFactor).floatValue();
} else {
return 1.0f;
}
} else {
return 1.0f;
}
} | @Deprecated
public static float getScreenPixelRatio() {
if (Platform.isMac()) {
Toolkit toolkit = Toolkit.getDefaultToolkit();
Object contentScaleFactor = toolkit.getDesktopProperty("apple.awt.contentScaleFactor");
if (contentScaleFactor instanceof Number) {
return ((Number) contentScaleFactor).floatValue();
} else {
return 1.0f;
}
} else {
return 1.0f;
}
} | @deprecated public static float getscreenpixelratio() { if (platform.ismac()) { toolkit toolkit = toolkit.getdefaulttoolkit(); object contentscalefactor = toolkit.getdesktopproperty("apple.awt.contentscalefactor"); if (contentscalefactor instanceof number) { return ((number) contentscalefactor).floatvalue(); } else { return 1.0f; } } else { return 1.0f; } } | colorizenl/colorize-java-commons | [
1, 0, 0, 0 ] |
10,429 | @Override
public Object onExecute(ExecutionEvent event) throws ExecutionException {
project = PluginUtil.getSelectedProject();
try {
if (project == null) {
throw new Exception(Constant.ERROR_NO_SELECTED_PROJECT);
}
basePath = project.getLocation().toString();
if (MavenUtils.isMavenProject(project)) {
destinationPath = MavenUtils.getTargetPath(project);
} else {
destinationPath = Paths.get(basePath, Constant.DOCKERFILE_FOLDER, project.getName() + ".war")
.normalize().toString();
}
// Stop running container
String runningContainerId = DockerRuntime.getInstance().getRunningContainerId(basePath);
if (runningContainerId != null) {
boolean stop = MessageDialog.openConfirm(PluginUtil.getParentShell(), "Confirmation",
Constant.MESSAGE_CONFIRM_STOP_CONTAINER);
if (stop) {
DockerRuntime.getInstance().cleanRuningContainer(basePath);
} else {
return null;
}
}
// Build artifact
ConsoleLogger.info(String.format(Constant.MESSAGE_EXPORTING_PROJECT, destinationPath));
if (MavenUtils.isMavenProject(project)) {
MavenExecuteAction action = new MavenExecuteAction(MAVEN_GOALS);
IContainer container;
container = MavenUtils.getPomFile(project).getParent();
action.launch(container, () -> {
// TODO: callback after mvn package done. IMPORTANT
buildAndRun(event);
return null;
});
} else {
WarUtil.export(project, destinationPath);
buildAndRun(event);
}
} catch (Exception e) {
e.printStackTrace();
ConsoleLogger.error(String.format(Constant.ERROR_RUNNING_DOCKER, e.getMessage()));
sendTelemetryOnException(event, e);
}
return null;
} | @Override
public Object onExecute(ExecutionEvent event) throws ExecutionException {
project = PluginUtil.getSelectedProject();
try {
if (project == null) {
throw new Exception(Constant.ERROR_NO_SELECTED_PROJECT);
}
basePath = project.getLocation().toString();
if (MavenUtils.isMavenProject(project)) {
destinationPath = MavenUtils.getTargetPath(project);
} else {
destinationPath = Paths.get(basePath, Constant.DOCKERFILE_FOLDER, project.getName() + ".war")
.normalize().toString();
}
String runningContainerId = DockerRuntime.getInstance().getRunningContainerId(basePath);
if (runningContainerId != null) {
boolean stop = MessageDialog.openConfirm(PluginUtil.getParentShell(), "Confirmation",
Constant.MESSAGE_CONFIRM_STOP_CONTAINER);
if (stop) {
DockerRuntime.getInstance().cleanRuningContainer(basePath);
} else {
return null;
}
}
ConsoleLogger.info(String.format(Constant.MESSAGE_EXPORTING_PROJECT, destinationPath));
if (MavenUtils.isMavenProject(project)) {
MavenExecuteAction action = new MavenExecuteAction(MAVEN_GOALS);
IContainer container;
container = MavenUtils.getPomFile(project).getParent();
action.launch(container, () -> {
buildAndRun(event);
return null;
});
} else {
WarUtil.export(project, destinationPath);
buildAndRun(event);
}
} catch (Exception e) {
e.printStackTrace();
ConsoleLogger.error(String.format(Constant.ERROR_RUNNING_DOCKER, e.getMessage()));
sendTelemetryOnException(event, e);
}
return null;
} | @override public object onexecute(executionevent event) throws executionexception { project = pluginutil.getselectedproject(); try { if (project == null) { throw new exception(constant.error_no_selected_project); } basepath = project.getlocation().tostring(); if (mavenutils.ismavenproject(project)) { destinationpath = mavenutils.gettargetpath(project); } else { destinationpath = paths.get(basepath, constant.dockerfile_folder, project.getname() + ".war") .normalize().tostring(); } string runningcontainerid = dockerruntime.getinstance().getrunningcontainerid(basepath); if (runningcontainerid != null) { boolean stop = messagedialog.openconfirm(pluginutil.getparentshell(), "confirmation", constant.message_confirm_stop_container); if (stop) { dockerruntime.getinstance().cleanruningcontainer(basepath); } else { return null; } } consolelogger.info(string.format(constant.message_exporting_project, destinationpath)); if (mavenutils.ismavenproject(project)) { mavenexecuteaction action = new mavenexecuteaction(maven_goals); icontainer container; container = mavenutils.getpomfile(project).getparent(); action.launch(container, () -> { buildandrun(event); return null; }); } else { warutil.export(project, destinationpath); buildandrun(event); } } catch (exception e) { e.printstacktrace(); consolelogger.error(string.format(constant.error_running_docker, e.getmessage())); sendtelemetryonexception(event, e); } return null; } | andxu/azure-tools-for-java | [
0, 1, 0, 0 ] |
2,322 | public ValueInstantiator createOptimized()
{
/* [Issue#11]: Need to avoid optimizing if we use delegate- or
* property-based creators.
*/
if (_originalInstantiator.canCreateFromObjectWith()
|| _originalInstantiator.canCreateUsingDelegate()) {
return null;
}
// for now, only consider need to handle default creator
AnnotatedWithParams defaultCreator = _originalInstantiator.getDefaultCreator();
if (defaultCreator != null) {
AnnotatedElement elem = defaultCreator.getAnnotated();
if (elem instanceof Constructor<?>) {
// First things first: as per [Issue#34], can NOT access private ctors or methods
Constructor<?> ctor = (Constructor<?>) elem;
if (!Modifier.isPrivate(ctor.getModifiers())) {
return createSubclass(ctor, null).with(_originalInstantiator);
}
} else if (elem instanceof Method) {
Method m = (Method) elem;
int mods = m.getModifiers();
// and as above, can't access private ones
if (Modifier.isStatic(mods) && !Modifier.isPrivate(mods)) {
return createSubclass(null, m).with(_originalInstantiator);
}
}
}
return null;
} | public ValueInstantiator createOptimized()
{
if (_originalInstantiator.canCreateFromObjectWith()
|| _originalInstantiator.canCreateUsingDelegate()) {
return null;
}
AnnotatedWithParams defaultCreator = _originalInstantiator.getDefaultCreator();
if (defaultCreator != null) {
AnnotatedElement elem = defaultCreator.getAnnotated();
if (elem instanceof Constructor<?>) {
Constructor<?> ctor = (Constructor<?>) elem;
if (!Modifier.isPrivate(ctor.getModifiers())) {
return createSubclass(ctor, null).with(_originalInstantiator);
}
} else if (elem instanceof Method) {
Method m = (Method) elem;
int mods = m.getModifiers();
if (Modifier.isStatic(mods) && !Modifier.isPrivate(mods)) {
return createSubclass(null, m).with(_originalInstantiator);
}
}
}
return null;
} | public valueinstantiator createoptimized() { if (_originalinstantiator.cancreatefromobjectwith() || _originalinstantiator.cancreateusingdelegate()) { return null; } annotatedwithparams defaultcreator = _originalinstantiator.getdefaultcreator(); if (defaultcreator != null) { annotatedelement elem = defaultcreator.getannotated(); if (elem instanceof constructor<?>) { constructor<?> ctor = (constructor<?>) elem; if (!modifier.isprivate(ctor.getmodifiers())) { return createsubclass(ctor, null).with(_originalinstantiator); } } else if (elem instanceof method) { method m = (method) elem; int mods = m.getmodifiers(); if (modifier.isstatic(mods) && !modifier.isprivate(mods)) { return createsubclass(null, m).with(_originalinstantiator); } } } return null; } | bsanchezb/jackson-modules-base | [
1, 0, 0, 0 ] |
10,679 | @Override
public void flush(long timeoutMillis) {
// TODO: Flush transport
} | @Override
public void flush(long timeoutMillis) {
} | @override public void flush(long timeoutmillis) { } | bsergean/sentry-android | [
0, 1, 0, 0 ] |
18,946 | protected boolean startsWithReferenceAnchor(TextLine line) {
if (line == null) {
return false;
}
String text = line.getText();
if (text == null) {
return false;
}
return REFERENCE_ANCHOR.matcher(text).find();
} | protected boolean startsWithReferenceAnchor(TextLine line) {
if (line == null) {
return false;
}
String text = line.getText();
if (text == null) {
return false;
}
return REFERENCE_ANCHOR.matcher(text).find();
} | protected boolean startswithreferenceanchor(textline line) { if (line == null) { return false; } string text = line.gettext(); if (text == null) { return false; } return reference_anchor.matcher(text).find(); } | ckorzen/pdfact | [
0, 1, 0, 0 ] |
10,779 | public Future<?> greetMeSometimeAsync(String requestType,
AsyncHandler<GreetMeSometimeResponse> asyncHandler) {
return null;
/*not called */
} | public Future<?> greetMeSometimeAsync(String requestType,
AsyncHandler<GreetMeSometimeResponse> asyncHandler) {
return null;
} | public future<?> greetmesometimeasync(string requesttype, asynchandler<greetmesometimeresponse> asynchandler) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,780 | public Response<GreetMeSometimeResponse> greetMeSometimeAsync(String requestType) {
return null;
/*not called */
} | public Response<GreetMeSometimeResponse> greetMeSometimeAsync(String requestType) {
return null;
} | public response<greetmesometimeresponse> greetmesometimeasync(string requesttype) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,781 | public Response<TestDocLitFaultResponse> testDocLitFaultAsync(String faultType) {
return null;
/*not called */
} | public Response<TestDocLitFaultResponse> testDocLitFaultAsync(String faultType) {
return null;
} | public response<testdoclitfaultresponse> testdoclitfaultasync(string faulttype) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,782 | public Future<?> testDocLitFaultAsync(String faultType, AsyncHandler ah) {
return null;
/*not called */
} | public Future<?> testDocLitFaultAsync(String faultType, AsyncHandler ah) {
return null;
} | public future<?> testdoclitfaultasync(string faulttype, asynchandler ah) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,783 | public Future<?> testDocLitBareAsync(String bare, AsyncHandler ah) {
return null;
/* not called */
} | public Future<?> testDocLitBareAsync(String bare, AsyncHandler ah) {
return null;
} | public future<?> testdoclitbareasync(string bare, asynchandler ah) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,784 | public Response<BareDocumentResponse> testDocLitBareAsync(String bare) {
return null;
/* not called */
} | public Response<BareDocumentResponse> testDocLitBareAsync(String bare) {
return null;
} | public response<baredocumentresponse> testdoclitbareasync(string bare) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,785 | public Future<?> greetMeAsync(String requestType, AsyncHandler<GreetMeResponse> asyncHandler) {
return null;
/*not called */
} | public Future<?> greetMeAsync(String requestType, AsyncHandler<GreetMeResponse> asyncHandler) {
return null;
} | public future<?> greetmeasync(string requesttype, asynchandler<greetmeresponse> asynchandler) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,786 | public Response<GreetMeResponse> greetMeAsync(String requestType) {
return null;
/*not called */
} | public Response<GreetMeResponse> greetMeAsync(String requestType) {
return null;
} | public response<greetmeresponse> greetmeasync(string requesttype) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,787 | public Future<?> greetMeLaterAsync(long requestType, AsyncHandler<GreetMeLaterResponse> asyncHandler) {
return null;
/*not called */
} | public Future<?> greetMeLaterAsync(long requestType, AsyncHandler<GreetMeLaterResponse> asyncHandler) {
return null;
} | public future<?> greetmelaterasync(long requesttype, asynchandler<greetmelaterresponse> asynchandler) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,788 | public Response<GreetMeLaterResponse> greetMeLaterAsync(long requestType) {
return null;
/*not called */
} | public Response<GreetMeLaterResponse> greetMeLaterAsync(long requestType) {
return null;
} | public response<greetmelaterresponse> greetmelaterasync(long requesttype) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,789 | public Future<?> sayHiAsync(AsyncHandler<SayHiResponse> asyncHandler) {
return null;
/*not called */
} | public Future<?> sayHiAsync(AsyncHandler<SayHiResponse> asyncHandler) {
return null;
} | public future<?> sayhiasync(asynchandler<sayhiresponse> asynchandler) { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,790 | public Response<SayHiResponse> sayHiAsync() {
return null;
/*not called */
} | public Response<SayHiResponse> sayHiAsync() {
return null;
} | public response<sayhiresponse> sayhiasync() { return null; } | apache/servicemix4-features | [
0, 0, 0, 0 ] |
10,852 | private ClusteringIndexFilter makeIndexFilter(AbstractSimplePerColumnSecondaryIndex index, ReadCommand command)
{
if (command instanceof SinglePartitionReadCommand)
{
// Note: as yet there's no route to get here - a 2i query *always* uses a
// PartitionRangeReadCommand. This is here in preparation for coming changes
// in SelectStatement.
SinglePartitionReadCommand sprc = (SinglePartitionReadCommand)command;
ByteBuffer pk = sprc.partitionKey().getKey();
ClusteringIndexFilter filter = sprc.clusteringIndexFilter();
if (filter instanceof ClusteringIndexNamesFilter)
{
NavigableSet<Clustering> requested = ((ClusteringIndexNamesFilter)filter).requestedRows();
NavigableSet<Clustering> clusterings = new TreeSet<>(index.getIndexComparator());
for (Clustering c : requested)
clusterings.add(index.makeIndexClustering(pk, c, (Cell)null).takeAlias());
return new ClusteringIndexNamesFilter(clusterings, filter.isReversed());
}
else
{
Slices requested = ((ClusteringIndexSliceFilter)filter).requestedSlices();
Slices.Builder builder = new Slices.Builder(index.getIndexComparator());
for (Slice slice : requested)
builder.add(index.makeIndexBound(pk, slice.start()), index.makeIndexBound(pk, slice.end()));
return new ClusteringIndexSliceFilter(builder.build(), filter.isReversed());
}
}
else
{
DataRange dataRange = ((PartitionRangeReadCommand)command).dataRange();
AbstractBounds<PartitionPosition> range = dataRange.keyRange();
Slice slice = Slice.ALL;
/*
* XXX: If the range requested is a token range, we'll have to start at the beginning (and stop at the end) of
* the indexed row unfortunately (which will be inefficient), because we have no way to intuit the smallest possible
* key having a given token. A potential fix would be to actually store the token along the key in the indexed row.
*/
if (range.left instanceof DecoratedKey)
{
// the right hand side of the range may not be a DecoratedKey (for instance if we're paging),
// but if it is, we can optimise slightly by restricting the slice
if (range.right instanceof DecoratedKey)
{
DecoratedKey startKey = (DecoratedKey) range.left;
DecoratedKey endKey = (DecoratedKey) range.right;
Slice.Bound start = Slice.Bound.BOTTOM;
Slice.Bound end = Slice.Bound.TOP;
/*
* For index queries over a range, we can't do a whole lot better than querying everything for the key range, though for
* slice queries where we can slightly restrict the beginning and end.
*/
if (!dataRange.isNamesQuery())
{
ClusteringIndexSliceFilter startSliceFilter = ((ClusteringIndexSliceFilter) dataRange.clusteringIndexFilter(startKey));
ClusteringIndexSliceFilter endSliceFilter = ((ClusteringIndexSliceFilter) dataRange.clusteringIndexFilter(endKey));
// We can't effectively support reversed queries when we have a range, so we don't support it
// (or through post-query reordering) and shouldn't get there.
assert !startSliceFilter.isReversed() && !endSliceFilter.isReversed();
Slices startSlices = startSliceFilter.requestedSlices();
Slices endSlices = endSliceFilter.requestedSlices();
if (startSlices.size() > 0)
start = startSlices.get(0).start();
if (endSlices.size() > 0)
end = endSlices.get(endSlices.size() - 1).end();
}
slice = Slice.make(index.makeIndexBound(startKey.getKey(), start),
index.makeIndexBound(endKey.getKey(), end));
}
else
{
// otherwise, just start the index slice from the key we do have
slice = Slice.make(index.makeIndexBound(((DecoratedKey)range.left).getKey(), Slice.Bound.BOTTOM),
Slice.Bound.TOP);
}
}
return new ClusteringIndexSliceFilter(Slices.with(index.getIndexComparator(), slice), false);
}
} | private ClusteringIndexFilter makeIndexFilter(AbstractSimplePerColumnSecondaryIndex index, ReadCommand command)
{
if (command instanceof SinglePartitionReadCommand)
{
SinglePartitionReadCommand sprc = (SinglePartitionReadCommand)command;
ByteBuffer pk = sprc.partitionKey().getKey();
ClusteringIndexFilter filter = sprc.clusteringIndexFilter();
if (filter instanceof ClusteringIndexNamesFilter)
{
NavigableSet<Clustering> requested = ((ClusteringIndexNamesFilter)filter).requestedRows();
NavigableSet<Clustering> clusterings = new TreeSet<>(index.getIndexComparator());
for (Clustering c : requested)
clusterings.add(index.makeIndexClustering(pk, c, (Cell)null).takeAlias());
return new ClusteringIndexNamesFilter(clusterings, filter.isReversed());
}
else
{
Slices requested = ((ClusteringIndexSliceFilter)filter).requestedSlices();
Slices.Builder builder = new Slices.Builder(index.getIndexComparator());
for (Slice slice : requested)
builder.add(index.makeIndexBound(pk, slice.start()), index.makeIndexBound(pk, slice.end()));
return new ClusteringIndexSliceFilter(builder.build(), filter.isReversed());
}
}
else
{
DataRange dataRange = ((PartitionRangeReadCommand)command).dataRange();
AbstractBounds<PartitionPosition> range = dataRange.keyRange();
Slice slice = Slice.ALL;
if (range.left instanceof DecoratedKey)
{
if (range.right instanceof DecoratedKey)
{
DecoratedKey startKey = (DecoratedKey) range.left;
DecoratedKey endKey = (DecoratedKey) range.right;
Slice.Bound start = Slice.Bound.BOTTOM;
Slice.Bound end = Slice.Bound.TOP;
if (!dataRange.isNamesQuery())
{
ClusteringIndexSliceFilter startSliceFilter = ((ClusteringIndexSliceFilter) dataRange.clusteringIndexFilter(startKey));
ClusteringIndexSliceFilter endSliceFilter = ((ClusteringIndexSliceFilter) dataRange.clusteringIndexFilter(endKey));
assert !startSliceFilter.isReversed() && !endSliceFilter.isReversed();
Slices startSlices = startSliceFilter.requestedSlices();
Slices endSlices = endSliceFilter.requestedSlices();
if (startSlices.size() > 0)
start = startSlices.get(0).start();
if (endSlices.size() > 0)
end = endSlices.get(endSlices.size() - 1).end();
}
slice = Slice.make(index.makeIndexBound(startKey.getKey(), start),
index.makeIndexBound(endKey.getKey(), end));
}
else
{
slice = Slice.make(index.makeIndexBound(((DecoratedKey)range.left).getKey(), Slice.Bound.BOTTOM),
Slice.Bound.TOP);
}
}
return new ClusteringIndexSliceFilter(Slices.with(index.getIndexComparator(), slice), false);
}
} | private clusteringindexfilter makeindexfilter(abstractsimplepercolumnsecondaryindex index, readcommand command) { if (command instanceof singlepartitionreadcommand) { singlepartitionreadcommand sprc = (singlepartitionreadcommand)command; bytebuffer pk = sprc.partitionkey().getkey(); clusteringindexfilter filter = sprc.clusteringindexfilter(); if (filter instanceof clusteringindexnamesfilter) { navigableset<clustering> requested = ((clusteringindexnamesfilter)filter).requestedrows(); navigableset<clustering> clusterings = new treeset<>(index.getindexcomparator()); for (clustering c : requested) clusterings.add(index.makeindexclustering(pk, c, (cell)null).takealias()); return new clusteringindexnamesfilter(clusterings, filter.isreversed()); } else { slices requested = ((clusteringindexslicefilter)filter).requestedslices(); slices.builder builder = new slices.builder(index.getindexcomparator()); for (slice slice : requested) builder.add(index.makeindexbound(pk, slice.start()), index.makeindexbound(pk, slice.end())); return new clusteringindexslicefilter(builder.build(), filter.isreversed()); } } else { datarange datarange = ((partitionrangereadcommand)command).datarange(); abstractbounds<partitionposition> range = datarange.keyrange(); slice slice = slice.all; if (range.left instanceof decoratedkey) { if (range.right instanceof decoratedkey) { decoratedkey startkey = (decoratedkey) range.left; decoratedkey endkey = (decoratedkey) range.right; slice.bound start = slice.bound.bottom; slice.bound end = slice.bound.top; if (!datarange.isnamesquery()) { clusteringindexslicefilter startslicefilter = ((clusteringindexslicefilter) datarange.clusteringindexfilter(startkey)); clusteringindexslicefilter endslicefilter = ((clusteringindexslicefilter) datarange.clusteringindexfilter(endkey)); assert !startslicefilter.isreversed() && !endslicefilter.isreversed(); slices startslices = startslicefilter.requestedslices(); slices endslices = endslicefilter.requestedslices(); if (startslices.size() > 0) start = startslices.get(0).start(); if (endslices.size() > 0) end = endslices.get(endslices.size() - 1).end(); } slice = slice.make(index.makeindexbound(startkey.getkey(), start), index.makeindexbound(endkey.getkey(), end)); } else { slice = slice.make(index.makeindexbound(((decoratedkey)range.left).getkey(), slice.bound.bottom), slice.bound.top); } } return new clusteringindexslicefilter(slices.with(index.getindexcomparator(), slice), false); } } | chaordic/cassandra | [
1, 0, 0, 0 ] |
10,876 | public static <T extends ColumnSet> boolean fillColumnSets(Catalog catalog, Schema schema, Collection<T> output,
final int tClassifierId) {
// --- precondition
if (catalog == null && schema == null) {
return false;
}
RelationalSwitch<T> relationalSwitch = new RelationalSwitch<T>() {
@Override
protected T doSwitch(int classifierID, EObject theEObject) {
if (theEObject.eClass().getClassifierID() != tClassifierId) {
return null;
} else {
return super.doSwitch(classifierID, theEObject);
}
}
@Override
public T caseColumnSet(ColumnSet object) {
return castObject(object);
}
@Override
public T caseTable(Table object) {
return castObject(object);
}
@Override
public T caseTdTable(TdTable object) {
return castObject(object);
}
@Override
public T caseTdView(TdView object) {
return castObject(object);
}
@Override
public T caseView(View object) {
return castObject(object);
}
@SuppressWarnings("unchecked")
private T castObject(Object object) {
return (T) object;
}
};
EList<ModelElement> elements = (schema != null) ? schema.getOwnedElement() : catalog.getOwnedElement();
if (elements.isEmpty()) {
// no element found
return false;
}
for (EObject elt : elements) {
T columnSet = relationalSwitch.doSwitch(elt);
if (columnSet != null) {
output.add(columnSet);
}
}
return true;
} | public static <T extends ColumnSet> boolean fillColumnSets(Catalog catalog, Schema schema, Collection<T> output,
final int tClassifierId) {
if (catalog == null && schema == null) {
return false;
}
RelationalSwitch<T> relationalSwitch = new RelationalSwitch<T>() {
@Override
protected T doSwitch(int classifierID, EObject theEObject) {
if (theEObject.eClass().getClassifierID() != tClassifierId) {
return null;
} else {
return super.doSwitch(classifierID, theEObject);
}
}
@Override
public T caseColumnSet(ColumnSet object) {
return castObject(object);
}
@Override
public T caseTable(Table object) {
return castObject(object);
}
@Override
public T caseTdTable(TdTable object) {
return castObject(object);
}
@Override
public T caseTdView(TdView object) {
return castObject(object);
}
@Override
public T caseView(View object) {
return castObject(object);
}
@SuppressWarnings("unchecked")
private T castObject(Object object) {
return (T) object;
}
};
EList<ModelElement> elements = (schema != null) ? schema.getOwnedElement() : catalog.getOwnedElement();
if (elements.isEmpty()) {
return false;
}
for (EObject elt : elements) {
T columnSet = relationalSwitch.doSwitch(elt);
if (columnSet != null) {
output.add(columnSet);
}
}
return true;
} | public static <t extends columnset> boolean fillcolumnsets(catalog catalog, schema schema, collection<t> output, final int tclassifierid) { if (catalog == null && schema == null) { return false; } relationalswitch<t> relationalswitch = new relationalswitch<t>() { @override protected t doswitch(int classifierid, eobject theeobject) { if (theeobject.eclass().getclassifierid() != tclassifierid) { return null; } else { return super.doswitch(classifierid, theeobject); } } @override public t casecolumnset(columnset object) { return castobject(object); } @override public t casetable(table object) { return castobject(object); } @override public t casetdtable(tdtable object) { return castobject(object); } @override public t casetdview(tdview object) { return castobject(object); } @override public t caseview(view object) { return castobject(object); } @suppresswarnings("unchecked") private t castobject(object object) { return (t) object; } }; elist<modelelement> elements = (schema != null) ? schema.getownedelement() : catalog.getownedelement(); if (elements.isempty()) { return false; } for (eobject elt : elements) { t columnset = relationalswitch.doswitch(elt); if (columnset != null) { output.add(columnset); } } return true; } | coheigea/tcommon-studio-se | [
0, 0, 0, 1 ] |
2,699 | public void controllerStatusChanged(boolean isPlaying) {
// Rocket doesn't implement this yet!
// Would be useful to control Rocket from demo's side
} | public void controllerStatusChanged(boolean isPlaying) {
} | public void controllerstatuschanged(boolean isplaying) { } | bjakke/moonlander | [
0, 1, 0, 0 ] |
2,728 | @Override
public boolean addPendingAuthorisationtoBlock() {
logger.info("DefaultBlockChainPublisher:: addPendingAuthorisationtoBlock method called with Client ID:");
block = new Block();
try{
ArrayList<BlockChainAuthorisedObj> pendingAuthorisationArrayList;
synchronized (pendingAuthorisationContainer){
pendingAuthorisationArrayList = pendingAuthorisationContainer.getPendingAccessTokens();
logger.info("pendingAuthorisationArrayList Size: "+ pendingAuthorisationArrayList.size());
pendingAuthorisationContainer.reset();
logger.info("pendingAuthorisationArrayList Size post reset: "+ pendingAuthorisationArrayList.size());
}
// We need to order Merkle tree 1st before acually
// Actual Merkel Tree implementation in Java. We probably will go with a less dramatic version for POC.
}catch (Exception ex){
logger.error(ex.getStackTrace());
}
return false;
} | @Override
public boolean addPendingAuthorisationtoBlock() {
logger.info("DefaultBlockChainPublisher:: addPendingAuthorisationtoBlock method called with Client ID:");
block = new Block();
try{
ArrayList<BlockChainAuthorisedObj> pendingAuthorisationArrayList;
synchronized (pendingAuthorisationContainer){
pendingAuthorisationArrayList = pendingAuthorisationContainer.getPendingAccessTokens();
logger.info("pendingAuthorisationArrayList Size: "+ pendingAuthorisationArrayList.size());
pendingAuthorisationContainer.reset();
logger.info("pendingAuthorisationArrayList Size post reset: "+ pendingAuthorisationArrayList.size());
}
}catch (Exception ex){
logger.error(ex.getStackTrace());
}
return false;
} | @override public boolean addpendingauthorisationtoblock() { logger.info("defaultblockchainpublisher:: addpendingauthorisationtoblock method called with client id:"); block = new block(); try{ arraylist<blockchainauthorisedobj> pendingauthorisationarraylist; synchronized (pendingauthorisationcontainer){ pendingauthorisationarraylist = pendingauthorisationcontainer.getpendingaccesstokens(); logger.info("pendingauthorisationarraylist size: "+ pendingauthorisationarraylist.size()); pendingauthorisationcontainer.reset(); logger.info("pendingauthorisationarraylist size post reset: "+ pendingauthorisationarraylist.size()); } }catch (exception ex){ logger.error(ex.getstacktrace()); } return false; } | anubandhan/spring-security-oauth-BlockChain | [
1, 0, 0, 0 ] |
19,202 | private void getToken(JSONArray args) {
if (args == null || args.length() == 0) {
return;
}
try {
if (args.length() >= 5) {
JSONArray jsonArray = args.getJSONArray(2);
ArrayList<String> clientScopes = new ArrayList<String>();
if (jsonArray != null) {
for (int i = 0; i < jsonArray.length(); i++) {
clientScopes.add(jsonArray.get(i).toString());
}
}
this.clientId = args.getString(0);
this.clientSecret = args.getString(1);
this.clientScopes = clientScopes;
this.clientRedirectUri = args.getString(3);
this.production = args.getBoolean(4);
//todo send to background
SSOHelper.getToken(this, this.cordova.getActivity(), clientId, clientSecret, clientScopes, clientRedirectUri, production);
}
} catch (JSONException e) {
e.printStackTrace();
callbackContext.error(e.getMessage());
}
} | private void getToken(JSONArray args) {
if (args == null || args.length() == 0) {
return;
}
try {
if (args.length() >= 5) {
JSONArray jsonArray = args.getJSONArray(2);
ArrayList<String> clientScopes = new ArrayList<String>();
if (jsonArray != null) {
for (int i = 0; i < jsonArray.length(); i++) {
clientScopes.add(jsonArray.get(i).toString());
}
}
this.clientId = args.getString(0);
this.clientSecret = args.getString(1);
this.clientScopes = clientScopes;
this.clientRedirectUri = args.getString(3);
this.production = args.getBoolean(4);
SSOHelper.getToken(this, this.cordova.getActivity(), clientId, clientSecret, clientScopes, clientRedirectUri, production);
}
} catch (JSONException e) {
e.printStackTrace();
callbackContext.error(e.getMessage());
}
} | private void gettoken(jsonarray args) { if (args == null || args.length() == 0) { return; } try { if (args.length() >= 5) { jsonarray jsonarray = args.getjsonarray(2); arraylist<string> clientscopes = new arraylist<string>(); if (jsonarray != null) { for (int i = 0; i < jsonarray.length(); i++) { clientscopes.add(jsonarray.get(i).tostring()); } } this.clientid = args.getstring(0); this.clientsecret = args.getstring(1); this.clientscopes = clientscopes; this.clientredirecturi = args.getstring(3); this.production = args.getboolean(4); ssohelper.gettoken(this, this.cordova.getactivity(), clientid, clientsecret, clientscopes, clientredirecturi, production); } } catch (jsonexception e) { e.printstacktrace(); callbackcontext.error(e.getmessage()); } } | benbenxiaoluchi/com.accenture.cordova.plugin.sso | [
0, 1, 0, 0 ] |
19,203 | private void logout(JSONArray args) {
if (args == null || args.length() == 0) {
return;
}
try {
if (args.length() >= 5) {
JSONArray jsonArray = args.getJSONArray(2);
ArrayList<String> clientScopes = new ArrayList<String>();
if (jsonArray != null) {
for (int i = 0; i < jsonArray.length(); i++) {
clientScopes.add(jsonArray.get(i).toString());
}
}
this.clientId = args.getString(0);
this.clientSecret = args.getString(1);
this.clientScopes = clientScopes;
this.clientRedirectUri = args.getString(3);
this.production = args.getBoolean(4);
//todo send to background
SSOHelper.logout(this.cordova.getActivity(), clientId, clientSecret, production);
this.callbackContext.success();
}
} catch (JSONException e) {
e.printStackTrace();
callbackContext.error(e.getMessage());
}
} | private void logout(JSONArray args) {
if (args == null || args.length() == 0) {
return;
}
try {
if (args.length() >= 5) {
JSONArray jsonArray = args.getJSONArray(2);
ArrayList<String> clientScopes = new ArrayList<String>();
if (jsonArray != null) {
for (int i = 0; i < jsonArray.length(); i++) {
clientScopes.add(jsonArray.get(i).toString());
}
}
this.clientId = args.getString(0);
this.clientSecret = args.getString(1);
this.clientScopes = clientScopes;
this.clientRedirectUri = args.getString(3);
this.production = args.getBoolean(4);
SSOHelper.logout(this.cordova.getActivity(), clientId, clientSecret, production);
this.callbackContext.success();
}
} catch (JSONException e) {
e.printStackTrace();
callbackContext.error(e.getMessage());
}
} | private void logout(jsonarray args) { if (args == null || args.length() == 0) { return; } try { if (args.length() >= 5) { jsonarray jsonarray = args.getjsonarray(2); arraylist<string> clientscopes = new arraylist<string>(); if (jsonarray != null) { for (int i = 0; i < jsonarray.length(); i++) { clientscopes.add(jsonarray.get(i).tostring()); } } this.clientid = args.getstring(0); this.clientsecret = args.getstring(1); this.clientscopes = clientscopes; this.clientredirecturi = args.getstring(3); this.production = args.getboolean(4); ssohelper.logout(this.cordova.getactivity(), clientid, clientsecret, production); this.callbackcontext.success(); } } catch (jsonexception e) { e.printstacktrace(); callbackcontext.error(e.getmessage()); } } | benbenxiaoluchi/com.accenture.cordova.plugin.sso | [
0, 1, 0, 0 ] |
11,077 | public static int getCurveSize(String name) throws NoSuchAlgorithmException {
name = name.toLowerCase();
if (name.equals("secp224r1")) {
return 224;
} else if (name.equals("secp256r1")) {
return 256;
} else if (name.equals("secp384r1")) {
return 384;
} else if (name.equals("secp521r1")) {
return 521;
} else if (name.equals("secp256k1")) {
return 256;
} else if (name.equals("brainpoolp224r1")) {
return 224;
} else if (name.equals("brainpoolp224t1")) {
return 224;
} else if (name.equals("brainpoolp256r1")) {
return 256;
} else if (name.equals("brainpoolp256t1")) {
return 256;
} else if (name.equals("brainpoolp320r1")) {
return 320;
} else if (name.equals("brainpoolp320t1")) {
return 320;
} else if (name.equals("brainpoolp384r1")) {
return 384;
} else if (name.equals("brainpoolp384t1")) {
return 384;
} else if (name.equals("brainpoolp512r1")) {
return 512;
} else if (name.equals("brainpoolp512t1")) {
return 512;
} else {
throw new NoSuchAlgorithmException("Curve not implemented:" + name);
}
} | public static int getCurveSize(String name) throws NoSuchAlgorithmException {
name = name.toLowerCase();
if (name.equals("secp224r1")) {
return 224;
} else if (name.equals("secp256r1")) {
return 256;
} else if (name.equals("secp384r1")) {
return 384;
} else if (name.equals("secp521r1")) {
return 521;
} else if (name.equals("secp256k1")) {
return 256;
} else if (name.equals("brainpoolp224r1")) {
return 224;
} else if (name.equals("brainpoolp224t1")) {
return 224;
} else if (name.equals("brainpoolp256r1")) {
return 256;
} else if (name.equals("brainpoolp256t1")) {
return 256;
} else if (name.equals("brainpoolp320r1")) {
return 320;
} else if (name.equals("brainpoolp320t1")) {
return 320;
} else if (name.equals("brainpoolp384r1")) {
return 384;
} else if (name.equals("brainpoolp384t1")) {
return 384;
} else if (name.equals("brainpoolp512r1")) {
return 512;
} else if (name.equals("brainpoolp512t1")) {
return 512;
} else {
throw new NoSuchAlgorithmException("Curve not implemented:" + name);
}
} | public static int getcurvesize(string name) throws nosuchalgorithmexception { name = name.tolowercase(); if (name.equals("secp224r1")) { return 224; } else if (name.equals("secp256r1")) { return 256; } else if (name.equals("secp384r1")) { return 384; } else if (name.equals("secp521r1")) { return 521; } else if (name.equals("secp256k1")) { return 256; } else if (name.equals("brainpoolp224r1")) { return 224; } else if (name.equals("brainpoolp224t1")) { return 224; } else if (name.equals("brainpoolp256r1")) { return 256; } else if (name.equals("brainpoolp256t1")) { return 256; } else if (name.equals("brainpoolp320r1")) { return 320; } else if (name.equals("brainpoolp320t1")) { return 320; } else if (name.equals("brainpoolp384r1")) { return 384; } else if (name.equals("brainpoolp384t1")) { return 384; } else if (name.equals("brainpoolp512r1")) { return 512; } else if (name.equals("brainpoolp512t1")) { return 512; } else { throw new nosuchalgorithmexception("curve not implemented:" + name); } } | avineshwar/wycheproof | [
0, 1, 0, 0 ] |
2,922 | protected Boolean Authenticated()
{
String OauthToken = request().getHeader("token");
if (OauthToken == null || OauthToken.isEmpty()) {
return false;
}
Oauth token = Oauth
.find
.where()
.eq("token", OauthToken)
.eq("active", OathTokenActive)
.findUnique();
if (token != null) {
return true;
}
return false;
} | protected Boolean Authenticated()
{
String OauthToken = request().getHeader("token");
if (OauthToken == null || OauthToken.isEmpty()) {
return false;
}
Oauth token = Oauth
.find
.where()
.eq("token", OauthToken)
.eq("active", OathTokenActive)
.findUnique();
if (token != null) {
return true;
}
return false;
} | protected boolean authenticated() { string oauthtoken = request().getheader("token"); if (oauthtoken == null || oauthtoken.isempty()) { return false; } oauth token = oauth .find .where() .eq("token", oauthtoken) .eq("active", oathtokenactive) .findunique(); if (token != null) { return true; } return false; } | channing-froom/playframework-2.5.2 | [
0, 1, 0, 0 ] |
19,359 | @Override
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("Updating Connection:" + connection + " for integration " + integrationName + "_" + integrationVersion + " from " + baseUrl);
checkEnvProperties();
Connection conn = new Connection(connection, baseUrl, icsUser, icsPassword);
conn.setLog(getLog());
String projectDirectory = project.getBasedir().getAbsolutePath();
try {
//TODO add copy prop file to /target/config/ with filtering.
// Update - just copy the one file... for now lets copy them all
copyConfigFiles(projectDirectory + connectionConfigDir, projectDirectory + "/target/connections");
conn.setConfigDirectory(projectDirectory + "/target/connections");
conn.updateConnection();
} catch (Exception e) {
e.printStackTrace();
throw new MojoExecutionException(e.getMessage());
}
} | @Override
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("Updating Connection:" + connection + " for integration " + integrationName + "_" + integrationVersion + " from " + baseUrl);
checkEnvProperties();
Connection conn = new Connection(connection, baseUrl, icsUser, icsPassword);
conn.setLog(getLog());
String projectDirectory = project.getBasedir().getAbsolutePath();
try {
copyConfigFiles(projectDirectory + connectionConfigDir, projectDirectory + "/target/connections");
conn.setConfigDirectory(projectDirectory + "/target/connections");
conn.updateConnection();
} catch (Exception e) {
e.printStackTrace();
throw new MojoExecutionException(e.getMessage());
}
} | @override public void execute() throws mojoexecutionexception, mojofailureexception { getlog().info("updating connection:" + connection + " for integration " + integrationname + "_" + integrationversion + " from " + baseurl); checkenvproperties(); connection conn = new connection(connection, baseurl, icsuser, icspassword); conn.setlog(getlog()); string projectdirectory = project.getbasedir().getabsolutepath(); try { copyconfigfiles(projectdirectory + connectionconfigdir, projectdirectory + "/target/connections"); conn.setconfigdirectory(projectdirectory + "/target/connections"); conn.updateconnection(); } catch (exception e) { e.printstacktrace(); throw new mojoexecutionexception(e.getmessage()); } } | avioconsulting/ics-maven-plugin | [
1, 0, 0, 0 ] |
19,372 | static String resolve(String message, Object[] args) {
String formatted = String.format(message, args);
// TODO - fix! this case fails: ^cyan^warn^r^ if ^warn^ is evaluated first...really meant for ^cyan^ and ^r^
// TODO - to be resolved
for (String key : TERM_CODES.keySet()) {
TermCode termCode = TERM_CODES.get(key);
Matcher matcher = termCode.pattern.matcher(formatted);
if (matcher.find()) {
if (("warn".equals(key) && !warnLevel.get()) || ("info".equals(key) && !infoLevel.get())
|| ("dbug".equals(key) && !dbugLevel.get())) {
// this is a log statement for a disabled log-level, skip.
return null;
}
if (decorated.get()) {
String output = isColoredOutput() ? termCode.output : termCode.nonColoredOutput;
formatted = matcher.replaceAll(output);
}
}
}
return formatted;
} | static String resolve(String message, Object[] args) {
String formatted = String.format(message, args);
for (String key : TERM_CODES.keySet()) {
TermCode termCode = TERM_CODES.get(key);
Matcher matcher = termCode.pattern.matcher(formatted);
if (matcher.find()) {
if (("warn".equals(key) && !warnLevel.get()) || ("info".equals(key) && !infoLevel.get())
|| ("dbug".equals(key) && !dbugLevel.get())) {
return null;
}
if (decorated.get()) {
String output = isColoredOutput() ? termCode.output : termCode.nonColoredOutput;
formatted = matcher.replaceAll(output);
}
}
}
return formatted;
} | static string resolve(string message, object[] args) { string formatted = string.format(message, args); for (string key : term_codes.keyset()) { termcode termcode = term_codes.get(key); matcher matcher = termcode.pattern.matcher(formatted); if (matcher.find()) { if (("warn".equals(key) && !warnlevel.get()) || ("info".equals(key) && !infolevel.get()) || ("dbug".equals(key) && !dbuglevel.get())) { return null; } if (decorated.get()) { string output = iscoloredoutput() ? termcode.output : termcode.noncoloredoutput; formatted = matcher.replaceall(output); } } } return formatted; } | blangel/ply | [
0, 0, 1, 0 ] |
3,003 | public void removeSegment(IndexSegment segment) {
// TODO(saurabh): Explain reload scenario here
Iterator<PrimaryKey> primaryKeyIterator = getPrimaryKeyIterator(segment, _primaryKeyColumns);
while (primaryKeyIterator.hasNext()) {
PrimaryKey pk = primaryKeyIterator.next();
_primaryKeyToSegmentMap.compute(HashUtils.hashPrimaryKey(pk, _hashFunction),
(primaryKey, currentSegment) -> {
if (currentSegment == segment) {
return null;
} else {
return currentSegment;
}
});
}
_serverMetrics.setValueOfPartitionGauge(_tableNameWithType, _partitionId, ServerGauge.DEDUP_PRIMARY_KEYS_COUNT,
_primaryKeyToSegmentMap.size());
} | public void removeSegment(IndexSegment segment) {
Iterator<PrimaryKey> primaryKeyIterator = getPrimaryKeyIterator(segment, _primaryKeyColumns);
while (primaryKeyIterator.hasNext()) {
PrimaryKey pk = primaryKeyIterator.next();
_primaryKeyToSegmentMap.compute(HashUtils.hashPrimaryKey(pk, _hashFunction),
(primaryKey, currentSegment) -> {
if (currentSegment == segment) {
return null;
} else {
return currentSegment;
}
});
}
_serverMetrics.setValueOfPartitionGauge(_tableNameWithType, _partitionId, ServerGauge.DEDUP_PRIMARY_KEYS_COUNT,
_primaryKeyToSegmentMap.size());
} | public void removesegment(indexsegment segment) { iterator<primarykey> primarykeyiterator = getprimarykeyiterator(segment, _primarykeycolumns); while (primarykeyiterator.hasnext()) { primarykey pk = primarykeyiterator.next(); _primarykeytosegmentmap.compute(hashutils.hashprimarykey(pk, _hashfunction), (primarykey, currentsegment) -> { if (currentsegment == segment) { return null; } else { return currentsegment; } }); } _servermetrics.setvalueofpartitiongauge(_tablenamewithtype, _partitionid, servergauge.dedup_primary_keys_count, _primarykeytosegmentmap.size()); } | asf-transfer/pinot | [
0, 0, 0, 0 ] |
11,202 | protected InputStream getContentStream()
throws MessagingException {
try {
synchronized(this) {
if (contentStream == null) {
InputStream rawcontent = folder.getProtocol().retr(msgnum,
msgSize > 0 ? msgSize + hdrSize : 0);
if (rawcontent == null) {
expunged = true;
throw new MessageRemovedException(); // XXX - what else?
}
if (headers == null ||
((POP3Store)(folder.getStore())).forgetTopHeaders) {
headers = new InternetHeaders(rawcontent);
hdrSize =
(int)((SharedInputStream)rawcontent).getPosition();
} else {
/*
* Already have the headers, have to skip the headers
* in the content array and return the body.
*
* XXX - It seems that some mail servers return slightly
* different headers in the RETR results than were returned
* in the TOP results, so we can't depend on remembering
* the size of the headers from the TOP command and just
* skipping that many bytes. Instead, we have to process
* the content, skipping over the header until we come to
* the empty line that separates the header from the body.
*/
int offset = 0;
for (;;) {
int len = 0; // number of bytes in this line
int c1;
while ((c1 = rawcontent.read()) >= 0) {
if (c1 == '\n') // end of line
break;
else if (c1 == '\r') {
// got CR, is the next char LF?
if (rawcontent.available() > 0) {
rawcontent.mark(1);
if (rawcontent.read() != '\n')
rawcontent.reset();
}
break; // in any case, end of line
}
// not CR, NL, or CRLF, count the byte
len++;
}
// here when end of line or out of data
// if out of data, we're done
if (rawcontent.available() == 0)
break;
// if it was an empty line, we're done
if (len == 0)
break;
}
hdrSize =
(int)((SharedInputStream)rawcontent).getPosition();
}
contentStream =
((SharedInputStream)rawcontent).newStream(hdrSize, -1);
rawcontent = null; // help GC
}
}
} catch (EOFException eex) {
folder.close(false);
throw new FolderClosedException(folder, eex.toString());
} catch (IOException ex) {
throw new MessagingException("error fetching POP3 content", ex);
}
return super.getContentStream();
} | protected InputStream getContentStream()
throws MessagingException {
try {
synchronized(this) {
if (contentStream == null) {
InputStream rawcontent = folder.getProtocol().retr(msgnum,
msgSize > 0 ? msgSize + hdrSize : 0);
if (rawcontent == null) {
expunged = true;
throw new MessageRemovedException();
}
if (headers == null ||
((POP3Store)(folder.getStore())).forgetTopHeaders) {
headers = new InternetHeaders(rawcontent);
hdrSize =
(int)((SharedInputStream)rawcontent).getPosition();
} else {
int offset = 0;
for (;;) {
int len = 0;
int c1;
while ((c1 = rawcontent.read()) >= 0) {
if (c1 == '\n')
break;
else if (c1 == '\r') {
if (rawcontent.available() > 0) {
rawcontent.mark(1);
if (rawcontent.read() != '\n')
rawcontent.reset();
}
break;
}
len++;
}
if (rawcontent.available() == 0)
break;
if (len == 0)
break;
}
hdrSize =
(int)((SharedInputStream)rawcontent).getPosition();
}
contentStream =
((SharedInputStream)rawcontent).newStream(hdrSize, -1);
rawcontent = null;
}
}
} catch (EOFException eex) {
folder.close(false);
throw new FolderClosedException(folder, eex.toString());
} catch (IOException ex) {
throw new MessagingException("error fetching POP3 content", ex);
}
return super.getContentStream();
} | protected inputstream getcontentstream() throws messagingexception { try { synchronized(this) { if (contentstream == null) { inputstream rawcontent = folder.getprotocol().retr(msgnum, msgsize > 0 ? msgsize + hdrsize : 0); if (rawcontent == null) { expunged = true; throw new messageremovedexception(); } if (headers == null || ((pop3store)(folder.getstore())).forgettopheaders) { headers = new internetheaders(rawcontent); hdrsize = (int)((sharedinputstream)rawcontent).getposition(); } else { int offset = 0; for (;;) { int len = 0; int c1; while ((c1 = rawcontent.read()) >= 0) { if (c1 == '\n') break; else if (c1 == '\r') { if (rawcontent.available() > 0) { rawcontent.mark(1); if (rawcontent.read() != '\n') rawcontent.reset(); } break; } len++; } if (rawcontent.available() == 0) break; if (len == 0) break; } hdrsize = (int)((sharedinputstream)rawcontent).getposition(); } contentstream = ((sharedinputstream)rawcontent).newstream(hdrsize, -1); rawcontent = null; } } } catch (eofexception eex) { folder.close(false); throw new folderclosedexception(folder, eex.tostring()); } catch (ioexception ex) { throw new messagingexception("error fetching pop3 content", ex); } return super.getcontentstream(); } | cesarmo759/android_kernel_samsung_msm8916 | [
1, 0, 0, 0 ] |
11,203 | public void setHeader(String name, String value)
throws MessagingException {
// XXX - should check for read-only folder?
throw new IllegalWriteException("POP3 messages are read-only");
} | public void setHeader(String name, String value)
throws MessagingException {
throw new IllegalWriteException("POP3 messages are read-only");
} | public void setheader(string name, string value) throws messagingexception { throw new illegalwriteexception("pop3 messages are read-only"); } | cesarmo759/android_kernel_samsung_msm8916 | [
0, 1, 0, 0 ] |
11,204 | public void addHeader(String name, String value)
throws MessagingException {
// XXX - should check for read-only folder?
throw new IllegalWriteException("POP3 messages are read-only");
} | public void addHeader(String name, String value)
throws MessagingException {
throw new IllegalWriteException("POP3 messages are read-only");
} | public void addheader(string name, string value) throws messagingexception { throw new illegalwriteexception("pop3 messages are read-only"); } | cesarmo759/android_kernel_samsung_msm8916 | [
0, 1, 0, 0 ] |
11,205 | public void removeHeader(String name)
throws MessagingException {
// XXX - should check for read-only folder?
throw new IllegalWriteException("POP3 messages are read-only");
} | public void removeHeader(String name)
throws MessagingException {
throw new IllegalWriteException("POP3 messages are read-only");
} | public void removeheader(string name) throws messagingexception { throw new illegalwriteexception("pop3 messages are read-only"); } | cesarmo759/android_kernel_samsung_msm8916 | [
0, 1, 0, 0 ] |
11,206 | public void addHeaderLine(String line) throws MessagingException {
// XXX - should check for read-only folder?
throw new IllegalWriteException("POP3 messages are read-only");
} | public void addHeaderLine(String line) throws MessagingException {
throw new IllegalWriteException("POP3 messages are read-only");
} | public void addheaderline(string line) throws messagingexception { throw new illegalwriteexception("pop3 messages are read-only"); } | cesarmo759/android_kernel_samsung_msm8916 | [
0, 1, 0, 0 ] |
11,257 | private void findEvents(Context context, String name, MultiName multiName, int pos, Watcher watcher, ReferenceValue ref)
{
String srcTypeName = null;
if (watcher.isPartOfAnonObjectGraph())
{
srcTypeName = standardDefs.CLASS_OBJECTPROXY;
}
else if (! srcTypeStack.empty())
{
srcTypeName = srcTypeStack.peek();
}
Watcher parentWatcher = watcher.getParent();
if ((parentWatcher != null) && parentWatcher.isOperation())
{
watcher.addChangeEvent("resultForBinding");
}
else if (srcTypeName != null)
{
AbcClass watchedClass = typeTable.getClass(srcTypeName);
if (watchedClass != null)
{
if ( watchedClass.isSubclassOf(standardDefs.CLASS_OBJECTPROXY) )
{
watcher.setPartOfAnonObjectGraph(true);
}
List metaData = watchedClass.getMetaData(BINDABLE, true);
boolean foundEvents = addBindables(watcher, metaData);
boolean foundSource = false;
Variable variable = getVariable(context, watchedClass, ref, multiName);
if (variable != null)
{
metaData = variable.getMetaData(BINDABLE);
foundEvents = addBindables(watcher, metaData) || foundEvents;
metaData = variable.getMetaData(CHANGE_EVENT);
foundEvents = addChangeEvents(watcher, metaData) || foundEvents;
metaData = variable.getMetaData(NON_COMMITTING_CHANGE_EVENT);
foundEvents = addNonCommittingChangeEvents(watcher, metaData) || foundEvents;
// Object has a public static const variable names "length", which is
// some legacy compatibility crap left over from EMCA script 262, so
// we ignore it.
if (variable.isConst() &&
!(multiName.getLocalPart().equals("length") &&
variable.getDeclaringClassName().equals(SymbolTable.OBJECT)))
{
// We didn't really find any events, but we want
// to follow the same code path below as if we did.
foundEvents = true;
// TODO will this ever be something besides a PropertyWatcher?
if (watcher instanceof PropertyWatcher)
{
((PropertyWatcher) watcher).suppress();
}
}
// See comment above.
if (!(multiName.getLocalPart().equals("length") &&
variable.getDeclaringClassName().equals(SymbolTable.OBJECT)))
{
checkForStaticProperty(variable.isStatic(), watcher, srcTypeName);
}
foundSource = true;
}
if (!foundEvents)
{
Method getter = getGetter(context, watchedClass, ref, multiName);
if (getter != null)
{
metaData = getter.getMetaData(BINDABLE);
foundEvents = addBindables(watcher, metaData) || foundEvents;
metaData = getter.getMetaData(CHANGE_EVENT);
foundEvents = addChangeEvents(watcher, metaData) || foundEvents;
metaData = getter.getMetaData(NON_COMMITTING_CHANGE_EVENT);
foundEvents = addNonCommittingChangeEvents( watcher, metaData) || foundEvents;
checkForStaticProperty(getter.isStatic(), watcher, srcTypeName);
foundSource = true;
}
Method setter = getSetter(context, watchedClass, ref, multiName);
if (setter != null)
{
metaData = setter.getMetaData(BINDABLE);
foundEvents = addBindables(watcher, metaData) || foundEvents;
metaData = setter.getMetaData(CHANGE_EVENT);
foundEvents = addChangeEvents(watcher, metaData) || foundEvents;
metaData = setter.getMetaData(NON_COMMITTING_CHANGE_EVENT);
foundEvents = addNonCommittingChangeEvents(watcher, metaData) || foundEvents;
checkForStaticProperty(setter.isStatic(), watcher, srcTypeName);
foundSource = true;
}
else
{
if (getter != null)
{
// getters without setters are de facto const, use same bypass as above for const vars
foundEvents = true;
}
}
}
if (!foundSource)
{
Method function = getMethod(context, watchedClass, ref, multiName);
if (function != null)
{
metaData = function.getMetaData(BINDABLE);
foundEvents = addBindables(watcher, metaData) || foundEvents;
metaData = function.getMetaData(CHANGE_EVENT);
foundEvents = addChangeEvents(watcher, metaData) || foundEvents;
metaData = function.getMetaData(NON_COMMITTING_CHANGE_EVENT);
foundEvents = addNonCommittingChangeEvents(watcher, metaData) || foundEvents;
foundSource = true;
if (!foundEvents && callExpressionStack.isEmpty())
{
foundEvents = true;
// TODO will this ever be something besides a PropertyWatcher?
if (watcher instanceof PropertyWatcher)
{
((PropertyWatcher)watcher).suppress();
}
}
}
}
if ((!foundSource) && watchedClass.isSubclassOf(standardDefs.CLASS_ABSTRACTSERVICE))
{
watcher.setOperation(true);
}
else if (!foundEvents &&
!(watcher instanceof FunctionReturnWatcher) &&
!(watcher instanceof XMLWatcher) &&
!watcher.isOperation())
{
/***
* NOTE: when we've failed to find change events for properties of untyped or Object-typed parents, we go
* ahead and generate code to create a runtime PropertyWatcher with no change events specified. The lack
* of change events tells the runtime PW to introspect RTTI to discover change events associated with the
* actual type of the actual value being assigned to the property.
* OTOH for strongly-typed properties, we still require change events to be reachable at compile time.
*/
if (!(watchedClass.getName().equals(SymbolTable.OBJECT) || watchedClass.getName().equals(SymbolTable.NOTYPE)))
{
// TODO do we still want this to be configurable?
if (showBindingWarnings)
{
context.localizedWarning2(pos, new UnableToDetectChanges(name));
}
// TODO will this ever be something besides a PropertyWatcher?
if (watcher instanceof PropertyWatcher)
{
((PropertyWatcher)watcher).suppress();
}
}
}
}
}
} | private void findEvents(Context context, String name, MultiName multiName, int pos, Watcher watcher, ReferenceValue ref)
{
String srcTypeName = null;
if (watcher.isPartOfAnonObjectGraph())
{
srcTypeName = standardDefs.CLASS_OBJECTPROXY;
}
else if (! srcTypeStack.empty())
{
srcTypeName = srcTypeStack.peek();
}
Watcher parentWatcher = watcher.getParent();
if ((parentWatcher != null) && parentWatcher.isOperation())
{
watcher.addChangeEvent("resultForBinding");
}
else if (srcTypeName != null)
{
AbcClass watchedClass = typeTable.getClass(srcTypeName);
if (watchedClass != null)
{
if ( watchedClass.isSubclassOf(standardDefs.CLASS_OBJECTPROXY) )
{
watcher.setPartOfAnonObjectGraph(true);
}
List metaData = watchedClass.getMetaData(BINDABLE, true);
boolean foundEvents = addBindables(watcher, metaData);
boolean foundSource = false;
Variable variable = getVariable(context, watchedClass, ref, multiName);
if (variable != null)
{
metaData = variable.getMetaData(BINDABLE);
foundEvents = addBindables(watcher, metaData) || foundEvents;
metaData = variable.getMetaData(CHANGE_EVENT);
foundEvents = addChangeEvents(watcher, metaData) || foundEvents;
metaData = variable.getMetaData(NON_COMMITTING_CHANGE_EVENT);
foundEvents = addNonCommittingChangeEvents(watcher, metaData) || foundEvents;
if (variable.isConst() &&
!(multiName.getLocalPart().equals("length") &&
variable.getDeclaringClassName().equals(SymbolTable.OBJECT)))
{
foundEvents = true;
if (watcher instanceof PropertyWatcher)
{
((PropertyWatcher) watcher).suppress();
}
}
if (!(multiName.getLocalPart().equals("length") &&
variable.getDeclaringClassName().equals(SymbolTable.OBJECT)))
{
checkForStaticProperty(variable.isStatic(), watcher, srcTypeName);
}
foundSource = true;
}
if (!foundEvents)
{
Method getter = getGetter(context, watchedClass, ref, multiName);
if (getter != null)
{
metaData = getter.getMetaData(BINDABLE);
foundEvents = addBindables(watcher, metaData) || foundEvents;
metaData = getter.getMetaData(CHANGE_EVENT);
foundEvents = addChangeEvents(watcher, metaData) || foundEvents;
metaData = getter.getMetaData(NON_COMMITTING_CHANGE_EVENT);
foundEvents = addNonCommittingChangeEvents( watcher, metaData) || foundEvents;
checkForStaticProperty(getter.isStatic(), watcher, srcTypeName);
foundSource = true;
}
Method setter = getSetter(context, watchedClass, ref, multiName);
if (setter != null)
{
metaData = setter.getMetaData(BINDABLE);
foundEvents = addBindables(watcher, metaData) || foundEvents;
metaData = setter.getMetaData(CHANGE_EVENT);
foundEvents = addChangeEvents(watcher, metaData) || foundEvents;
metaData = setter.getMetaData(NON_COMMITTING_CHANGE_EVENT);
foundEvents = addNonCommittingChangeEvents(watcher, metaData) || foundEvents;
checkForStaticProperty(setter.isStatic(), watcher, srcTypeName);
foundSource = true;
}
else
{
if (getter != null)
{
foundEvents = true;
}
}
}
if (!foundSource)
{
Method function = getMethod(context, watchedClass, ref, multiName);
if (function != null)
{
metaData = function.getMetaData(BINDABLE);
foundEvents = addBindables(watcher, metaData) || foundEvents;
metaData = function.getMetaData(CHANGE_EVENT);
foundEvents = addChangeEvents(watcher, metaData) || foundEvents;
metaData = function.getMetaData(NON_COMMITTING_CHANGE_EVENT);
foundEvents = addNonCommittingChangeEvents(watcher, metaData) || foundEvents;
foundSource = true;
if (!foundEvents && callExpressionStack.isEmpty())
{
foundEvents = true;
if (watcher instanceof PropertyWatcher)
{
((PropertyWatcher)watcher).suppress();
}
}
}
}
if ((!foundSource) && watchedClass.isSubclassOf(standardDefs.CLASS_ABSTRACTSERVICE))
{
watcher.setOperation(true);
}
else if (!foundEvents &&
!(watcher instanceof FunctionReturnWatcher) &&
!(watcher instanceof XMLWatcher) &&
!watcher.isOperation())
{
if (!(watchedClass.getName().equals(SymbolTable.OBJECT) || watchedClass.getName().equals(SymbolTable.NOTYPE)))
{
if (showBindingWarnings)
{
context.localizedWarning2(pos, new UnableToDetectChanges(name));
}
if (watcher instanceof PropertyWatcher)
{
((PropertyWatcher)watcher).suppress();
}
}
}
}
}
} | private void findevents(context context, string name, multiname multiname, int pos, watcher watcher, referencevalue ref) { string srctypename = null; if (watcher.ispartofanonobjectgraph()) { srctypename = standarddefs.class_objectproxy; } else if (! srctypestack.empty()) { srctypename = srctypestack.peek(); } watcher parentwatcher = watcher.getparent(); if ((parentwatcher != null) && parentwatcher.isoperation()) { watcher.addchangeevent("resultforbinding"); } else if (srctypename != null) { abcclass watchedclass = typetable.getclass(srctypename); if (watchedclass != null) { if ( watchedclass.issubclassof(standarddefs.class_objectproxy) ) { watcher.setpartofanonobjectgraph(true); } list metadata = watchedclass.getmetadata(bindable, true); boolean foundevents = addbindables(watcher, metadata); boolean foundsource = false; variable variable = getvariable(context, watchedclass, ref, multiname); if (variable != null) { metadata = variable.getmetadata(bindable); foundevents = addbindables(watcher, metadata) || foundevents; metadata = variable.getmetadata(change_event); foundevents = addchangeevents(watcher, metadata) || foundevents; metadata = variable.getmetadata(non_committing_change_event); foundevents = addnoncommittingchangeevents(watcher, metadata) || foundevents; if (variable.isconst() && !(multiname.getlocalpart().equals("length") && variable.getdeclaringclassname().equals(symboltable.object))) { foundevents = true; if (watcher instanceof propertywatcher) { ((propertywatcher) watcher).suppress(); } } if (!(multiname.getlocalpart().equals("length") && variable.getdeclaringclassname().equals(symboltable.object))) { checkforstaticproperty(variable.isstatic(), watcher, srctypename); } foundsource = true; } if (!foundevents) { method getter = getgetter(context, watchedclass, ref, multiname); if (getter != null) { metadata = getter.getmetadata(bindable); foundevents = addbindables(watcher, metadata) || foundevents; metadata = getter.getmetadata(change_event); foundevents = addchangeevents(watcher, metadata) || foundevents; metadata = getter.getmetadata(non_committing_change_event); foundevents = addnoncommittingchangeevents( watcher, metadata) || foundevents; checkforstaticproperty(getter.isstatic(), watcher, srctypename); foundsource = true; } method setter = getsetter(context, watchedclass, ref, multiname); if (setter != null) { metadata = setter.getmetadata(bindable); foundevents = addbindables(watcher, metadata) || foundevents; metadata = setter.getmetadata(change_event); foundevents = addchangeevents(watcher, metadata) || foundevents; metadata = setter.getmetadata(non_committing_change_event); foundevents = addnoncommittingchangeevents(watcher, metadata) || foundevents; checkforstaticproperty(setter.isstatic(), watcher, srctypename); foundsource = true; } else { if (getter != null) { foundevents = true; } } } if (!foundsource) { method function = getmethod(context, watchedclass, ref, multiname); if (function != null) { metadata = function.getmetadata(bindable); foundevents = addbindables(watcher, metadata) || foundevents; metadata = function.getmetadata(change_event); foundevents = addchangeevents(watcher, metadata) || foundevents; metadata = function.getmetadata(non_committing_change_event); foundevents = addnoncommittingchangeevents(watcher, metadata) || foundevents; foundsource = true; if (!foundevents && callexpressionstack.isempty()) { foundevents = true; if (watcher instanceof propertywatcher) { ((propertywatcher)watcher).suppress(); } } } } if ((!foundsource) && watchedclass.issubclassof(standarddefs.class_abstractservice)) { watcher.setoperation(true); } else if (!foundevents && !(watcher instanceof functionreturnwatcher) && !(watcher instanceof xmlwatcher) && !watcher.isoperation()) { if (!(watchedclass.getname().equals(symboltable.object) || watchedclass.getname().equals(symboltable.notype))) { if (showbindingwarnings) { context.localizedwarning2(pos, new unabletodetectchanges(name)); } if (watcher instanceof propertywatcher) { ((propertywatcher)watcher).suppress(); } } } } } } | blackjyn/flex-sdk | [
1,
0,
0,
0
] |
11,345 | @Override
public void show() {
super.show();
this.x = r.nextFloat();
this.y = r.nextFloat();
float angle = r.nextFloat() * 2 * 3.14159f;
this.vx = (float)java.lang.Math.cos(angle);
this.vy = (float)java.lang.Math.sin(angle);
//todo instead of going back to main menu, pause the game with pause screen?
Gdx.input.setCatchBackKey(true);
getStage().addListener(new InputListener() {
@Override
public boolean keyUp(InputEvent event, int keycode) {
if (keycode == Input.Keys.BACK || keycode == Input.Keys.ESCAPE) {
app.setScreen(app.getMainScreen());
return true;
} else {
return false;
}
}
});
} | @Override
public void show() {
super.show();
this.x = r.nextFloat();
this.y = r.nextFloat();
float angle = r.nextFloat() * 2 * 3.14159f;
this.vx = (float)java.lang.Math.cos(angle);
this.vy = (float)java.lang.Math.sin(angle);
Gdx.input.setCatchBackKey(true);
getStage().addListener(new InputListener() {
@Override
public boolean keyUp(InputEvent event, int keycode) {
if (keycode == Input.Keys.BACK || keycode == Input.Keys.ESCAPE) {
app.setScreen(app.getMainScreen());
return true;
} else {
return false;
}
}
});
} | @override public void show() { super.show(); this.x = r.nextfloat(); this.y = r.nextfloat(); float angle = r.nextfloat() * 2 * 3.14159f; this.vx = (float)java.lang.math.cos(angle); this.vy = (float)java.lang.math.sin(angle); gdx.input.setcatchbackkey(true); getstage().addlistener(new inputlistener() { @override public boolean keyup(inputevent event, int keycode) { if (keycode == input.keys.back || keycode == input.keys.escape) { app.setscreen(app.getmainscreen()); return true; } else { return false; } } }); } | cmsd2/toyapp | [
1,
0,
0,
0
] |
19,729 | public void GetStatusOperation(org.oasis_open.docs.ws_tx.wsba._2006._06.GetStatus param5) {
// Todo fill this with the necessary business logic
} | public void GetStatusOperation(org.oasis_open.docs.ws_tx.wsba._2006._06.GetStatus param5) {
} | public void getstatusoperation(org.oasis_open.docs.ws_tx.wsba._2006._06.getstatus param5) { } | apache/kandula | [
0,
1,
0,
0
] |
19,730 | public void NotCompleted(org.oasis_open.docs.ws_tx.wsba._2006._06.NotCompleted param6)
{
// Todo fill this with the necessary business logic
} | public void NotCompleted(org.oasis_open.docs.ws_tx.wsba._2006._06.NotCompleted param6)
{
} | public void notcompleted(org.oasis_open.docs.ws_tx.wsba._2006._06.notcompleted param6) { } | apache/kandula | [
0,
1,
0,
0
] |
19,731 | public void StatusOperation(org.oasis_open.docs.ws_tx.wsba._2006._06.Status param7)
{
// Todo fill this with the necessary business logic
} | public void StatusOperation(org.oasis_open.docs.ws_tx.wsba._2006._06.Status param7)
{
} | public void statusoperation(org.oasis_open.docs.ws_tx.wsba._2006._06.status param7) { } | apache/kandula | [
0,
1,
0,
0
] |
11,728 | public GenomicLocation normalizeGenomicLocation(GenomicLocation genomicLocation) {
GenomicLocation normalizedGenomicLocation = new GenomicLocation();
// if original input is set in the incoming genomic location object then use the same value
// for the normalized genomic location object returned, otherwise set it to the
// string representation of the incoming genomic location object
if (genomicLocation.getOriginalInput() != null && !genomicLocation.getOriginalInput().isEmpty()) {
normalizedGenomicLocation.setOriginalInput(genomicLocation.getOriginalInput());
} else {
normalizedGenomicLocation.setOriginalInput(genomicLocation.toString());
}
// normalize chromosome name
String chr = chromosomeNormalizer(genomicLocation.getChromosome().trim());
normalizedGenomicLocation.setChromosome(chr);
// convert vcf style start,end,ref,alt to MAF style
Integer start = genomicLocation.getStart();
Integer end = genomicLocation.getEnd();
String ref = genomicLocation.getReferenceAllele().trim();
String var = genomicLocation.getVariantAllele().trim();
String prefix = "";
if (!ref.equals(var)) {
prefix = longestCommonPrefix(ref, var);
}
// Remove common prefix and adjust variant position accordingly
if (prefix.length() > 0) {
ref = ref.substring(prefix.length());
var = var.substring(prefix.length());
int nStart = start + prefix.length();
if (ref.length() == 0) {
nStart -= 1;
}
start = nStart;
}
normalizedGenomicLocation.setStart(start);
normalizedGenomicLocation.setEnd(end);
normalizedGenomicLocation.setReferenceAllele(ref);
normalizedGenomicLocation.setVariantAllele(var);
return normalizedGenomicLocation;
} | public GenomicLocation normalizeGenomicLocation(GenomicLocation genomicLocation) {
GenomicLocation normalizedGenomicLocation = new GenomicLocation();
if (genomicLocation.getOriginalInput() != null && !genomicLocation.getOriginalInput().isEmpty()) {
normalizedGenomicLocation.setOriginalInput(genomicLocation.getOriginalInput());
} else {
normalizedGenomicLocation.setOriginalInput(genomicLocation.toString());
}
String chr = chromosomeNormalizer(genomicLocation.getChromosome().trim());
normalizedGenomicLocation.setChromosome(chr);
Integer start = genomicLocation.getStart();
Integer end = genomicLocation.getEnd();
String ref = genomicLocation.getReferenceAllele().trim();
String var = genomicLocation.getVariantAllele().trim();
String prefix = "";
if (!ref.equals(var)) {
prefix = longestCommonPrefix(ref, var);
}
if (prefix.length() > 0) {
ref = ref.substring(prefix.length());
var = var.substring(prefix.length());
int nStart = start + prefix.length();
if (ref.length() == 0) {
nStart -= 1;
}
start = nStart;
}
normalizedGenomicLocation.setStart(start);
normalizedGenomicLocation.setEnd(end);
normalizedGenomicLocation.setReferenceAllele(ref);
normalizedGenomicLocation.setVariantAllele(var);
return normalizedGenomicLocation;
} | public genomiclocation normalizegenomiclocation(genomiclocation genomiclocation) { genomiclocation normalizedgenomiclocation = new genomiclocation(); if (genomiclocation.getoriginalinput() != null && !genomiclocation.getoriginalinput().isempty()) { normalizedgenomiclocation.setoriginalinput(genomiclocation.getoriginalinput()); } else { normalizedgenomiclocation.setoriginalinput(genomiclocation.tostring()); } string chr = chromosomenormalizer(genomiclocation.getchromosome().trim()); normalizedgenomiclocation.setchromosome(chr); integer start = genomiclocation.getstart(); integer end = genomiclocation.getend(); string ref = genomiclocation.getreferenceallele().trim(); string var = genomiclocation.getvariantallele().trim(); string prefix = ""; if (!ref.equals(var)) { prefix = longestcommonprefix(ref, var); } if (prefix.length() > 0) { ref = ref.substring(prefix.length()); var = var.substring(prefix.length()); int nstart = start + prefix.length(); if (ref.length() == 0) { nstart -= 1; } start = nstart; } normalizedgenomiclocation.setstart(start); normalizedgenomiclocation.setend(end); normalizedgenomiclocation.setreferenceallele(ref); normalizedgenomiclocation.setvariantallele(var); return normalizedgenomiclocation; } | as1000/genome-nexus | [
1,
0,
0,
0
] |
20,003 | public Builder withOriginalCourseId(final String originalCourseId) {
getObject().setOriginalCourseId(originalCourseId);
return thisBuilder();
} | public Builder withOriginalCourseId(final String originalCourseId) {
getObject().setOriginalCourseId(originalCourseId);
return thisBuilder();
} | public builder withoriginalcourseid(final string originalcourseid) { getobject().setoriginalcourseid(originalcourseid); return thisbuilder(); } | bcgov/EDUC-GRAD-REPORT-API | [
1,
0,
0,
0
] |
3,835 | protected Serializable[] getParamValues(Serializable[] rawParamValues) throws SQLException
{
int nonPositionedParametersCount = 0;
for (Serializable rawParamValue : rawParamValues)
{
if (rawParamValue != null && rawParamValue instanceof NonPositionedParameter) ++nonPositionedParametersCount;
}
Serializable[] paramValues = new Serializable[parameterNames.size() + nonPositionedParametersCount];
Entity entity = (Entity)Optional.ofNullable(getParent()).filter(x -> x instanceof Entity).orElse(null);
if (entity == null)
{
for (int i = 0; i < paramValues.length - nonPositionedParametersCount; ++i)
{
paramValues[i] = rawParamValues[paramMapping[i]];
}
}
else
{
for (int i = 0; i < paramValues.length - nonPositionedParametersCount; ++i)
{
String paramName = parameterNames.get(i);
paramValues[i] = entity.filterValue(paramName, rawParamValues[paramMapping[i]]);
}
}
// non positioned parameters should always be given last (CB TODO - document)
for (int i = 0; i < nonPositionedParametersCount; ++i)
{
paramValues[paramValues.length - nonPositionedParametersCount + i] = rawParamValues[rawParamValues.length - nonPositionedParametersCount + i];
}
return paramValues;
} | protected Serializable[] getParamValues(Serializable[] rawParamValues) throws SQLException
{
int nonPositionedParametersCount = 0;
for (Serializable rawParamValue : rawParamValues)
{
if (rawParamValue != null && rawParamValue instanceof NonPositionedParameter) ++nonPositionedParametersCount;
}
Serializable[] paramValues = new Serializable[parameterNames.size() + nonPositionedParametersCount];
Entity entity = (Entity)Optional.ofNullable(getParent()).filter(x -> x instanceof Entity).orElse(null);
if (entity == null)
{
for (int i = 0; i < paramValues.length - nonPositionedParametersCount; ++i)
{
paramValues[i] = rawParamValues[paramMapping[i]];
}
}
else
{
for (int i = 0; i < paramValues.length - nonPositionedParametersCount; ++i)
{
String paramName = parameterNames.get(i);
paramValues[i] = entity.filterValue(paramName, rawParamValues[paramMapping[i]]);
}
}
for (int i = 0; i < nonPositionedParametersCount; ++i)
{
paramValues[paramValues.length - nonPositionedParametersCount + i] = rawParamValues[rawParamValues.length - nonPositionedParametersCount + i];
}
return paramValues;
} | protected serializable[] getparamvalues(serializable[] rawparamvalues) throws sqlexception { int nonpositionedparameterscount = 0; for (serializable rawparamvalue : rawparamvalues) { if (rawparamvalue != null && rawparamvalue instanceof nonpositionedparameter) ++nonpositionedparameterscount; } serializable[] paramvalues = new serializable[parameternames.size() + nonpositionedparameterscount]; entity entity = (entity)optional.ofnullable(getparent()).filter(x -> x instanceof entity).orelse(null); if (entity == null) { for (int i = 0; i < paramvalues.length - nonpositionedparameterscount; ++i) { paramvalues[i] = rawparamvalues[parammapping[i]]; } } else { for (int i = 0; i < paramvalues.length - nonpositionedparameterscount; ++i) { string paramname = parameternames.get(i); paramvalues[i] = entity.filtervalue(paramname, rawparamvalues[parammapping[i]]); } } for (int i = 0; i < nonpositionedparameterscount; ++i) { paramvalues[paramvalues.length - nonpositionedparameterscount + i] = rawparamvalues[rawparamvalues.length - nonpositionedparameterscount + i]; } return paramvalues; } | arkanovicz/velocity-model | [
0,
0,
0,
0
] |
12,164 | public void testSeekToBeforeAttachedAsNextMediaPlayer() throws Throwable {
IBasicMediaPlayer player = createWrappedPlayerInstance();
IBasicMediaPlayer nextPlayer = null;
Object args = getTestParams();
// NOTE:
// seekTo() call will be ignored if the player is attached as a next
// player.
try {
SeekCompleteListenerObject seekComp = new SeekCompleteListenerObject();
// prepare
transitStateToPrepared(player, args);
nextPlayer = createNextPlayer(player, NextPlayerType.PREPARED);
nextPlayer.setOnSeekCompleteListener(seekComp);
// seek
int seekPosition = nextPlayer.getDuration() / 2;
nextPlayer.seekTo(seekPosition);
if (!seekComp.await(DEFAULT_EVENT_WAIT_DURATION)) {
fail();
}
// NOTE: valid position is returned
assertEquals(seekPosition, nextPlayer.getCurrentPosition());
setUnwrappedtNextPlayer(player, nextPlayer);
assertFalse(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
// start
player.start();
assertTrue(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
Thread.sleep(SHORT_EVENT_WAIT_DURATION);
assertTrue(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
// wait for the next player started
Thread.sleep(player.getDuration());
// NOTE: StandardMediaPlayer fails on this assertion if using NuPlayer
assertEquals(player.getDuration(), player.getCurrentPosition());
assertFalse(player.isPlaying());
assertTrue(nextPlayer.isPlaying());
// NOTE: valid current position position is returned
assertLargerThanOrEqual(seekPosition,
nextPlayer.getCurrentPosition());
// wait for the next player completion
Thread.sleep(nextPlayer.getDuration() - seekPosition
+ SHORT_EVENT_WAIT_DURATION);
if (true) {
Log.w("XXX",
"testSeekToBeforeAttachedAsNextMediaPlayer() - WORKAROUND");
// WTF! Android's native audio decoder gives very poor seek
// accuracy!
assertFalse(player.isPlaying());
assertTrue(nextPlayer.isPlaying());
// wait for the next player completion
Thread.sleep(seekPosition);
}
assertEquals(nextPlayer.getDuration(),
nextPlayer.getCurrentPosition());
assertFalse(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
} finally {
releaseQuietly(nextPlayer);
releaseQuietly(player);
}
} | public void testSeekToBeforeAttachedAsNextMediaPlayer() throws Throwable {
IBasicMediaPlayer player = createWrappedPlayerInstance();
IBasicMediaPlayer nextPlayer = null;
Object args = getTestParams();
try {
SeekCompleteListenerObject seekComp = new SeekCompleteListenerObject();
transitStateToPrepared(player, args);
nextPlayer = createNextPlayer(player, NextPlayerType.PREPARED);
nextPlayer.setOnSeekCompleteListener(seekComp);
int seekPosition = nextPlayer.getDuration() / 2;
nextPlayer.seekTo(seekPosition);
if (!seekComp.await(DEFAULT_EVENT_WAIT_DURATION)) {
fail();
}
assertEquals(seekPosition, nextPlayer.getCurrentPosition());
setUnwrappedtNextPlayer(player, nextPlayer);
assertFalse(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
player.start();
assertTrue(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
Thread.sleep(SHORT_EVENT_WAIT_DURATION);
assertTrue(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
Thread.sleep(player.getDuration());
assertEquals(player.getDuration(), player.getCurrentPosition());
assertFalse(player.isPlaying());
assertTrue(nextPlayer.isPlaying());
assertLargerThanOrEqual(seekPosition,
nextPlayer.getCurrentPosition());
Thread.sleep(nextPlayer.getDuration() - seekPosition
+ SHORT_EVENT_WAIT_DURATION);
if (true) {
Log.w("XXX",
"testSeekToBeforeAttachedAsNextMediaPlayer() - WORKAROUND");
assertFalse(player.isPlaying());
assertTrue(nextPlayer.isPlaying());
Thread.sleep(seekPosition);
}
assertEquals(nextPlayer.getDuration(),
nextPlayer.getCurrentPosition());
assertFalse(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
} finally {
releaseQuietly(nextPlayer);
releaseQuietly(player);
}
} | public void testseektobeforeattachedasnextmediaplayer() throws throwable { ibasicmediaplayer player = createwrappedplayerinstance(); ibasicmediaplayer nextplayer = null; object args = gettestparams(); try { seekcompletelistenerobject seekcomp = new seekcompletelistenerobject(); transitstatetoprepared(player, args); nextplayer = createnextplayer(player, nextplayertype.prepared); nextplayer.setonseekcompletelistener(seekcomp); int seekposition = nextplayer.getduration() / 2; nextplayer.seekto(seekposition); if (!seekcomp.await(default_event_wait_duration)) { fail(); } assertequals(seekposition, nextplayer.getcurrentposition()); setunwrappedtnextplayer(player, nextplayer); assertfalse(player.isplaying()); assertfalse(nextplayer.isplaying()); player.start(); asserttrue(player.isplaying()); assertfalse(nextplayer.isplaying()); thread.sleep(short_event_wait_duration); asserttrue(player.isplaying()); assertfalse(nextplayer.isplaying()); thread.sleep(player.getduration()); assertequals(player.getduration(), player.getcurrentposition()); assertfalse(player.isplaying()); asserttrue(nextplayer.isplaying()); assertlargerthanorequal(seekposition, nextplayer.getcurrentposition()); thread.sleep(nextplayer.getduration() - seekposition + short_event_wait_duration); if (true) { log.w("xxx", "testseektobeforeattachedasnextmediaplayer() - workaround"); assertfalse(player.isplaying()); asserttrue(nextplayer.isplaying()); thread.sleep(seekposition); } assertequals(nextplayer.getduration(), nextplayer.getcurrentposition()); assertfalse(player.isplaying()); assertfalse(nextplayer.isplaying()); } finally { releasequietly(nextplayer); releasequietly(player); } } | cirnoftw/android-openslmediaplayer | [
1,
0,
1,
0
] |
12,165 | public void testSeekToAfterAttachedAsNextMediaPlayer() throws Throwable {
IBasicMediaPlayer player = createWrappedPlayerInstance();
IBasicMediaPlayer nextPlayer = null;
Object args = getTestParams();
// NOTE:
// seekTo() call will be ignored if the player is attached as a next
// player.
try {
// prepare
transitStateToPrepared(player, args);
nextPlayer = createNextPlayer(player, NextPlayerType.PREPARED);
setUnwrappedtNextPlayer(player, nextPlayer);
assertFalse(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
// start
player.start();
assertTrue(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
int seekPosition = nextPlayer.getDuration() / 2;
SeekCompleteListenerObject seekComp = new SeekCompleteListenerObject();
nextPlayer.setOnSeekCompleteListener(seekComp);
nextPlayer.seekTo(seekPosition);
if (!seekComp.await(DEFAULT_EVENT_WAIT_DURATION)) {
fail();
}
// NOTE: valid position is returned
assertEquals(seekPosition, nextPlayer.getCurrentPosition());
Thread.sleep(SHORT_EVENT_WAIT_DURATION);
assertTrue(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
// wait for the next player started
Thread.sleep(player.getDuration());
// NOTE: StandardMediaPlayer fails on this assertion if using NuPlayer
assertEquals(player.getDuration(), player.getCurrentPosition());
assertFalse(player.isPlaying());
assertTrue(nextPlayer.isPlaying());
// NOTE: valid current position position is returned
// XXX Galaxy S4 (SC-04E, Android 4.4.2) fails on this assertion
assertLargerThanOrEqual(seekPosition,
nextPlayer.getCurrentPosition());
// wait for the next player completion
Thread.sleep(nextPlayer.getDuration() - seekPosition
+ SHORT_EVENT_WAIT_DURATION);
if (true) {
Log.w("XXX",
"testSeekToAfterAttachedAsNextMediaPlayer() - WORKAROUND");
// WTF! Android's native audio decoder gives very poor seek
// accuracy!
assertFalse(player.isPlaying());
assertTrue(nextPlayer.isPlaying());
// wait for the next player completion
Thread.sleep(seekPosition);
}
assertEquals(nextPlayer.getDuration(),
nextPlayer.getCurrentPosition());
assertFalse(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
} finally {
releaseQuietly(nextPlayer);
releaseQuietly(player);
}
} | public void testSeekToAfterAttachedAsNextMediaPlayer() throws Throwable {
IBasicMediaPlayer player = createWrappedPlayerInstance();
IBasicMediaPlayer nextPlayer = null;
Object args = getTestParams();
try {
transitStateToPrepared(player, args);
nextPlayer = createNextPlayer(player, NextPlayerType.PREPARED);
setUnwrappedtNextPlayer(player, nextPlayer);
assertFalse(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
player.start();
assertTrue(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
int seekPosition = nextPlayer.getDuration() / 2;
SeekCompleteListenerObject seekComp = new SeekCompleteListenerObject();
nextPlayer.setOnSeekCompleteListener(seekComp);
nextPlayer.seekTo(seekPosition);
if (!seekComp.await(DEFAULT_EVENT_WAIT_DURATION)) {
fail();
}
assertEquals(seekPosition, nextPlayer.getCurrentPosition());
Thread.sleep(SHORT_EVENT_WAIT_DURATION);
assertTrue(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
Thread.sleep(player.getDuration());
assertEquals(player.getDuration(), player.getCurrentPosition());
assertFalse(player.isPlaying());
assertTrue(nextPlayer.isPlaying());
assertLargerThanOrEqual(seekPosition,
nextPlayer.getCurrentPosition());
Thread.sleep(nextPlayer.getDuration() - seekPosition
+ SHORT_EVENT_WAIT_DURATION);
if (true) {
Log.w("XXX",
"testSeekToAfterAttachedAsNextMediaPlayer() - WORKAROUND");
assertFalse(player.isPlaying());
assertTrue(nextPlayer.isPlaying());
Thread.sleep(seekPosition);
}
assertEquals(nextPlayer.getDuration(),
nextPlayer.getCurrentPosition());
assertFalse(player.isPlaying());
assertFalse(nextPlayer.isPlaying());
} finally {
releaseQuietly(nextPlayer);
releaseQuietly(player);
}
} | public void testseektoafterattachedasnextmediaplayer() throws throwable { ibasicmediaplayer player = createwrappedplayerinstance(); ibasicmediaplayer nextplayer = null; object args = gettestparams(); try { transitstatetoprepared(player, args); nextplayer = createnextplayer(player, nextplayertype.prepared); setunwrappedtnextplayer(player, nextplayer); assertfalse(player.isplaying()); assertfalse(nextplayer.isplaying()); player.start(); asserttrue(player.isplaying()); assertfalse(nextplayer.isplaying()); int seekposition = nextplayer.getduration() / 2; seekcompletelistenerobject seekcomp = new seekcompletelistenerobject(); nextplayer.setonseekcompletelistener(seekcomp); nextplayer.seekto(seekposition); if (!seekcomp.await(default_event_wait_duration)) { fail(); } assertequals(seekposition, nextplayer.getcurrentposition()); thread.sleep(short_event_wait_duration); asserttrue(player.isplaying()); assertfalse(nextplayer.isplaying()); thread.sleep(player.getduration()); assertequals(player.getduration(), player.getcurrentposition()); assertfalse(player.isplaying()); asserttrue(nextplayer.isplaying()); assertlargerthanorequal(seekposition, nextplayer.getcurrentposition()); thread.sleep(nextplayer.getduration() - seekposition + short_event_wait_duration); if (true) { log.w("xxx", "testseektoafterattachedasnextmediaplayer() - workaround"); assertfalse(player.isplaying()); asserttrue(nextplayer.isplaying()); thread.sleep(seekposition); } assertequals(nextplayer.getduration(), nextplayer.getcurrentposition()); assertfalse(player.isplaying()); assertfalse(nextplayer.isplaying()); } finally { releasequietly(nextplayer); releasequietly(player); } } | cirnoftw/android-openslmediaplayer | [
1,
0,
1,
0
] |
20,677 | private <U> CompletableFuture<U> supplyInContext(Supplier<U> supplier, String opDescription) {
return CompletableFuture.supplyAsync(() -> {
// TODO: explore a more relaxed locking strategy.
writeLock.lock();
try {
return supplier.get();
} catch (Throwable ex) {
log.error("Exception in P4Runtime client of {}, executing {}", deviceId, opDescription, ex);
throw ex;
} finally {
writeLock.unlock();
}
}, contextExecutor);
} | private <U> CompletableFuture<U> supplyInContext(Supplier<U> supplier, String opDescription) {
return CompletableFuture.supplyAsync(() -> {
writeLock.lock();
try {
return supplier.get();
} catch (Throwable ex) {
log.error("Exception in P4Runtime client of {}, executing {}", deviceId, opDescription, ex);
throw ex;
} finally {
writeLock.unlock();
}
}, contextExecutor);
} | private <u> completablefuture<u> supplyincontext(supplier<u> supplier, string opdescription) { return completablefuture.supplyasync(() -> { writelock.lock(); try { return supplier.get(); } catch (throwable ex) { log.error("exception in p4runtime client of {}, executing {}", deviceid, opdescription, ex); throw ex; } finally { writelock.unlock(); } }, contextexecutor); } | antiguru/onos | [
1,
0,
0,
0
] |
20,862 | private void fetchMoreEntries() throws SQLException, AuditLogEntryException {
LongRange idsToRead = getIdsToRead();
// No more entries to read
if (idsToRead.getMaximumLong() == 0) {
return;
}
// TODO: Remove left outer join and command type filter once the
// exchange partition bug is fixed in HIVE-12215
String queryFormatString = "SELECT a.id, a.create_time, "
+ "command_type, command, name, category, "
+ "type, serialized_object "
+ "FROM %s a LEFT OUTER JOIN %s b on a.id = b.audit_log_id "
+ "WHERE a.id >= ? AND a.id <= ? "
+ "AND (command_type IS NULL OR command_type "
+ "NOT IN('SHOWTABLES', 'SHOWPARTITIONS', 'SWITCHDATABASE')) "
+ "ORDER BY id "
// Get read locks on the specified rows to prevent skipping of rows that haven't committed
// yet, but have an ID between idsToRead. For example, one transaction starts and
// inserts id = 1, but another transaction starts, inserts, and commits i=2 before the
// first transaction commits. Locking can also be done with serializable isolation level.
+ "LOCK IN SHARE MODE";
String query = String.format(queryFormatString,
auditLogTableName, outputObjectsTableName,
idsToRead.getMinimumLong(), idsToRead.getMaximumLong());
Connection connection = dbConnectionFactory.getConnection();
PreparedStatement ps = connection.prepareStatement(query);
int index = 1;
ps.setLong(index++, idsToRead.getMinimumLong());
ps.setLong(index++, idsToRead.getMaximumLong());
ResultSet rs = ps.executeQuery();
long id = -1;
Timestamp createTime = null;
HiveOperation commandType = null;
String command = null;
String objectName;
String objectCategory;
String objectType;
String objectSerialized;
long previouslyReadId = -1;
Timestamp previouslyReadTs = null;
HiveOperation previousCommandType = null;
String previousCommand = null;
// For a given audit log ID, the join would have produced multiple rows
// for each ID. Each row contains a single output. Group all the rows
// and the outputs into a AuditLogEntry.
// For a given audit log ID, these accumulate the outputs from the
// different rows.
List<String> outputDirectories = new LinkedList<>();
List<Table> outputTables = new LinkedList<>();
List<NamedPartition> outputPartitions = new LinkedList<>();
List<Table> referenceTables = new LinkedList<>();
Table inputTable = null;
NamedPartition renameFromPartition = null;
while (rs.next()) {
id = rs.getLong("id");
createTime = rs.getTimestamp("create_time");
// Invalid operations are returned as null
String commandTypeString = rs.getString("command_type");
commandType = convertToHiveOperation(commandTypeString);
if (commandType == null) {
LOG.debug(String.format("Invalid operation %s in audit log id: %s", commandTypeString, id));
}
command = rs.getString("command");
objectName = rs.getString("name");
objectCategory = rs.getString("category");
objectType = rs.getString("type");
objectSerialized = rs.getString("serialized_object");
if (previouslyReadId != -1 && id != previouslyReadId) {
lastReadId = previouslyReadId;
// This means that all the outputs for a given audit log entry
// has been read.
AuditLogEntry entry = new AuditLogEntry(
previouslyReadId,
previouslyReadTs,
previousCommandType,
previousCommand,
outputDirectories,
referenceTables,
outputTables,
outputPartitions,
inputTable,
renameFromPartition);
auditLogEntries.add(entry);
// Reset these accumulated values
outputDirectories = new LinkedList<>();
referenceTables = new LinkedList<>();
outputTables = new LinkedList<>();
outputPartitions = new LinkedList<>();
renameFromPartition = null;
inputTable = null;
}
previouslyReadId = id;
previouslyReadTs = createTime;
previousCommandType = commandType;
previousCommand = command;
if ("DIRECTORY".equals(objectType)) {
outputDirectories.add(objectName);
} else if ("TABLE".equals(objectType)) {
Table table = new Table();
try {
ReplicationUtils.deserializeObject(objectSerialized, table);
} catch (MetadataException e) {
throw new AuditLogEntryException(e);
}
ReplicationUtils.normalizeNames(table);
if ("OUTPUT".equals(objectCategory)) {
outputTables.add(table);
} else if ("REFERENCE_TABLE".equals(objectCategory)) {
referenceTables.add(table);
} else if ("RENAME_FROM".equals(objectCategory) || "INPUT".equals(objectCategory)) {
inputTable = table;
} else {
throw new RuntimeException("Unhandled category: " + objectCategory);
}
} else if ("PARTITION".equals(objectType)) {
Partition partition = new Partition();
try {
ReplicationUtils.deserializeObject(objectSerialized, partition);
} catch (MetadataException e) {
throw new AuditLogEntryException(e);
}
ReplicationUtils.normalizeNames(partition);
String partitionName = getPartitionNameFromOutputCol(objectName);
NamedPartition namedPartition = new NamedPartition(partitionName, partition);
if ("OUTPUT".equals(objectCategory)) {
outputPartitions.add(namedPartition);
} else if ("RENAME_FROM".equals(objectCategory) || "INPUT".equals(objectCategory)) {
renameFromPartition = namedPartition;
} else {
throw new RuntimeException("Unhandled category: " + objectCategory);
}
} else if ("DFS_DIR".equals(objectType)) {
outputDirectories.add(objectName);
} else if ("LOCAL_DIR".equals(objectType)) {
outputDirectories.add(objectName);
} else if ("DATABASE".equals(objectType)) {
// Currently, nothing is done with DB's
} else if (objectType == null) {
// This will happen for queries that don't have any output
// objects. This can be removed a long with the OUTER aspect
// of the join above once the bug with exchange partitions is
// fixed.
LOG.debug("No output objects");
} else {
throw new RuntimeException("Unhandled output type: " + objectType);
}
}
// This is the case where we read to the end of the table.
if (id != -1) {
AuditLogEntry entry = new AuditLogEntry(
id,
createTime,
commandType,
command,
outputDirectories,
referenceTables,
outputTables,
outputPartitions,
inputTable,
renameFromPartition);
auditLogEntries.add(entry);
}
// Note: if we constantly get empty results (i.e. no valid entries
// because all the commands got filtered out), then the lastReadId won't
// be updated for a while.
lastReadId = idsToRead.getMaximumLong();
return;
} | private void fetchMoreEntries() throws SQLException, AuditLogEntryException {
LongRange idsToRead = getIdsToRead();
if (idsToRead.getMaximumLong() == 0) {
return;
}
String queryFormatString = "SELECT a.id, a.create_time, "
+ "command_type, command, name, category, "
+ "type, serialized_object "
+ "FROM %s a LEFT OUTER JOIN %s b on a.id = b.audit_log_id "
+ "WHERE a.id >= ? AND a.id <= ? "
+ "AND (command_type IS NULL OR command_type "
+ "NOT IN('SHOWTABLES', 'SHOWPARTITIONS', 'SWITCHDATABASE')) "
+ "ORDER BY id "
+ "LOCK IN SHARE MODE";
String query = String.format(queryFormatString,
auditLogTableName, outputObjectsTableName,
idsToRead.getMinimumLong(), idsToRead.getMaximumLong());
Connection connection = dbConnectionFactory.getConnection();
PreparedStatement ps = connection.prepareStatement(query);
int index = 1;
ps.setLong(index++, idsToRead.getMinimumLong());
ps.setLong(index++, idsToRead.getMaximumLong());
ResultSet rs = ps.executeQuery();
long id = -1;
Timestamp createTime = null;
HiveOperation commandType = null;
String command = null;
String objectName;
String objectCategory;
String objectType;
String objectSerialized;
long previouslyReadId = -1;
Timestamp previouslyReadTs = null;
HiveOperation previousCommandType = null;
String previousCommand = null;
List<String> outputDirectories = new LinkedList<>();
List<Table> outputTables = new LinkedList<>();
List<NamedPartition> outputPartitions = new LinkedList<>();
List<Table> referenceTables = new LinkedList<>();
Table inputTable = null;
NamedPartition renameFromPartition = null;
while (rs.next()) {
id = rs.getLong("id");
createTime = rs.getTimestamp("create_time");
String commandTypeString = rs.getString("command_type");
commandType = convertToHiveOperation(commandTypeString);
if (commandType == null) {
LOG.debug(String.format("Invalid operation %s in audit log id: %s", commandTypeString, id));
}
command = rs.getString("command");
objectName = rs.getString("name");
objectCategory = rs.getString("category");
objectType = rs.getString("type");
objectSerialized = rs.getString("serialized_object");
if (previouslyReadId != -1 && id != previouslyReadId) {
lastReadId = previouslyReadId;
AuditLogEntry entry = new AuditLogEntry(
previouslyReadId,
previouslyReadTs,
previousCommandType,
previousCommand,
outputDirectories,
referenceTables,
outputTables,
outputPartitions,
inputTable,
renameFromPartition);
auditLogEntries.add(entry);
outputDirectories = new LinkedList<>();
referenceTables = new LinkedList<>();
outputTables = new LinkedList<>();
outputPartitions = new LinkedList<>();
renameFromPartition = null;
inputTable = null;
}
previouslyReadId = id;
previouslyReadTs = createTime;
previousCommandType = commandType;
previousCommand = command;
if ("DIRECTORY".equals(objectType)) {
outputDirectories.add(objectName);
} else if ("TABLE".equals(objectType)) {
Table table = new Table();
try {
ReplicationUtils.deserializeObject(objectSerialized, table);
} catch (MetadataException e) {
throw new AuditLogEntryException(e);
}
ReplicationUtils.normalizeNames(table);
if ("OUTPUT".equals(objectCategory)) {
outputTables.add(table);
} else if ("REFERENCE_TABLE".equals(objectCategory)) {
referenceTables.add(table);
} else if ("RENAME_FROM".equals(objectCategory) || "INPUT".equals(objectCategory)) {
inputTable = table;
} else {
throw new RuntimeException("Unhandled category: " + objectCategory);
}
} else if ("PARTITION".equals(objectType)) {
Partition partition = new Partition();
try {
ReplicationUtils.deserializeObject(objectSerialized, partition);
} catch (MetadataException e) {
throw new AuditLogEntryException(e);
}
ReplicationUtils.normalizeNames(partition);
String partitionName = getPartitionNameFromOutputCol(objectName);
NamedPartition namedPartition = new NamedPartition(partitionName, partition);
if ("OUTPUT".equals(objectCategory)) {
outputPartitions.add(namedPartition);
} else if ("RENAME_FROM".equals(objectCategory) || "INPUT".equals(objectCategory)) {
renameFromPartition = namedPartition;
} else {
throw new RuntimeException("Unhandled category: " + objectCategory);
}
} else if ("DFS_DIR".equals(objectType)) {
outputDirectories.add(objectName);
} else if ("LOCAL_DIR".equals(objectType)) {
outputDirectories.add(objectName);
} else if ("DATABASE".equals(objectType)) {
} else if (objectType == null) {
LOG.debug("No output objects");
} else {
throw new RuntimeException("Unhandled output type: " + objectType);
}
}
if (id != -1) {
AuditLogEntry entry = new AuditLogEntry(
id,
createTime,
commandType,
command,
outputDirectories,
referenceTables,
outputTables,
outputPartitions,
inputTable,
renameFromPartition);
auditLogEntries.add(entry);
}
lastReadId = idsToRead.getMaximumLong();
return;
} | private void fetchmoreentries() throws sqlexception, auditlogentryexception { longrange idstoread = getidstoread(); if (idstoread.getmaximumlong() == 0) { return; } string queryformatstring = "select a.id, a.create_time, " + "command_type, command, name, category, " + "type, serialized_object " + "from %s a left outer join %s b on a.id = b.audit_log_id " + "where a.id >= ? and a.id <= ? " + "and (command_type is null or command_type " + "not in('showtables', 'showpartitions', 'switchdatabase')) " + "order by id " + "lock in share mode"; string query = string.format(queryformatstring, auditlogtablename, outputobjectstablename, idstoread.getminimumlong(), idstoread.getmaximumlong()); connection connection = dbconnectionfactory.getconnection(); preparedstatement ps = connection.preparestatement(query); int index = 1; ps.setlong(index++, idstoread.getminimumlong()); ps.setlong(index++, idstoread.getmaximumlong()); resultset rs = ps.executequery(); long id = -1; timestamp createtime = null; hiveoperation commandtype = null; string command = null; string objectname; string objectcategory; string objecttype; string objectserialized; long previouslyreadid = -1; timestamp previouslyreadts = null; hiveoperation previouscommandtype = null; string previouscommand = null; list<string> outputdirectories = new linkedlist<>(); list<table> outputtables = new linkedlist<>(); list<namedpartition> outputpartitions = new linkedlist<>(); list<table> referencetables = new linkedlist<>(); table inputtable = null; namedpartition renamefrompartition = null; while (rs.next()) { id = rs.getlong("id"); createtime = rs.gettimestamp("create_time"); string commandtypestring = rs.getstring("command_type"); commandtype = converttohiveoperation(commandtypestring); if (commandtype == null) { log.debug(string.format("invalid operation %s in audit log id: %s", commandtypestring, id)); } command = rs.getstring("command"); objectname = rs.getstring("name"); objectcategory = rs.getstring("category"); objecttype = rs.getstring("type"); objectserialized = rs.getstring("serialized_object"); if (previouslyreadid != -1 && id != previouslyreadid) { lastreadid = previouslyreadid; auditlogentry entry = new auditlogentry( previouslyreadid, previouslyreadts, previouscommandtype, previouscommand, outputdirectories, referencetables, outputtables, outputpartitions, inputtable, renamefrompartition); auditlogentries.add(entry); outputdirectories = new linkedlist<>(); referencetables = new linkedlist<>(); outputtables = new linkedlist<>(); outputpartitions = new linkedlist<>(); renamefrompartition = null; inputtable = null; } previouslyreadid = id; previouslyreadts = createtime; previouscommandtype = commandtype; previouscommand = command; if ("directory".equals(objecttype)) { outputdirectories.add(objectname); } else if ("table".equals(objecttype)) { table table = new table(); try { replicationutils.deserializeobject(objectserialized, table); } catch (metadataexception e) { throw new auditlogentryexception(e); } replicationutils.normalizenames(table); if ("output".equals(objectcategory)) { outputtables.add(table); } else if ("reference_table".equals(objectcategory)) { referencetables.add(table); } else if ("rename_from".equals(objectcategory) || "input".equals(objectcategory)) { inputtable = table; } else { throw new runtimeexception("unhandled category: " + objectcategory); } } else if ("partition".equals(objecttype)) { partition partition = new partition(); try { replicationutils.deserializeobject(objectserialized, partition); } catch (metadataexception e) { throw new auditlogentryexception(e); } replicationutils.normalizenames(partition); string partitionname = getpartitionnamefromoutputcol(objectname); namedpartition namedpartition = new namedpartition(partitionname, partition); if ("output".equals(objectcategory)) { outputpartitions.add(namedpartition); } else if ("rename_from".equals(objectcategory) || "input".equals(objectcategory)) { renamefrompartition = namedpartition; } else { throw new runtimeexception("unhandled category: " + objectcategory); } } else if ("dfs_dir".equals(objecttype)) { outputdirectories.add(objectname); } else if ("local_dir".equals(objecttype)) { outputdirectories.add(objectname); } else if ("database".equals(objecttype)) { } else if (objecttype == null) { log.debug("no output objects"); } else { throw new runtimeexception("unhandled output type: " + objecttype); } } if (id != -1) { auditlogentry entry = new auditlogentry( id, createtime, commandtype, command, outputdirectories, referencetables, outputtables, outputpartitions, inputtable, renamefrompartition); auditlogentries.add(entry); } lastreadid = idstoread.getmaximumlong(); return; } | aoen/reair | [
1,
1,
1,
0
] |
12,727 | @Override
public void run(Engine.Negative negative) { // TODO: Use "advanced probabilistic techniques".
for (int i = 1; i <= TASK_SAMPLE_SIZE; i++) {
int j, bufferX, bufferY; // location of (zR, zI) in histogram
double cR, cI, zR, zI, p;
// use ThreadLocalRandom for improved performance
cR = zR = minX + ThreadLocalRandom.current().nextDouble(rangeX);
cI = zI = minY + ThreadLocalRandom.current().nextDouble(rangeY);
// check if the sequence generated by (cR, cI) escapes
for (j = 0; j < iterationLimit; j++) {
for (int k = 1; k < degree; k++) {
p = zR;
zR = zR * zR - zI * zI;
zI = 2 * p * zI;
}
zR += cR;
zI += cI;
if (zR * zR + zI * zI > escapeDistance * escapeDistance) {
break;
}
}
if (j == iterationLimit) { // does the sequence escape?
continue;
}
zR = cR;
zI = cI;
for (j = 0; j < iterationLimit; j++) {
// Is the point within the rendering region?
if (zR >= minX && zR - minX <= rangeX && zI >= minY && zI - minY <= rangeY) {
bufferX = (int) (((zR - minX) / rangeX) * negative.size.width);
bufferY = (int) (((zI - minY) / rangeY) * negative.size.height);
negative.buffer.setElem(bufferY * negative.size.width + bufferX,
negative.buffer.getElem(bufferY * negative.size.width + bufferX) + 1);
}
for (int k = 1; k < degree; k++) {
p = zR;
zR = zR * zR - zI * zI;
zI = 2 * p * zI;
}
zR += cR;
zI += cI;
}
}
} | @Override
public void run(Engine.Negative negative) {
for (int i = 1; i <= TASK_SAMPLE_SIZE; i++) {
int j, bufferX, bufferY;
double cR, cI, zR, zI, p;
cR = zR = minX + ThreadLocalRandom.current().nextDouble(rangeX);
cI = zI = minY + ThreadLocalRandom.current().nextDouble(rangeY);
for (j = 0; j < iterationLimit; j++) {
for (int k = 1; k < degree; k++) {
p = zR;
zR = zR * zR - zI * zI;
zI = 2 * p * zI;
}
zR += cR;
zI += cI;
if (zR * zR + zI * zI > escapeDistance * escapeDistance) {
break;
}
}
if (j == iterationLimit) {
continue;
}
zR = cR;
zI = cI;
for (j = 0; j < iterationLimit; j++) {
if (zR >= minX && zR - minX <= rangeX && zI >= minY && zI - minY <= rangeY) {
bufferX = (int) (((zR - minX) / rangeX) * negative.size.width);
bufferY = (int) (((zI - minY) / rangeY) * negative.size.height);
negative.buffer.setElem(bufferY * negative.size.width + bufferX,
negative.buffer.getElem(bufferY * negative.size.width + bufferX) + 1);
}
for (int k = 1; k < degree; k++) {
p = zR;
zR = zR * zR - zI * zI;
zI = 2 * p * zI;
}
zR += cR;
zI += cI;
}
}
} | @override public void run(engine.negative negative) { for (int i = 1; i <= task_sample_size; i++) { int j, bufferx, buffery; double cr, ci, zr, zi, p; cr = zr = minx + threadlocalrandom.current().nextdouble(rangex); ci = zi = miny + threadlocalrandom.current().nextdouble(rangey); for (j = 0; j < iterationlimit; j++) { for (int k = 1; k < degree; k++) { p = zr; zr = zr * zr - zi * zi; zi = 2 * p * zi; } zr += cr; zi += ci; if (zr * zr + zi * zi > escapedistance * escapedistance) { break; } } if (j == iterationlimit) { continue; } zr = cr; zi = ci; for (j = 0; j < iterationlimit; j++) { if (zr >= minx && zr - minx <= rangex && zi >= miny && zi - miny <= rangey) { bufferx = (int) (((zr - minx) / rangex) * negative.size.width); buffery = (int) (((zi - miny) / rangey) * negative.size.height); negative.buffer.setelem(buffery * negative.size.width + bufferx, negative.buffer.getelem(buffery * negative.size.width + bufferx) + 1); } for (int k = 1; k < degree; k++) { p = zr; zr = zr * zr - zi * zi; zi = 2 * p * zi; } zr += cr; zi += ci; } } } | ari-b/neb | [
1,
0,
0,
0
] |
20,951 | public void xtestNameClash() {
WritableVersion wv = model.getWritableVersion(AbstractModel.SUPERUSER);
try {
doTestVersion(wv);
doTestVersion(wv);
fail("created two groups with same name");
} catch (ModelException ex) {
wv.abort();
Throwable cause = ex;
while (null != cause.getCause()) {
cause = cause.getCause();
}
cause.printStackTrace();
fail(cause.getClass().getName() + ": " + cause.getMessage());
} finally {
if (!wv.isCompleted()) {
wv.abort();
} // not testing persistence here
}
} | public void xtestNameClash() {
WritableVersion wv = model.getWritableVersion(AbstractModel.SUPERUSER);
try {
doTestVersion(wv);
doTestVersion(wv);
fail("created two groups with same name");
} catch (ModelException ex) {
wv.abort();
Throwable cause = ex;
while (null != cause.getCause()) {
cause = cause.getCause();
}
cause.printStackTrace();
fail(cause.getClass().getName() + ": " + cause.getMessage());
} finally {
if (!wv.isCompleted()) {
wv.abort();
}
}
} | public void xtestnameclash() { writableversion wv = model.getwritableversion(abstractmodel.superuser); try { dotestversion(wv); dotestversion(wv); fail("created two groups with same name"); } catch (modelexception ex) { wv.abort(); throwable cause = ex; while (null != cause.getcause()) { cause = cause.getcause(); } cause.printstacktrace(); fail(cause.getclass().getname() + ": " + cause.getmessage()); } finally { if (!wv.iscompleted()) { wv.abort(); } } } | chrishmorris/PiMS | [
0,
0,
1,
0
] |
13,109 | public void hasLength(int length) {
// TODO Support string length?
checkArgument(length >= 0, "length (%s) must be >= 0", length);
if (actual() == null || !actual().isContainerNode()) {
fail("is a container");
} else if (actual().size() != length) {
fail("has length", length);
}
} | public void hasLength(int length) {
checkArgument(length >= 0, "length (%s) must be >= 0", length);
if (actual() == null || !actual().isContainerNode()) {
fail("is a container");
} else if (actual().size() != length) {
fail("has length", length);
}
} | public void haslength(int length) { checkargument(length >= 0, "length (%s) must be >= 0", length); if (actual() == null || !actual().iscontainernode()) { fail("is a container"); } else if (actual().size() != length) { fail("has length", length); } } | blackducksoftware/magpie-libraries | [
1,
0,
1,
0
] |
13,111 | public JsonSubjectFactory configure(DeserializationFeature feature, boolean state) {
objectMapper.configure(feature, state);
return this;
} | public JsonSubjectFactory configure(DeserializationFeature feature, boolean state) {
objectMapper.configure(feature, state);
return this;
} | public jsonsubjectfactory configure(deserializationfeature feature, boolean state) { objectmapper.configure(feature, state); return this; } | blackducksoftware/magpie-libraries | [
1,
0,
0,
0
] |
21,976 | @Override
public void onBindViewHolder(final ViewHolder holder, final int position) {
final Item item = mItemList.get(position);
setListener(mListener);
String dateStr = item.getPubDate();
DateFormat formatter = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss", Locale.getDefault());
try {
Date date = formatter.parse(dateStr);
String pubDateStr = DateUtils.formatAbbrev(mContext, date);
String[] dateMonth = pubDateStr.split(" ");
holder.mTvMonth.setText(dateMonth[0]);
holder.mTvDate.setText(dateMonth[1]);
} catch (ParseException e) {
e.printStackTrace();
GoogleAnalyticsUtil.trackException(mContext, e);
}
if (null != item.getItunesDuration()) {
//Check if the duration is already formatted
if (item.getItunesDuration().contains(":")) {
holder.mTvPodcastDuration.setText(item.getItunesDuration());
} else {
long min = Integer.parseInt(item.getItunesDuration()) / 60000;
long sec = Integer.parseInt(item.getItunesDuration()) % 60000 / 1000;
holder.mTvPodcastDuration.setText(String.format(mContext.getResources().getString(R.string.duration), min, sec));
}
}
holder.mTvPodcastTitle.setText(item.getTitle());
holder.mPlayImageButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//TODO:: Change Icon based on action. If playing, equalizer icon, pause pause-icon etc
String imageUrl = "";
Cursor cursor = mContext.getContentResolver().query(mUri, null, null, null, null);
if (cursor != null) {
while (cursor.moveToNext()) {
imageUrl = cursor.getString(ApplicationConstants.COLUMN_SUBSCRIBED_PODCAST_FEED_IMAGE_URL);
}
}
Track track = new Track();
track.setTitle(item.getTitle());
track.setStreamUrl(item.getEnclosure().getUrl());
track.setArtist(item.getItunesAuthor());
track.setArtworkUrl(imageUrl);
track.setDurationInMilli(Converter.getMilliSeconds(item.getItunesDuration()));
if (mListener != null) {
mListener.onTrackClicked(track);
}
}
});
holder.mEpisodeRl.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
displayBottomSheet(item);
}
});
} | @Override
public void onBindViewHolder(final ViewHolder holder, final int position) {
final Item item = mItemList.get(position);
setListener(mListener);
String dateStr = item.getPubDate();
DateFormat formatter = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss", Locale.getDefault());
try {
Date date = formatter.parse(dateStr);
String pubDateStr = DateUtils.formatAbbrev(mContext, date);
String[] dateMonth = pubDateStr.split(" ");
holder.mTvMonth.setText(dateMonth[0]);
holder.mTvDate.setText(dateMonth[1]);
} catch (ParseException e) {
e.printStackTrace();
GoogleAnalyticsUtil.trackException(mContext, e);
}
if (null != item.getItunesDuration()) {
if (item.getItunesDuration().contains(":")) {
holder.mTvPodcastDuration.setText(item.getItunesDuration());
} else {
long min = Integer.parseInt(item.getItunesDuration()) / 60000;
long sec = Integer.parseInt(item.getItunesDuration()) % 60000 / 1000;
holder.mTvPodcastDuration.setText(String.format(mContext.getResources().getString(R.string.duration), min, sec));
}
}
holder.mTvPodcastTitle.setText(item.getTitle());
holder.mPlayImageButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
String imageUrl = "";
Cursor cursor = mContext.getContentResolver().query(mUri, null, null, null, null);
if (cursor != null) {
while (cursor.moveToNext()) {
imageUrl = cursor.getString(ApplicationConstants.COLUMN_SUBSCRIBED_PODCAST_FEED_IMAGE_URL);
}
}
Track track = new Track();
track.setTitle(item.getTitle());
track.setStreamUrl(item.getEnclosure().getUrl());
track.setArtist(item.getItunesAuthor());
track.setArtworkUrl(imageUrl);
track.setDurationInMilli(Converter.getMilliSeconds(item.getItunesDuration()));
if (mListener != null) {
mListener.onTrackClicked(track);
}
}
});
holder.mEpisodeRl.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
displayBottomSheet(item);
}
});
} | @override public void onbindviewholder(final viewholder holder, final int position) { final item item = mitemlist.get(position); setlistener(mlistener); string datestr = item.getpubdate(); dateformat formatter = new simpledateformat("eee, dd mmm yyyy hh:mm:ss", locale.getdefault()); try { date date = formatter.parse(datestr); string pubdatestr = dateutils.formatabbrev(mcontext, date); string[] datemonth = pubdatestr.split(" "); holder.mtvmonth.settext(datemonth[0]); holder.mtvdate.settext(datemonth[1]); } catch (parseexception e) { e.printstacktrace(); googleanalyticsutil.trackexception(mcontext, e); } if (null != item.getitunesduration()) { if (item.getitunesduration().contains(":")) { holder.mtvpodcastduration.settext(item.getitunesduration()); } else { long min = integer.parseint(item.getitunesduration()) / 60000; long sec = integer.parseint(item.getitunesduration()) % 60000 / 1000; holder.mtvpodcastduration.settext(string.format(mcontext.getresources().getstring(r.string.duration), min, sec)); } } holder.mtvpodcasttitle.settext(item.gettitle()); holder.mplayimagebutton.setonclicklistener(new view.onclicklistener() { @override public void onclick(view v) { string imageurl = ""; cursor cursor = mcontext.getcontentresolver().query(muri, null, null, null, null); if (cursor != null) { while (cursor.movetonext()) { imageurl = cursor.getstring(applicationconstants.column_subscribed_podcast_feed_image_url); } } track track = new track(); track.settitle(item.gettitle()); track.setstreamurl(item.getenclosure().geturl()); track.setartist(item.getitunesauthor()); track.setartworkurl(imageurl); track.setdurationinmilli(converter.getmilliseconds(item.getitunesduration())); if (mlistener != null) { mlistener.ontrackclicked(track); } } }); holder.mepisoderl.setonclicklistener(new view.onclicklistener() { @override public void onclick(view v) { displaybottomsheet(item); } }); } | c0de-wizard/pod-adddict | [
0,
1,
0,
0
] |
21,985 | protected Model filterModel(Model model, URI documentBaseUri) throws IOException
{
final boolean IGNORE_STYLESHEETS = true;
final boolean IGNORE_FAVICON = true;
final boolean DOCBASE_ONLY = false;
ValueFactory factory = SimpleValueFactory.getInstance();
String documentBaseUriStr = documentBaseUri.toString();
Iterator<Statement> iter = model.iterator();
//Note that the model has a "predictable iteration order", and since we get a ConcurrentModException while altering the model itself,
// we choose to instance a new one, and add the valid filtered nodes in order to this new one
Model filteredModel = new LinkedHashModel();
while (iter.hasNext()) {
Statement stmt = iter.next();
//!!! Note !!!
//The double (eg. both XHTML_NS_STYLESHEET and LOCAL_NS_STYLESHEET) checks below are the result of a bug:
//Because we set the @vocab attribute on the <html> element, all the stylesheet-links are prefixed with that namespace,
//so they're actually not real XHTML predicates, but changed into eg. http://www.mot.be/ontology/stylesheet
//Because it's quite a hard fix to implement, we work around it here.
//remove all the XHTML stylesheets predicates from the model
boolean removed = false;
if (!removed && IGNORE_STYLESHEETS && (stmt.getPredicate().toString().equals(XHTML_NS_STYLESHEET) || stmt.getPredicate().toString().equals(LOCAL_NS_STYLESHEET))) {
removed = true;
}
//removes all favicon statements. Note that this last check isn't waterproof (we can use any name for our favicons), but it works 99% of the time
if (!removed && IGNORE_FAVICON &&
stmt.getObject().toString().contains("favicon") &&
(stmt.getPredicate().toString().equals(XHTML_NS_ICON) || stmt.getPredicate().toString().equals(LOCAL_NS_ICON) ||
stmt.getPredicate().toString().equals(XHTML_NS_APPLE_TOUCH_ICON) || stmt.getPredicate().toString().equals(LOCAL_NS_APPLE_TOUCH_ICON) ||
stmt.getPredicate().toString().equals(LOCAL_NS_MANIFEST)
)) {
removed = true;
}
//remove all non-documentBaseUri subjects from the model
if (!removed && DOCBASE_ONLY && !stmt.getSubject().toString().equals(documentBaseUriStr)) {
removed = true;
}
//we're not interested in blank nodes because they don't have a page as subject, so we might as well filter them out
if (!removed && stmt.getSubject() instanceof BNode) {
removed = true;
}
//we'll use this loop to whitespace-trim the values while we're at it
//Note that this is a bit hacky. The more proper way would be to edit the com.beligum.blocks.rdf.importers.semargl.SesameSink.addPlainLiteral()
// method, but that class is supposed to be part of the core Sesame library (only copied over to have RDFa support in Sesame4), so I opted to implement it here
//Also note that this implementation doens't trim the non-plain literal values (like datetime). They are passed over via the @content attribute inside RDFa,
// so it should be quite clean already. Also because the HTML input stream, coming from JSoup is minified before passed to Sesame.
// But it can be easily supported (the part where the exception is thrown)
if (!removed) {
//these will hold possible modified pieces of the statement
Resource newSubject = null;
IRI newPredicate = null;
Value newObject = null;
//trim all "lang" params from the subject
//This is an important change:
// From time to time, the eg. ?lang=en part of an URI seeps through, even though we set every RDFa html tag with a clear @about attribute
// eg. this is the case as subject of "http://www.w3.org/ns/rdfa#usesVocabulary"
// Because we don't save the statements of a resource by language (the @lang tag makes sure it is added statement-per-statement for certain datatypes),
// the subject of the resource needs to be cleared from all lang query params to avoid false doubles while querying the triplestore (eg. using SPARQL).
// We group by subject-URI and double (or triple, or ..., depends on the amount of languages in the system) entries mess up the counts, iterations, group-by's, etc.
// That's why we try hard here to avoid these during import.
if (stmt.getSubject() instanceof IRI) {
URI subject = URI.create(stmt.getSubject().stringValue());
MultivaluedMap<String, String> queryParams = StringFunctions.getQueryParameters(subject);
if (queryParams != null && queryParams.containsKey(I18nFactory.LANG_QUERY_PARAM)) {
URI noLangUri = UriBuilder.fromUri(subject).replaceQueryParam(I18nFactory.LANG_QUERY_PARAM).build();
newSubject = factory.createIRI(noLangUri.toString());
}
}
//if the object is a literal, check if it needs to be trimmed
//BIG NOTE: XSD.anyURI is also an instance of a Literal!!
if (stmt.getObject() instanceof Literal) {
Literal literal = (Literal) stmt.getObject();
String objectValue = literal.getLabel();
//we like to use and save relative URIs as relative URIs because it means we don't need to
//update them if the site domain changes. But the RDFa doc clearly states all relative URIs need
//to be converted to absolute URIs: https://www.w3.org/TR/rdfa-core/#s_curieprocessing
//However, when dealing with custom html tags (eg. <meta datatype="xsd:anyURI"> tags), this doesn't happen
//automatically, so let's do it manually.
if (literal.getDatatype().equals(XMLSchema.ANYURI)) {
//this is important: blank URIs (and this happens; eg. a tag <meta property="sameAs" datatype="xsd:anyURI" /> will yield a blank value for "sameAs"!)
// would get filled in without this check; eg. a "" value would become "http://localhost:8080/en/" by default and this is bad! (eg. for sameAs properties)
if (!StringUtils.isBlank(objectValue)) {
URI objectUri = URI.create(objectValue);
if (!objectUri.isAbsolute()) {
objectUri = documentBaseUri.resolve(objectUri);
}
//it makes sense to convert the data type to IRI as well
newObject = factory.createIRI(objectUri.toString());
}
}
//this means it's a 'true' literal -> check if we can trim
else {
String objectValueTrimmed = StringUtils.strip(objectValue);
if (!objectValue.equals(objectValueTrimmed)) {
//Note: this makes sense: see SimpleLiteral(String label, IRI datatype) constructor code
if (literal.getDatatype().equals(RDF.LANGSTRING)) {
newObject = factory.createLiteral(objectValueTrimmed, literal.getLanguage().get());
}
else if (literal.getDatatype().equals(XMLSchema.STRING)) {
newObject = factory.createLiteral(objectValueTrimmed, literal.getDatatype());
}
else if (literal.getDatatype().equals(RDF.HTML)) {
newObject = factory.createLiteral(objectValueTrimmed, literal.getDatatype());
}
else {
throw new IOException("Encountered unsupported simple literal value, this shouldn't happen; " + literal.getDatatype() + " - " + objectValue);
}
}
}
}
//After all the filtering above, check if we need to change the statement: remove it from the model and add it again later on
Statement validStmt = null;
if (newSubject != null || newObject != null || newPredicate != null) {
if (newSubject == null) {
newSubject = stmt.getSubject();
}
if (newPredicate == null) {
newPredicate = stmt.getPredicate();
}
if (newObject == null) {
newObject = stmt.getObject();
}
if (stmt.getContext() == null) {
validStmt = factory.createStatement(newSubject, newPredicate, newObject);
}
else {
validStmt = factory.createStatement(newSubject, newPredicate, newObject, stmt.getContext());
}
}
else {
validStmt = stmt;
}
filteredModel.add(validStmt);
}
}
// if the settings say so, validate the incoming (filtered) model
if (Settings.instance().getEnableRdfValidation()) {
new RdfModelImpl(filteredModel).validate();
}
//DEBUG
//RDFDataMgr.write(System.out, model, RDFFormat.NTRIPLES);
return filteredModel;
} | protected Model filterModel(Model model, URI documentBaseUri) throws IOException
{
final boolean IGNORE_STYLESHEETS = true;
final boolean IGNORE_FAVICON = true;
final boolean DOCBASE_ONLY = false;
ValueFactory factory = SimpleValueFactory.getInstance();
String documentBaseUriStr = documentBaseUri.toString();
Iterator<Statement> iter = model.iterator();
Model filteredModel = new LinkedHashModel();
while (iter.hasNext()) {
Statement stmt = iter.next();
boolean removed = false;
if (!removed && IGNORE_STYLESHEETS && (stmt.getPredicate().toString().equals(XHTML_NS_STYLESHEET) || stmt.getPredicate().toString().equals(LOCAL_NS_STYLESHEET))) {
removed = true;
}
if (!removed && IGNORE_FAVICON &&
stmt.getObject().toString().contains("favicon") &&
(stmt.getPredicate().toString().equals(XHTML_NS_ICON) || stmt.getPredicate().toString().equals(LOCAL_NS_ICON) ||
stmt.getPredicate().toString().equals(XHTML_NS_APPLE_TOUCH_ICON) || stmt.getPredicate().toString().equals(LOCAL_NS_APPLE_TOUCH_ICON) ||
stmt.getPredicate().toString().equals(LOCAL_NS_MANIFEST)
)) {
removed = true;
}
if (!removed && DOCBASE_ONLY && !stmt.getSubject().toString().equals(documentBaseUriStr)) {
removed = true;
}
if (!removed && stmt.getSubject() instanceof BNode) {
removed = true;
}
if (!removed) {
Resource newSubject = null;
IRI newPredicate = null;
Value newObject = null;
if (stmt.getSubject() instanceof IRI) {
URI subject = URI.create(stmt.getSubject().stringValue());
MultivaluedMap<String, String> queryParams = StringFunctions.getQueryParameters(subject);
if (queryParams != null && queryParams.containsKey(I18nFactory.LANG_QUERY_PARAM)) {
URI noLangUri = UriBuilder.fromUri(subject).replaceQueryParam(I18nFactory.LANG_QUERY_PARAM).build();
newSubject = factory.createIRI(noLangUri.toString());
}
}
if (stmt.getObject() instanceof Literal) {
Literal literal = (Literal) stmt.getObject();
String objectValue = literal.getLabel();
if (literal.getDatatype().equals(XMLSchema.ANYURI)) {
if (!StringUtils.isBlank(objectValue)) {
URI objectUri = URI.create(objectValue);
if (!objectUri.isAbsolute()) {
objectUri = documentBaseUri.resolve(objectUri);
}
newObject = factory.createIRI(objectUri.toString());
}
}
else {
String objectValueTrimmed = StringUtils.strip(objectValue);
if (!objectValue.equals(objectValueTrimmed)) {
if (literal.getDatatype().equals(RDF.LANGSTRING)) {
newObject = factory.createLiteral(objectValueTrimmed, literal.getLanguage().get());
}
else if (literal.getDatatype().equals(XMLSchema.STRING)) {
newObject = factory.createLiteral(objectValueTrimmed, literal.getDatatype());
}
else if (literal.getDatatype().equals(RDF.HTML)) {
newObject = factory.createLiteral(objectValueTrimmed, literal.getDatatype());
}
else {
throw new IOException("Encountered unsupported simple literal value, this shouldn't happen; " + literal.getDatatype() + " - " + objectValue);
}
}
}
}
Statement validStmt = null;
if (newSubject != null || newObject != null || newPredicate != null) {
if (newSubject == null) {
newSubject = stmt.getSubject();
}
if (newPredicate == null) {
newPredicate = stmt.getPredicate();
}
if (newObject == null) {
newObject = stmt.getObject();
}
if (stmt.getContext() == null) {
validStmt = factory.createStatement(newSubject, newPredicate, newObject);
}
else {
validStmt = factory.createStatement(newSubject, newPredicate, newObject, stmt.getContext());
}
}
else {
validStmt = stmt;
}
filteredModel.add(validStmt);
}
}
if (Settings.instance().getEnableRdfValidation()) {
new RdfModelImpl(filteredModel).validate();
}
return filteredModel;
} | protected model filtermodel(model model, uri documentbaseuri) throws ioexception { final boolean ignore_stylesheets = true; final boolean ignore_favicon = true; final boolean docbase_only = false; valuefactory factory = simplevaluefactory.getinstance(); string documentbaseuristr = documentbaseuri.tostring(); iterator<statement> iter = model.iterator(); model filteredmodel = new linkedhashmodel(); while (iter.hasnext()) { statement stmt = iter.next(); boolean removed = false; if (!removed && ignore_stylesheets && (stmt.getpredicate().tostring().equals(xhtml_ns_stylesheet) || stmt.getpredicate().tostring().equals(local_ns_stylesheet))) { removed = true; } if (!removed && ignore_favicon && stmt.getobject().tostring().contains("favicon") && (stmt.getpredicate().tostring().equals(xhtml_ns_icon) || stmt.getpredicate().tostring().equals(local_ns_icon) || stmt.getpredicate().tostring().equals(xhtml_ns_apple_touch_icon) || stmt.getpredicate().tostring().equals(local_ns_apple_touch_icon) || stmt.getpredicate().tostring().equals(local_ns_manifest) )) { removed = true; } if (!removed && docbase_only && !stmt.getsubject().tostring().equals(documentbaseuristr)) { removed = true; } if (!removed && stmt.getsubject() instanceof bnode) { removed = true; } if (!removed) { resource newsubject = null; iri newpredicate = null; value newobject = null; if (stmt.getsubject() instanceof iri) { uri subject = uri.create(stmt.getsubject().stringvalue()); multivaluedmap<string, string> queryparams = stringfunctions.getqueryparameters(subject); if (queryparams != null && queryparams.containskey(i18nfactory.lang_query_param)) { uri nolanguri = uribuilder.fromuri(subject).replacequeryparam(i18nfactory.lang_query_param).build(); newsubject = factory.createiri(nolanguri.tostring()); } } if (stmt.getobject() instanceof literal) { literal literal = (literal) stmt.getobject(); string objectvalue = literal.getlabel(); if (literal.getdatatype().equals(xmlschema.anyuri)) { if (!stringutils.isblank(objectvalue)) { uri objecturi = uri.create(objectvalue); if (!objecturi.isabsolute()) { objecturi = documentbaseuri.resolve(objecturi); } newobject = factory.createiri(objecturi.tostring()); } } else { string objectvaluetrimmed = stringutils.strip(objectvalue); if (!objectvalue.equals(objectvaluetrimmed)) { if (literal.getdatatype().equals(rdf.langstring)) { newobject = factory.createliteral(objectvaluetrimmed, literal.getlanguage().get()); } else if (literal.getdatatype().equals(xmlschema.string)) { newobject = factory.createliteral(objectvaluetrimmed, literal.getdatatype()); } else if (literal.getdatatype().equals(rdf.html)) { newobject = factory.createliteral(objectvaluetrimmed, literal.getdatatype()); } else { throw new ioexception("encountered unsupported simple literal value, this shouldn't happen; " + literal.getdatatype() + " - " + objectvalue); } } } } statement validstmt = null; if (newsubject != null || newobject != null || newpredicate != null) { if (newsubject == null) { newsubject = stmt.getsubject(); } if (newpredicate == null) { newpredicate = stmt.getpredicate(); } if (newobject == null) { newobject = stmt.getobject(); } if (stmt.getcontext() == null) { validstmt = factory.createstatement(newsubject, newpredicate, newobject); } else { validstmt = factory.createstatement(newsubject, newpredicate, newobject, stmt.getcontext()); } } else { validstmt = stmt; } filteredmodel.add(validstmt); } } if (settings.instance().getenablerdfvalidation()) { new rdfmodelimpl(filteredmodel).validate(); } return filteredmodel; } | 
bramdvrepublic/com.beligum.blocks.core | [
1,
0,
0,
0
] |
13,815 | public void updateRawRatioDataModelsWithPrimaryStandardValue(Double[] standardValuesMap) {
// since ratios are sorted, we send in ordered array of corresponding standard values
//TODO: make more robust
SortedSet<DataModelInterface> ratiosSortedSet = getRatiosForFractionFitting();//getValidRawRatios();
int count = 0;
Iterator<DataModelInterface> ratiosSortedSetIterator = ratiosSortedSet.iterator();
while (ratiosSortedSetIterator.hasNext()) {
DataModelInterface ratio = ratiosSortedSetIterator.next();
((RawRatioDataModel) ratio).setStandardValue(standardValuesMap[count]);
// oct 2012 check for zero or missing standard ratio and remove ratio from fractionation consideration
//if ( standardValuesMap[count] == 0.0 ) {
((RawRatioDataModel) ratio).setUsedForFractionationCorrections(standardValuesMap[count] != 0.0);
//}
count++;
}
} | public void updateRawRatioDataModelsWithPrimaryStandardValue(Double[] standardValuesMap) {
SortedSet<DataModelInterface> ratiosSortedSet = getRatiosForFractionFitting();
int count = 0;
Iterator<DataModelInterface> ratiosSortedSetIterator = ratiosSortedSet.iterator();
while (ratiosSortedSetIterator.hasNext()) {
DataModelInterface ratio = ratiosSortedSetIterator.next();
((RawRatioDataModel) ratio).setStandardValue(standardValuesMap[count]);
((RawRatioDataModel) ratio).setUsedForFractionationCorrections(standardValuesMap[count] != 0.0);
count++;
}
} | public void updaterawratiodatamodelswithprimarystandardvalue(double[] standardvaluesmap) { sortedset<datamodelinterface> ratiossortedset = getratiosforfractionfitting() int count = 0; iterator<datamodelinterface> ratiossortedsetiterator = ratiossortedset.iterator(); while (ratiossortedsetiterator.hasnext()) { datamodelinterface ratio = ratiossortedsetiterator.next(); ((rawratiodatamodel) ratio).setstandardvalue(standardvaluesmap[count]); ((rawratiodatamodel) ratio).setusedforfractionationcorrections(standardvaluesmap[count] != 0.0); count++; } } | bowring/ET_Redux | [
1,
0,
0,
0
] |
13,882 | protected void computeAverageChainingDistances(KNNQuery<O> knnq, DistanceQuery<O> dq, DBIDs ids, WritableDoubleDataStore acds) {
FiniteProgress lrdsProgress = LOG.isVerbose() ? new FiniteProgress("Computing average chaining distances", ids.size(), LOG) : null;
// Compute the chaining distances.
// We do <i>not</i> bother to materialize the chaining order.
for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
final KNNList neighbors = knnq.getKNNForDBID(iter, k);
final int r = neighbors.size();
DoubleDBIDListIter it1 = neighbors.iter(), it2 = neighbors.iter();
// Store the current lowest reachability.
final double[] mindists = new double[r];
for(int i = 0; it1.valid(); it1.advance(), ++i) {
mindists[i] = DBIDUtil.equal(it1, iter) ? Double.NaN : it1.doubleValue();
}
double acsum = 0.;
for(int j = ((r < k) ? r : k) - 1; j > 0; --j) {
// Find the minimum:
int minpos = -1;
double mindist = Double.NaN;
for(int i = 0; i < mindists.length; ++i) {
double curdist = mindists[i];
// Both values could be NaN, deliberately.
if(curdist == curdist && !(curdist > mindist)) {
minpos = i;
mindist = curdist;
}
}
acsum += mindist * j; // Weighted sum, decreasing weights
mindists[minpos] = Double.NaN;
it1.seek(minpos);
// Update distances
it2.seek(0);
for(int i = 0; it2.valid(); it2.advance(), ++i) {
final double curdist = mindists[i];
if(curdist != curdist) {
continue; // NaN = processed!
}
double newdist = dq.distance(it1, it2);
if(newdist < curdist) {
mindists[i] = newdist;
}
}
}
acds.putDouble(iter, acsum / (r * 0.5 * (r - 1.)));
LOG.incrementProcessed(lrdsProgress);
}
LOG.ensureCompleted(lrdsProgress);
} | protected void computeAverageChainingDistances(KNNQuery<O> knnq, DistanceQuery<O> dq, DBIDs ids, WritableDoubleDataStore acds) {
FiniteProgress lrdsProgress = LOG.isVerbose() ? new FiniteProgress("Computing average chaining distances", ids.size(), LOG) : null;
for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
final KNNList neighbors = knnq.getKNNForDBID(iter, k);
final int r = neighbors.size();
DoubleDBIDListIter it1 = neighbors.iter(), it2 = neighbors.iter();
final double[] mindists = new double[r];
for(int i = 0; it1.valid(); it1.advance(), ++i) {
mindists[i] = DBIDUtil.equal(it1, iter) ? Double.NaN : it1.doubleValue();
}
double acsum = 0.;
for(int j = ((r < k) ? r : k) - 1; j > 0; --j) {
int minpos = -1;
double mindist = Double.NaN;
for(int i = 0; i < mindists.length; ++i) {
double curdist = mindists[i];
if(curdist == curdist && !(curdist > mindist)) {
minpos = i;
mindist = curdist;
}
}
acsum += mindist * j;
mindists[minpos] = Double.NaN;
it1.seek(minpos);
it2.seek(0);
for(int i = 0; it2.valid(); it2.advance(), ++i) {
final double curdist = mindists[i];
if(curdist != curdist) {
continue;
}
double newdist = dq.distance(it1, it2);
if(newdist < curdist) {
mindists[i] = newdist;
}
}
}
acds.putDouble(iter, acsum / (r * 0.5 * (r - 1.)));
LOG.incrementProcessed(lrdsProgress);
}
LOG.ensureCompleted(lrdsProgress);
} | protected void computeaveragechainingdistances(knnquery<o> knnq, distancequery<o> dq, dbids ids, writabledoubledatastore acds) { finiteprogress lrdsprogress = log.isverbose() ? new finiteprogress("computing average chaining distances", ids.size(), log) : null; for(dbiditer iter = ids.iter(); iter.valid(); iter.advance()) { final knnlist neighbors = knnq.getknnfordbid(iter, k); final int r = neighbors.size(); doubledbidlistiter it1 = neighbors.iter(), it2 = neighbors.iter(); final double[] mindists = new double[r]; for(int i = 0; it1.valid(); it1.advance(), ++i) { mindists[i] = dbidutil.equal(it1, iter) ? double.nan : it1.doublevalue(); } double acsum = 0.; for(int j = ((r < k) ? r : k) - 1; j > 0; --j) { int minpos = -1; double mindist = double.nan; for(int i = 0; i < mindists.length; ++i) { double curdist = mindists[i]; if(curdist == curdist && !(curdist > mindist)) { minpos = i; mindist = curdist; } } acsum += mindist * j; mindists[minpos] = double.nan; it1.seek(minpos); it2.seek(0); for(int i = 0; it2.valid(); it2.advance(), ++i) { final double curdist = mindists[i]; if(curdist != curdist) { continue; } double newdist = dq.distance(it1, it2); if(newdist < curdist) { mindists[i] = newdist; } } } acds.putdouble(iter, acsum / (r * 0.5 * (r - 1.))); log.incrementprocessed(lrdsprogress); } log.ensurecompleted(lrdsprogress); } | bertl4398/ssdbm2020 | [
1,
0,
0,
0
] |
22,220 | public static PerspectiveJsonObject getPerspectiveObjectForEvent(TrigTripleData trigTripleData, String mentionUri, String meta) {
PerspectiveJsonObject perspectiveJsonObject = new PerspectiveJsonObject();
String author = "";
String cite = "";
// System.out.println("Start");
ArrayList<String> perspectives = new ArrayList<String>();
if (trigTripleData.tripleMapGrasp.containsKey(mentionUri)) {
ArrayList<Statement> perspectiveTriples = trigTripleData.tripleMapGrasp.get(mentionUri);
// System.out.println("perspectiveTriples.size() = " + perspectiveTriples.size());
for (int i = 0; i < perspectiveTriples.size(); i++) {
Statement statement = perspectiveTriples.get(i);
String subject = statement.getSubject().getURI();
String predicate = statement.getPredicate().getURI();
String object = statement.getObject().toString();
if (predicate.endsWith("#hasAttribution")) {
if (trigTripleData.tripleMapGrasp.containsKey(object)) {
ArrayList<Statement> perspectiveValues = trigTripleData.tripleMapGrasp.get(object);
for (int j = 0; j < perspectiveValues.size(); j++) {
Statement statement1 = perspectiveValues.get(j);
//System.out.println("statement1.toString() = " + statement1.toString());
// System.out.println("statement1.getObject().toString() = " + statement1.getObject().toString());
//ttp://www.w3.org/ns/prov#wasAttributedTo,
if (statement1.getPredicate().getURI().endsWith("#wasAttributedTo")) {
if (trigTripleData.tripleMapGrasp.containsKey(statement1.getObject().toString())) {
//// this means the source has properties so it is likely to be the document with an author
ArrayList<Statement> provStatements = trigTripleData.tripleMapGrasp.get(statement1.getObject().toString());
for (int k = 0; k < provStatements.size(); k++) {
Statement statement2 = provStatements.get(k);
author = statement2.getObject().toString();
int idx = author.lastIndexOf("/");
if (idx > -1) {
author = author.substring(idx + 1);
}
// System.out.println("author source = " + author);
}
} else {
//// it is not the document so a cited source
cite = statement1.getObject().toString();
int idx = cite.lastIndexOf("/");
if (idx > -1) {
cite = cite.substring(idx + 1);
}
/* THIS DOES NOT WORK: PRONOUNS, RECEPTIONIST, ETC...
//// There can be source documents without meta data.
//// In that case, there are no triples for in tripleMapGrasp with this subject but it is still a document
//// The next hack checks for upper case characters in the URI
//// If they are present, we assume it is somebody otherwise we assume it is a document and we assign it to the meta string
if (cite.toLowerCase().equals(cite)) {
//// no uppercase characters
cite = meta;
}
*/
// System.out.println("quote source = " + cite);
}
} else if (statement1.getPredicate().getURI().endsWith("#value")) {
String perspective = "";
String str = statement1.getObject().toString();
// System.out.println("str = " + str);
int idx = str.lastIndexOf("#");
if (idx > -1) {
perspective = str.substring(idx + 1);
} else {
idx = str.lastIndexOf("/");
if (idx > -1) {
perspective = str.substring(idx + 1);
} else {
perspective = str;
}
}
ArrayList<String> myPerspectives = PerspectiveJsonObject.normalizePerspectiveValue(perspective);
for (int k = 0; k < myPerspectives.size(); k++) {
String myPerspective = myPerspectives.get(k);
if (!perspectives.contains(myPerspective)) {
// System.out.println("myPerspective = " + myPerspective);
perspectives.add(myPerspective);
}
}
} else {
// System.out.println("statement1.getPredicate().getURI() = " + statement1.getPredicate().getURI());
}
}
}
}
}
if (perspectives.size() > 0) {
// System.out.println("final author = " + author);
perspectiveJsonObject = new PerspectiveJsonObject(perspectives, author, cite, "", "", "", mentionUri, null);
}
}
return perspectiveJsonObject;
} | public static PerspectiveJsonObject getPerspectiveObjectForEvent(TrigTripleData trigTripleData, String mentionUri, String meta) {
PerspectiveJsonObject perspectiveJsonObject = new PerspectiveJsonObject();
String author = "";
String cite = "";
ArrayList<String> perspectives = new ArrayList<String>();
if (trigTripleData.tripleMapGrasp.containsKey(mentionUri)) {
ArrayList<Statement> perspectiveTriples = trigTripleData.tripleMapGrasp.get(mentionUri);
for (int i = 0; i < perspectiveTriples.size(); i++) {
Statement statement = perspectiveTriples.get(i);
String subject = statement.getSubject().getURI();
String predicate = statement.getPredicate().getURI();
String object = statement.getObject().toString();
if (predicate.endsWith("#hasAttribution")) {
if (trigTripleData.tripleMapGrasp.containsKey(object)) {
ArrayList<Statement> perspectiveValues = trigTripleData.tripleMapGrasp.get(object);
for (int j = 0; j < perspectiveValues.size(); j++) {
Statement statement1 = perspectiveValues.get(j);
if (statement1.getPredicate().getURI().endsWith("#wasAttributedTo")) {
if (trigTripleData.tripleMapGrasp.containsKey(statement1.getObject().toString())) {
ArrayList<Statement> provStatements = trigTripleData.tripleMapGrasp.get(statement1.getObject().toString());
for (int k = 0; k < provStatements.size(); k++) {
Statement statement2 = provStatements.get(k);
author = statement2.getObject().toString();
int idx = author.lastIndexOf("/");
if (idx > -1) {
author = author.substring(idx + 1);
}
}
} else {
cite = statement1.getObject().toString();
int idx = cite.lastIndexOf("/");
if (idx > -1) {
cite = cite.substring(idx + 1);
}
}
} else if (statement1.getPredicate().getURI().endsWith("#value")) {
String perspective = "";
String str = statement1.getObject().toString();
int idx = str.lastIndexOf("#");
if (idx > -1) {
perspective = str.substring(idx + 1);
} else {
idx = str.lastIndexOf("/");
if (idx > -1) {
perspective = str.substring(idx + 1);
} else {
perspective = str;
}
}
ArrayList<String> myPerspectives = PerspectiveJsonObject.normalizePerspectiveValue(perspective);
for (int k = 0; k < myPerspectives.size(); k++) {
String myPerspective = myPerspectives.get(k);
if (!perspectives.contains(myPerspective)) {
perspectives.add(myPerspective);
}
}
} else {
}
}
}
}
}
if (perspectives.size() > 0) {
perspectiveJsonObject = new PerspectiveJsonObject(perspectives, author, cite, "", "", "", mentionUri, null);
}
}
return perspectiveJsonObject;
} | public static perspectivejsonobject getperspectiveobjectforevent(trigtripledata trigtripledata, string mentionuri, string meta) { perspectivejsonobject perspectivejsonobject = new perspectivejsonobject(); string author = ""; string cite = ""; arraylist<string> perspectives = new arraylist<string>(); if (trigtripledata.triplemapgrasp.containskey(mentionuri)) { arraylist<statement> perspectivetriples = trigtripledata.triplemapgrasp.get(mentionuri); for (int i = 0; i < perspectivetriples.size(); i++) { statement statement = perspectivetriples.get(i); string subject = statement.getsubject().geturi(); string predicate = statement.getpredicate().geturi(); string object = statement.getobject().tostring(); if (predicate.endswith("#hasattribution")) { if (trigtripledata.triplemapgrasp.containskey(object)) { arraylist<statement> perspectivevalues = trigtripledata.triplemapgrasp.get(object); for (int j = 0; j < perspectivevalues.size(); j++) { statement statement1 = perspectivevalues.get(j); if (statement1.getpredicate().geturi().endswith("#wasattributedto")) { if (trigtripledata.triplemapgrasp.containskey(statement1.getobject().tostring())) { arraylist<statement> provstatements = trigtripledata.triplemapgrasp.get(statement1.getobject().tostring()); for (int k = 0; k < provstatements.size(); k++) { statement statement2 = provstatements.get(k); author = statement2.getobject().tostring(); int idx = author.lastindexof("/"); if (idx > -1) { author = author.substring(idx + 1); } } } else { cite = statement1.getobject().tostring(); int idx = cite.lastindexof("/"); if (idx > -1) { cite = cite.substring(idx + 1); } } } else if (statement1.getpredicate().geturi().endswith("#value")) { string perspective = ""; string str = statement1.getobject().tostring(); int idx = str.lastindexof("#"); if (idx > -1) { perspective = str.substring(idx + 1); } else { idx = str.lastindexof("/"); if (idx > -1) { perspective = str.substring(idx + 1); } else { perspective = str; } } arraylist<string> myperspectives = perspectivejsonobject.normalizeperspectivevalue(perspective); for (int k = 0; k < myperspectives.size(); k++) { string myperspective = myperspectives.get(k); if (!perspectives.contains(myperspective)) { perspectives.add(myperspective); } } } else { } } } } } if (perspectives.size() > 0) { perspectivejsonobject = new perspectivejsonobject(perspectives, author, cite, "", "", "", mentionuri, null); } } return perspectivejsonobject; } | cltl/EventCoreference | [
0,
0,
1,
0
] |
14,154 | @Test
void buildTask() throws Exception {
VelocityScheduler scheduler = new VelocityScheduler(new FakePluginManager());
CountDownLatch latch = new CountDownLatch(1);
ScheduledTask task = scheduler.buildTask(FakePluginManager.PLUGIN_A, latch::countDown)
.schedule();
latch.await();
assertEquals(TaskStatus.FINISHED, task.status());
} | @Test
void buildTask() throws Exception {
VelocityScheduler scheduler = new VelocityScheduler(new FakePluginManager());
CountDownLatch latch = new CountDownLatch(1);
ScheduledTask task = scheduler.buildTask(FakePluginManager.PLUGIN_A, latch::countDown)
.schedule();
latch.await();
assertEquals(TaskStatus.FINISHED, task.status());
} | @test void buildtask() throws exception { velocityscheduler scheduler = new velocityscheduler(new fakepluginmanager()); countdownlatch latch = new countdownlatch(1); scheduledtask task = scheduler.buildtask(fakepluginmanager.plugin_a, latch::countdown) .schedule(); latch.await(); assertequals(taskstatus.finished, task.status()); } | astei/velocity | [
0,
0,
1,
0
] |
30,548 | @Override
public View getView(int position, View convertView, ViewGroup parent) {
ViewHolder holder;
ParseUser currentUser = ParseUser.getCurrentUser();
String role = currentUser.getString("role");
if(convertView == null) {
convertView = LayoutInflater.from(mContext).inflate(R.layout.product_list_item, null);
holder = new ViewHolder();
holder.companyNameLabel = (TextView) convertView.findViewById(R.id.companyName);
holder.productNameLabel = (TextView) convertView.findViewById(R.id.productNameLabel);
holder.priceNameLabel = (TextView) convertView.findViewById(R.id.productPriceLabel);
holder.addItemButton = (Button) convertView.findViewById(R.id.btnAddToCart);
holder.editItemButton = (Button) convertView.findViewById(R.id.btnEditButton);
convertView.setTag(holder);
} else {
holder = (ViewHolder) convertView.getTag();
}
if (role.equals("Company Account")) {
holder.editItemButton.setVisibility(View.VISIBLE);
}
else if (role.equals("Personal Account")) {
holder.addItemButton.setVisibility(View.VISIBLE);
}
final Product product = mProducts.get(position);
holder.companyNameLabel.setText(product.getCompany());
holder.productNameLabel.setText(product.getProductName());
holder.priceNameLabel.setText(product.getPrice() + "");
holder.addItemButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(mContext, CheckoutActivity.class);
intent.putExtra("product", product.getProductName());
mContext.startActivity(intent);
//TODO: Add functionality for edit button
}
});
return convertView;
} | @Override
public View getView(int position, View convertView, ViewGroup parent) {
ViewHolder holder;
ParseUser currentUser = ParseUser.getCurrentUser();
String role = currentUser.getString("role");
if(convertView == null) {
convertView = LayoutInflater.from(mContext).inflate(R.layout.product_list_item, null);
holder = new ViewHolder();
holder.companyNameLabel = (TextView) convertView.findViewById(R.id.companyName);
holder.productNameLabel = (TextView) convertView.findViewById(R.id.productNameLabel);
holder.priceNameLabel = (TextView) convertView.findViewById(R.id.productPriceLabel);
holder.addItemButton = (Button) convertView.findViewById(R.id.btnAddToCart);
holder.editItemButton = (Button) convertView.findViewById(R.id.btnEditButton);
convertView.setTag(holder);
} else {
holder = (ViewHolder) convertView.getTag();
}
if (role.equals("Company Account")) {
holder.editItemButton.setVisibility(View.VISIBLE);
}
else if (role.equals("Personal Account")) {
holder.addItemButton.setVisibility(View.VISIBLE);
}
final Product product = mProducts.get(position);
holder.companyNameLabel.setText(product.getCompany());
holder.productNameLabel.setText(product.getProductName());
holder.priceNameLabel.setText(product.getPrice() + "");
holder.addItemButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(mContext, CheckoutActivity.class);
intent.putExtra("product", product.getProductName());
mContext.startActivity(intent);
}
});
return convertView;
} | @override public view getview(int position, view convertview, viewgroup parent) { viewholder holder; parseuser currentuser = parseuser.getcurrentuser(); string role = currentuser.getstring("role"); if(convertview == null) { convertview = layoutinflater.from(mcontext).inflate(r.layout.product_list_item, null); holder = new viewholder(); holder.companynamelabel = (textview) convertview.findviewbyid(r.id.companyname); holder.productnamelabel = (textview) convertview.findviewbyid(r.id.productnamelabel); holder.pricenamelabel = (textview) convertview.findviewbyid(r.id.productpricelabel); holder.additembutton = (button) convertview.findviewbyid(r.id.btnaddtocart); holder.edititembutton = (button) convertview.findviewbyid(r.id.btneditbutton); convertview.settag(holder); } else { holder = (viewholder) convertview.gettag(); } if (role.equals("company account")) { holder.edititembutton.setvisibility(view.visible); } else if (role.equals("personal account")) { holder.additembutton.setvisibility(view.visible); } final product product = mproducts.get(position); holder.companynamelabel.settext(product.getcompany()); holder.productnamelabel.settext(product.getproductname()); holder.pricenamelabel.settext(product.getprice() + ""); holder.additembutton.setonclicklistener(new view.onclicklistener() { @override public void onclick(view v) { intent intent = new intent(mcontext, checkoutactivity.class); intent.putextra("product", product.getproductname()); mcontext.startactivity(intent); } }); return convertview; } | ashleysullins/Postmates-android | [
0,
1,
0,
0
] |
22,367 | public static void main(String[] args) {
// TODO Implement me, pls :(
} | public static void main(String[] args) {
} | public static void main(string[] args) { } | awwitecki/kafka-samples | [
0,
1,
0,
0
] |
30,563 | private boolean hasTimedOut(SentMessage sentMessage) {
// NPE fix. For some reason under heavy load redis can return null records.
if (sentMessage == null) {
return false;
}
long now = new Date().getTime();
Date createdTs = sentMessage.getCreatedTs();
return createdTs.getTime() + faxTimeout < now;
} | private boolean hasTimedOut(SentMessage sentMessage) {
if (sentMessage == null) {
return false;
}
long now = new Date().getTime();
Date createdTs = sentMessage.getCreatedTs();
return createdTs.getTime() + faxTimeout < now;
} | private boolean hastimedout(sentmessage sentmessage) { if (sentmessage == null) { return false; } long now = new date().gettime(); date createdts = sentmessage.getcreatedts(); return createdts.gettime() + faxtimeout < now; } | bcgov/jag-efax | [
0,
0,
1,
0
] |
22,568 | public long forceSkip(long bytesToSkip) throws IOException {
long skipped = mUpstream.skip(bytesToSkip);
try {
// Figure out where we need to jump to...
int skip = (int) (bytesToSkip % AES_BLOCK_SIZE);
long blockOffset = bytesToSkip - skip;
long numberOfBlocks = blockOffset / AES_BLOCK_SIZE;
// TODO: This is designed for CTS mode, for other modes this routine code has to be changed
// So that we don't have to read the entire stream, we have to compute what the IV should be at this point in time in CTS mode
BigInteger ivForOffsetAsBigInteger = new BigInteger(1, mIvParameterSpec.getIV()).add(BigInteger.valueOf(numberOfBlocks));
byte[] ivForOffsetByteArray = ivForOffsetAsBigInteger.toByteArray();
IvParameterSpec computedIvParameterSpecForOffset;
if (ivForOffsetByteArray.length < AES_BLOCK_SIZE) {
byte[] resizedIvForOffsetByteArray = new byte[AES_BLOCK_SIZE];
System.arraycopy(ivForOffsetByteArray, 0, resizedIvForOffsetByteArray, AES_BLOCK_SIZE - ivForOffsetByteArray.length, ivForOffsetByteArray.length);
computedIvParameterSpecForOffset = new IvParameterSpec(resizedIvForOffsetByteArray);
} else {
computedIvParameterSpecForOffset = new IvParameterSpec(ivForOffsetByteArray, ivForOffsetByteArray.length - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
}
// Setup the cipher to use the new IV at the proper offset...
mCipher.init(Cipher.ENCRYPT_MODE, mSecretKeySpec, computedIvParameterSpecForOffset);
byte[] skipBuffer = new byte[skip];
// And read/update the buffer to be decrypted
mCipher.update(skipBuffer, 0, skip, skipBuffer);
Arrays.fill(skipBuffer, (byte) 0);
} catch (Exception e) {
return 0;
}
return skipped;
} | public long forceSkip(long bytesToSkip) throws IOException {
long skipped = mUpstream.skip(bytesToSkip);
try {
int skip = (int) (bytesToSkip % AES_BLOCK_SIZE);
long blockOffset = bytesToSkip - skip;
long numberOfBlocks = blockOffset / AES_BLOCK_SIZE;
BigInteger ivForOffsetAsBigInteger = new BigInteger(1, mIvParameterSpec.getIV()).add(BigInteger.valueOf(numberOfBlocks));
byte[] ivForOffsetByteArray = ivForOffsetAsBigInteger.toByteArray();
IvParameterSpec computedIvParameterSpecForOffset;
if (ivForOffsetByteArray.length < AES_BLOCK_SIZE) {
byte[] resizedIvForOffsetByteArray = new byte[AES_BLOCK_SIZE];
System.arraycopy(ivForOffsetByteArray, 0, resizedIvForOffsetByteArray, AES_BLOCK_SIZE - ivForOffsetByteArray.length, ivForOffsetByteArray.length);
computedIvParameterSpecForOffset = new IvParameterSpec(resizedIvForOffsetByteArray);
} else {
computedIvParameterSpecForOffset = new IvParameterSpec(ivForOffsetByteArray, ivForOffsetByteArray.length - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
}
mCipher.init(Cipher.ENCRYPT_MODE, mSecretKeySpec, computedIvParameterSpecForOffset);
byte[] skipBuffer = new byte[skip];
mCipher.update(skipBuffer, 0, skip, skipBuffer);
Arrays.fill(skipBuffer, (byte) 0);
} catch (Exception e) {
return 0;
}
return skipped;
} | public long forceskip(long bytestoskip) throws ioexception { long skipped = mupstream.skip(bytestoskip); try { int skip = (int) (bytestoskip % aes_block_size); long blockoffset = bytestoskip - skip; long numberofblocks = blockoffset / aes_block_size; biginteger ivforoffsetasbiginteger = new biginteger(1, mivparameterspec.getiv()).add(biginteger.valueof(numberofblocks)); byte[] ivforoffsetbytearray = ivforoffsetasbiginteger.tobytearray(); ivparameterspec computedivparameterspecforoffset; if (ivforoffsetbytearray.length < aes_block_size) { byte[] resizedivforoffsetbytearray = new byte[aes_block_size]; system.arraycopy(ivforoffsetbytearray, 0, resizedivforoffsetbytearray, aes_block_size - ivforoffsetbytearray.length, ivforoffsetbytearray.length); computedivparameterspecforoffset = new ivparameterspec(resizedivforoffsetbytearray); } else { computedivparameterspecforoffset = new ivparameterspec(ivforoffsetbytearray, ivforoffsetbytearray.length - aes_block_size, aes_block_size); } mcipher.init(cipher.encrypt_mode, msecretkeyspec, computedivparameterspecforoffset); byte[] skipbuffer = new byte[skip]; mcipher.update(skipbuffer, 0, skip, skipbuffer); arrays.fill(skipbuffer, (byte) 0); } catch (exception e) { return 0; } return skipped; } | bancha2nd/nativescript-plugins | [
1,
0,
0,
0
] |
30,915 | @BeforeEach
protected void setup() throws Exception {
// Remove the existing program data folder
String suffix = "-" + Profile.TEST;
ProductCard metadata = ProductCard.info( Program.class );
Path programDataFolder = OperatingSystem.getUserProgramDataFolder( metadata.getArtifact() + suffix, metadata.getName() + suffix );
assertThat( aggressiveDelete( programDataFolder ) ).withFailMessage( "Failed to delete program data folder" ).isTrue();
// For the parameters to be available using Java 9, the following needs to be added
// to the test JVM command line parameters because com.sun.javafx.application.ParametersImpl
// is not exposed, nor is there a "proper" way to access it:
//
// --add-opens=javafx.graphics/com.sun.javafx.application=ALL-UNNAMED
program = (Program)FxToolkit.setupApplication( Program.class, ProgramTestConfig.getParameterValues() );
program.register( ProgramEvent.ANY, programWatcher = new EventWatcher( TIMEOUT ) );
Fx.waitForWithExceptions( TIMEOUT );
// NOTE Thread.yield() is helpful but not consistent
Thread.yield();
programWatcher.waitForEvent( ProgramEvent.STARTED, TIMEOUT );
Fx.waitForWithExceptions( TIMEOUT );
// NOTE Thread.yield() is helpful but not consistent
Thread.yield();
// Wait for the active workarea
// FIXME This should use an event listener to wait for the workarea
long limit = System.currentTimeMillis() + TIMEOUT;
while( program.getWorkspaceManager().getActiveWorkspace().getActiveWorkarea() == null && System.currentTimeMillis() < limit ) {
ThreadUtil.pause( 100 );
}
assertThat( program ).withFailMessage( "Program is null" ).isNotNull();
assertThat( program.getWorkspaceManager() ).withFailMessage( "Workspace manager is null" ).isNotNull();
assertThat( program.getWorkspaceManager().getActiveWorkspace() ).withFailMessage( "Active workspace is null" ).isNotNull();
assertThat( program.getWorkspaceManager().getActiveWorkspace().getActiveWorkarea() ).withFailMessage( "Active workarea is null" ).isNotNull();
assertThat( program.getWorkspaceManager().getActiveWorkspace().getActiveWorkarea().getWorkpane() ).withFailMessage( "Active workpane is null" ).isNotNull();
Workpane workpane = program.getWorkspaceManager().getActiveWorkspace().getActiveWorkarea().getWorkpane();
workpane.addEventHandler( WorkpaneEvent.ANY, workpaneWatcher = new FxEventWatcher() );
initialMemoryUse = getMemoryUse();
} | @BeforeEach
protected void setup() throws Exception {
String suffix = "-" + Profile.TEST;
ProductCard metadata = ProductCard.info( Program.class );
Path programDataFolder = OperatingSystem.getUserProgramDataFolder( metadata.getArtifact() + suffix, metadata.getName() + suffix );
assertThat( aggressiveDelete( programDataFolder ) ).withFailMessage( "Failed to delete program data folder" ).isTrue();
program = (Program)FxToolkit.setupApplication( Program.class, ProgramTestConfig.getParameterValues() );
program.register( ProgramEvent.ANY, programWatcher = new EventWatcher( TIMEOUT ) );
Fx.waitForWithExceptions( TIMEOUT );
Thread.yield();
programWatcher.waitForEvent( ProgramEvent.STARTED, TIMEOUT );
Fx.waitForWithExceptions( TIMEOUT );
Thread.yield();
long limit = System.currentTimeMillis() + TIMEOUT;
while( program.getWorkspaceManager().getActiveWorkspace().getActiveWorkarea() == null && System.currentTimeMillis() < limit ) {
ThreadUtil.pause( 100 );
}
assertThat( program ).withFailMessage( "Program is null" ).isNotNull();
assertThat( program.getWorkspaceManager() ).withFailMessage( "Workspace manager is null" ).isNotNull();
assertThat( program.getWorkspaceManager().getActiveWorkspace() ).withFailMessage( "Active workspace is null" ).isNotNull();
assertThat( program.getWorkspaceManager().getActiveWorkspace().getActiveWorkarea() ).withFailMessage( "Active workarea is null" ).isNotNull();
assertThat( program.getWorkspaceManager().getActiveWorkspace().getActiveWorkarea().getWorkpane() ).withFailMessage( "Active workpane is null" ).isNotNull();
Workpane workpane = program.getWorkspaceManager().getActiveWorkspace().getActiveWorkarea().getWorkpane();
workpane.addEventHandler( WorkpaneEvent.ANY, workpaneWatcher = new FxEventWatcher() );
initialMemoryUse = getMemoryUse();
} | @beforeeach protected void setup() throws exception { string suffix = "-" + profile.test; productcard metadata = productcard.info( program.class ); path programdatafolder = operatingsystem.getuserprogramdatafolder( metadata.getartifact() + suffix, metadata.getname() + suffix ); assertthat( aggressivedelete( programdatafolder ) ).withfailmessage( "failed to delete program data folder" ).istrue(); program = (program)fxtoolkit.setupapplication( program.class, programtestconfig.getparametervalues() ); program.register( programevent.any, programwatcher = new eventwatcher( timeout ) ); fx.waitforwithexceptions( timeout ); thread.yield(); programwatcher.waitforevent( programevent.started, timeout ); fx.waitforwithexceptions( timeout ); thread.yield(); long limit = system.currenttimemillis() + timeout; while( program.getworkspacemanager().getactiveworkspace().getactiveworkarea() == null && system.currenttimemillis() < limit ) { threadutil.pause( 100 ); } assertthat( program ).withfailmessage( "program is null" ).isnotnull(); assertthat( program.getworkspacemanager() ).withfailmessage( "workspace manager is null" ).isnotnull(); assertthat( program.getworkspacemanager().getactiveworkspace() ).withfailmessage( "active workspace is null" ).isnotnull(); assertthat( program.getworkspacemanager().getactiveworkspace().getactiveworkarea() ).withfailmessage( "active workarea is null" ).isnotnull(); assertthat( program.getworkspacemanager().getactiveworkspace().getactiveworkarea().getworkpane() ).withfailmessage( "active workpane is null" ).isnotnull(); workpane workpane = program.getworkspacemanager().getactiveworkspace().getactiveworkarea().getworkpane(); workpane.addeventhandler( workpaneevent.any, workpanewatcher = new fxeventwatcher() ); initialmemoryuse = getmemoryuse(); } | avereon/xenon | [
1,
0,
0,
0
] |
14,698 | private <AH extends AssignmentHolderType> Collection<EvaluatedAssignment<AH>> evaluateAssignments(AH assignmentHolder, Collection<AssignmentType> assignments, boolean virtual, AssignmentEvaluator<AH> assignmentEvaluator, Task task, OperationResult result) {
List<EvaluatedAssignment<AH>> evaluatedAssignments = new ArrayList<>();
RepositoryCache.enter(cacheConfigurationManager);
try {
for (AssignmentType assignmentType: assignments) {
try {
PrismContainerDefinition definition = assignmentType.asPrismContainerValue().getDefinition();
if (definition == null) {
// TODO: optimize
definition = prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(AssignmentHolderType.class).findContainerDefinition(AssignmentHolderType.F_ASSIGNMENT);
}
ItemDeltaItem<PrismContainerValue<AssignmentType>,PrismContainerDefinition<AssignmentType>> assignmentIdi =
new ItemDeltaItem<>(LensUtil.createAssignmentSingleValueContainer(assignmentType), definition);
EvaluatedAssignment<AH> assignment = assignmentEvaluator.evaluate(assignmentIdi, PlusMinusZero.ZERO, false, assignmentHolder, assignmentHolder.toString(), virtual, task, result);
evaluatedAssignments.add(assignment);
} catch (SchemaException | ObjectNotFoundException | ExpressionEvaluationException | PolicyViolationException | SecurityViolationException | ConfigurationException | CommunicationException e) {
LOGGER.error("Error while processing assignment of {}: {}; assignment: {}",
assignmentHolder, e.getMessage(), assignmentType, e);
}
}
} finally {
RepositoryCache.exit();
}
return evaluatedAssignments;
} | private <AH extends AssignmentHolderType> Collection<EvaluatedAssignment<AH>> evaluateAssignments(AH assignmentHolder, Collection<AssignmentType> assignments, boolean virtual, AssignmentEvaluator<AH> assignmentEvaluator, Task task, OperationResult result) {
List<EvaluatedAssignment<AH>> evaluatedAssignments = new ArrayList<>();
RepositoryCache.enter(cacheConfigurationManager);
try {
for (AssignmentType assignmentType: assignments) {
try {
PrismContainerDefinition definition = assignmentType.asPrismContainerValue().getDefinition();
if (definition == null) {
definition = prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(AssignmentHolderType.class).findContainerDefinition(AssignmentHolderType.F_ASSIGNMENT);
}
ItemDeltaItem<PrismContainerValue<AssignmentType>,PrismContainerDefinition<AssignmentType>> assignmentIdi =
new ItemDeltaItem<>(LensUtil.createAssignmentSingleValueContainer(assignmentType), definition);
EvaluatedAssignment<AH> assignment = assignmentEvaluator.evaluate(assignmentIdi, PlusMinusZero.ZERO, false, assignmentHolder, assignmentHolder.toString(), virtual, task, result);
evaluatedAssignments.add(assignment);
} catch (SchemaException | ObjectNotFoundException | ExpressionEvaluationException | PolicyViolationException | SecurityViolationException | ConfigurationException | CommunicationException e) {
LOGGER.error("Error while processing assignment of {}: {}; assignment: {}",
assignmentHolder, e.getMessage(), assignmentType, e);
}
}
} finally {
RepositoryCache.exit();
}
return evaluatedAssignments;
} | private <ah extends assignmentholdertype> collection<evaluatedassignment<ah>> evaluateassignments(ah assignmentholder, collection<assignmenttype> assignments, boolean virtual, assignmentevaluator<ah> assignmentevaluator, task task, operationresult result) { list<evaluatedassignment<ah>> evaluatedassignments = new arraylist<>(); repositorycache.enter(cacheconfigurationmanager); try { for (assignmenttype assignmenttype: assignments) { try { prismcontainerdefinition definition = assignmenttype.asprismcontainervalue().getdefinition(); if (definition == null) { definition = prismcontext.getschemaregistry().findobjectdefinitionbycompiletimeclass(assignmentholdertype.class).findcontainerdefinition(assignmentholdertype.f_assignment); } itemdeltaitem<prismcontainervalue<assignmenttype>,prismcontainerdefinition<assignmenttype>> assignmentidi = new itemdeltaitem<>(lensutil.createassignmentsinglevaluecontainer(assignmenttype), definition); evaluatedassignment<ah> assignment = assignmentevaluator.evaluate(assignmentidi, plusminuszero.zero, false, assignmentholder, assignmentholder.tostring(), virtual, task, result); evaluatedassignments.add(assignment); } catch (schemaexception | objectnotfoundexception | expressionevaluationexception | policyviolationexception | securityviolationexception | configurationexception | communicationexception e) { logger.error("error while processing assignment of {}: {}; assignment: {}", assignmentholder, e.getmessage(), assignmenttype, e); } } } finally { repositorycache.exit(); } return evaluatedassignments; } | bshp/midpoint | [
1,
0,
0,
0
] |
22,947 | private void decode(byte[] sensorData) {
kiwriousReader.setRawValues(sensorData);
switch (sensorData[KIWRIOUS_SENSOR_TYPE]) {
//TODO: complete decode calls using bye array data
case SENSOR_COLOUR:
String[] colorValues = sensorDecoder.decodeColor(sensorData);
kiwriousReader.setR(Integer.parseInt(colorValues[0]));
kiwriousReader.setG(Integer.parseInt(colorValues[1]));
kiwriousReader.setB(Integer.parseInt(colorValues[2]));
break;
case SENSOR_CONDUCTIVITY:
String[] conductivityValues = sensorDecoder.decodeConductivity(sensorData);
kiwriousReader.setResistance(Long.parseLong(conductivityValues[0]));
kiwriousReader.setConductivity(Float.parseFloat(conductivityValues[1]));
break;
case SENSOR_HEART_RATE:
String heartRateValue = sensorDecoder.decodeHeartRate(sensorData);
kiwriousReader.setHeartRate(Integer.parseInt(heartRateValue));
break;
case SENSOR_HUMIDITY:
Float[] humidityValues = sensorDecoder.decodeHumidity(sensorData);
kiwriousReader.setTemperature(humidityValues[0]);
kiwriousReader.setHumidity(humidityValues[1]);
break;
case SENSOR_SOUND:
// sensorDecoder.decodeSound(values);
break;
case SENSOR_TEMPERATURE:
String[] temperatureValues = sensorDecoder.decodeTemperature(sensorData);
kiwriousReader.setAmbientTemperature(Integer.parseInt(temperatureValues[0]));
kiwriousReader.setInfraredTemperature(Integer.parseInt(temperatureValues[1]));
break;
case SENSOR_TEMPERATURE2:
String[] temperature2Values = sensorDecoder.decodeTemperature2(sensorData);
kiwriousReader.setAmbientTemperature(Integer.parseInt(temperature2Values[0]));
kiwriousReader.setInfraredTemperature(Integer.parseInt(temperature2Values[1]));
break;
case SENSOR_UV:
String[] lightValues = sensorDecoder.decodeUV(sensorData);
kiwriousReader.setLux(Long.parseLong(lightValues[0]));
kiwriousReader.setUv(Float.parseFloat(lightValues[1]));
break;
case SENSOR_VOC:
String[] vocValues = sensorDecoder.decodeVOC(sensorData);
kiwriousReader.setVoc(Integer.parseInt(vocValues[0]));
kiwriousReader.setCo2(Integer.parseInt(vocValues[1]));
break;
default:
Log.e("kiwrious-plugin", "unexpected sensor type "+sensorData[KIWRIOUS_SENSOR_TYPE]);
break;
}
} | private void decode(byte[] sensorData) {
kiwriousReader.setRawValues(sensorData);
switch (sensorData[KIWRIOUS_SENSOR_TYPE]) {
case SENSOR_COLOUR:
String[] colorValues = sensorDecoder.decodeColor(sensorData);
kiwriousReader.setR(Integer.parseInt(colorValues[0]));
kiwriousReader.setG(Integer.parseInt(colorValues[1]));
kiwriousReader.setB(Integer.parseInt(colorValues[2]));
break;
case SENSOR_CONDUCTIVITY:
String[] conductivityValues = sensorDecoder.decodeConductivity(sensorData);
kiwriousReader.setResistance(Long.parseLong(conductivityValues[0]));
kiwriousReader.setConductivity(Float.parseFloat(conductivityValues[1]));
break;
case SENSOR_HEART_RATE:
String heartRateValue = sensorDecoder.decodeHeartRate(sensorData);
kiwriousReader.setHeartRate(Integer.parseInt(heartRateValue));
break;
case SENSOR_HUMIDITY:
Float[] humidityValues = sensorDecoder.decodeHumidity(sensorData);
kiwriousReader.setTemperature(humidityValues[0]);
kiwriousReader.setHumidity(humidityValues[1]);
break;
case SENSOR_SOUND:
break;
case SENSOR_TEMPERATURE:
String[] temperatureValues = sensorDecoder.decodeTemperature(sensorData);
kiwriousReader.setAmbientTemperature(Integer.parseInt(temperatureValues[0]));
kiwriousReader.setInfraredTemperature(Integer.parseInt(temperatureValues[1]));
break;
case SENSOR_TEMPERATURE2:
String[] temperature2Values = sensorDecoder.decodeTemperature2(sensorData);
kiwriousReader.setAmbientTemperature(Integer.parseInt(temperature2Values[0]));
kiwriousReader.setInfraredTemperature(Integer.parseInt(temperature2Values[1]));
break;
case SENSOR_UV:
String[] lightValues = sensorDecoder.decodeUV(sensorData);
kiwriousReader.setLux(Long.parseLong(lightValues[0]));
kiwriousReader.setUv(Float.parseFloat(lightValues[1]));
break;
case SENSOR_VOC:
String[] vocValues = sensorDecoder.decodeVOC(sensorData);
kiwriousReader.setVoc(Integer.parseInt(vocValues[0]));
kiwriousReader.setCo2(Integer.parseInt(vocValues[1]));
break;
default:
Log.e("kiwrious-plugin", "unexpected sensor type "+sensorData[KIWRIOUS_SENSOR_TYPE]);
break;
}
} | private void decode(byte[] sensordata) { kiwriousreader.setrawvalues(sensordata); switch (sensordata[kiwrious_sensor_type]) { case sensor_colour: string[] colorvalues = sensordecoder.decodecolor(sensordata); kiwriousreader.setr(integer.parseint(colorvalues[0])); kiwriousreader.setg(integer.parseint(colorvalues[1])); kiwriousreader.setb(integer.parseint(colorvalues[2])); break; case sensor_conductivity: string[] conductivityvalues = sensordecoder.decodeconductivity(sensordata); kiwriousreader.setresistance(long.parselong(conductivityvalues[0])); kiwriousreader.setconductivity(float.parsefloat(conductivityvalues[1])); break; case sensor_heart_rate: string heartratevalue = sensordecoder.decodeheartrate(sensordata); kiwriousreader.setheartrate(integer.parseint(heartratevalue)); break; case sensor_humidity: float[] humidityvalues = sensordecoder.decodehumidity(sensordata); kiwriousreader.settemperature(humidityvalues[0]); kiwriousreader.sethumidity(humidityvalues[1]); break; case sensor_sound: break; case sensor_temperature: string[] temperaturevalues = sensordecoder.decodetemperature(sensordata); kiwriousreader.setambienttemperature(integer.parseint(temperaturevalues[0])); kiwriousreader.setinfraredtemperature(integer.parseint(temperaturevalues[1])); break; case sensor_temperature2: string[] temperature2values = sensordecoder.decodetemperature2(sensordata); kiwriousreader.setambienttemperature(integer.parseint(temperature2values[0])); kiwriousreader.setinfraredtemperature(integer.parseint(temperature2values[1])); break; case sensor_uv: string[] lightvalues = sensordecoder.decodeuv(sensordata); kiwriousreader.setlux(long.parselong(lightvalues[0])); kiwriousreader.setuv(float.parsefloat(lightvalues[1])); break; case sensor_voc: string[] vocvalues = sensordecoder.decodevoc(sensordata); kiwriousreader.setvoc(integer.parseint(vocvalues[0])); kiwriousreader.setco2(integer.parseint(vocvalues[1])); break; default: log.e("kiwrious-plugin", "unexpected sensor type "+sensordata[kiwrious_sensor_type]); break; } } | augmented-human-lab/kiwrious-android-library | [
0,
1,
0,
0
] |
23,039 | @GetMapping("/sign-in")
Mono<JwtToken> signIn(@RequestParam("login") String login,//FIXME use Auth header with basic auth
@RequestParam("password") String password) {
return userRepository.findByEmailOrMobile(login, login)
.filter(u -> u.validatePassword(password))
.map(this::createToken)
.map(Mono::just)
.orElseThrow(() -> new IllegalArgumentException("Invalid login or password"));
} | @GetMapping("/sign-in")
    Mono<JwtToken> signIn(@RequestParam("login") String login,
@RequestParam("password") String password) {
return userRepository.findByEmailOrMobile(login, login)
.filter(u -> u.validatePassword(password))
.map(this::createToken)
.map(Mono::just)
.orElseThrow(() -> new IllegalArgumentException("Invalid login or password"));
    } | @getmapping("/sign-in") mono<jwttoken> signin(@requestparam("login") string login, @requestparam("password") string password) { return userrepository.findbyemailormobile(login, login) .filter(u -> u.validatepassword(password)) .map(this::createtoken) .map(mono::just) .orelsethrow(() -> new illegalargumentexception("invalid login or password")); } | batiaev/provisioner | [
0,
0,
1,
0
] |
23,061 | @Override
void testBaseLib() {} | @Override
void testBaseLib() {} | @override void testbaselib() {} | baodingfengyun/luaj | [
0,
0,
0,
1
] |
23,062 | @Override
void testCoroutineLib() {} | @Override
void testCoroutineLib() {} | @override void testcoroutinelib() {} | baodingfengyun/luaj | [
0,
0,
0,
1
] |
23,063 | @Override
void testIoLib() {} | @Override
void testIoLib() {} | @override void testiolib() {} | baodingfengyun/luaj | [
0,
0,
0,
1
] |
23,064 | @Override
void testMetatags() {} | @Override
void testMetatags() {} | @override void testmetatags() {} | baodingfengyun/luaj | [
0,
0,
0,
1
] |
23,065 | @Override
void testOsLib() {} | @Override
void testOsLib() {} | @override void testoslib() {} | baodingfengyun/luaj | [
0,
0,
0,
1
] |
23,066 | @Override
void testStringLib() {} | @Override
void testStringLib() {} | @override void teststringlib() {} | baodingfengyun/luaj | [
0,
0,
0,
1
] |
14,968 | @Override
public String explainIncompleteness() {
String result = null;
if (isValueDomain()) {
//This concept name is value so do special processing
result = explainIfNotAllTokensProcessed("");
if (this.instanceName == null) {
result = appendReason(result, "No instance name was identified");
} else {
//TODO: Eventually remove this test
if (this.instanceName.equals(TOKEN_NAMED)) {
result = appendReason(result, "Instance name of 'named' was detected");
}
}
} else {
//Do normal processing
result = super.explainIncompleteness();
}
return result;
} | @Override
public String explainIncompleteness() {
String result = null;
if (isValueDomain()) {
result = explainIfNotAllTokensProcessed("");
if (this.instanceName == null) {
result = appendReason(result, "No instance name was identified");
} else {
if (this.instanceName.equals(TOKEN_NAMED)) {
result = appendReason(result, "Instance name of 'named' was detected");
}
}
} else {
result = super.explainIncompleteness();
}
return result;
} | @override public string explainincompleteness() { string result = null; if (isvaluedomain()) { result = explainifnotalltokensprocessed(""); if (this.instancename == null) { result = appendreason(result, "no instance name was identified"); } else { if (this.instancename.equals(token_named)) { result = appendreason(result, "instance name of 'named' was detected"); } } } else { result = super.explainincompleteness(); } return result; } | ce-store/ce-store | [
1,
0,
0,
0
] |
31,429 | @Override
public void run() {
// Bounds
if ( colorPick.getX()+hs > colorPane.getX()+colorPane.getWidth()-1 )
colorPick.setAbsolutePosition(colorPane.getX()+colorPane.getWidth()-hs-1, colorPick.getY());
if ( colorPick.getX()+hs < colorPane.getX() )
colorPick.setAbsolutePosition(colorPane.getX()-hs, colorPick.getY());
if ( colorPick.getY()+hs > colorPane.getY()+colorPane.getHeight() )
colorPick.setAbsolutePosition(colorPick.getX(), colorPane.getY()+colorPane.getHeight()-hs);
if ( colorPick.getY()+hs < colorPane.getY()+1 )
colorPick.setAbsolutePosition(colorPick.getX(), colorPane.getY()-hs+1);
Vector2i offset = new Vector2i( (int)(colorPick.getX()+hs-colorPane.getX()), (int)colorPane.getHeight()-(int)(colorPick.getY()+hs-colorPane.getY()) );
int index = (offset.y*(int)colorPane.getWidth())+offset.x;
int rgb = colorPane.colors[index];
// Swap blue/red channel? Why do I need to do this?
Color c = new Color((rgb >> 0) & 0xFF, (rgb >> 8) & 0xFF, (rgb >> 16) & 0xFF, 255);
// Update the temp color
colorS.setBackgroundLegacy(c);
r.setText(""+c.getRed());
g.setText(""+c.getGreen());
b.setText(""+c.getBlue());
colorPick.setBackgroundLegacy(c);
tempColor(c);
} | @Override
public void run() {
if ( colorPick.getX()+hs > colorPane.getX()+colorPane.getWidth()-1 )
colorPick.setAbsolutePosition(colorPane.getX()+colorPane.getWidth()-hs-1, colorPick.getY());
if ( colorPick.getX()+hs < colorPane.getX() )
colorPick.setAbsolutePosition(colorPane.getX()-hs, colorPick.getY());
if ( colorPick.getY()+hs > colorPane.getY()+colorPane.getHeight() )
colorPick.setAbsolutePosition(colorPick.getX(), colorPane.getY()+colorPane.getHeight()-hs);
if ( colorPick.getY()+hs < colorPane.getY()+1 )
colorPick.setAbsolutePosition(colorPick.getX(), colorPane.getY()-hs+1);
Vector2i offset = new Vector2i( (int)(colorPick.getX()+hs-colorPane.getX()), (int)colorPane.getHeight()-(int)(colorPick.getY()+hs-colorPane.getY()) );
int index = (offset.y*(int)colorPane.getWidth())+offset.x;
int rgb = colorPane.colors[index];
Color c = new Color((rgb >> 0) & 0xFF, (rgb >> 8) & 0xFF, (rgb >> 16) & 0xFF, 255);
colorS.setBackgroundLegacy(c);
r.setText(""+c.getRed());
g.setText(""+c.getGreen());
b.setText(""+c.getBlue());
colorPick.setBackgroundLegacy(c);
tempColor(c);
} | @override public void run() { if ( colorpick.getx()+hs > colorpane.getx()+colorpane.getwidth()-1 ) colorpick.setabsoluteposition(colorpane.getx()+colorpane.getwidth()-hs-1, colorpick.gety()); if ( colorpick.getx()+hs < colorpane.getx() ) colorpick.setabsoluteposition(colorpane.getx()-hs, colorpick.gety()); if ( colorpick.gety()+hs > colorpane.gety()+colorpane.getheight() ) colorpick.setabsoluteposition(colorpick.getx(), colorpane.gety()+colorpane.getheight()-hs); if ( colorpick.gety()+hs < colorpane.gety()+1 ) colorpick.setabsoluteposition(colorpick.getx(), colorpane.gety()-hs+1); vector2i offset = new vector2i( (int)(colorpick.getx()+hs-colorpane.getx()), (int)colorpane.getheight()-(int)(colorpick.gety()+hs-colorpane.gety()) ); int index = (offset.y*(int)colorpane.getwidth())+offset.x; int rgb = colorpane.colors[index]; color c = new color((rgb >> 0) & 0xff, (rgb >> 8) & 0xff, (rgb >> 16) & 0xff, 255); colors.setbackgroundlegacy(c); r.settext(""+c.getred()); g.settext(""+c.getgreen()); b.settext(""+c.getblue()); colorpick.setbackgroundlegacy(c); tempcolor(c); } | bumfo/LWJGUI | [
1,
0,
0,
0
] |
31,545 | @Override
protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals) {
IdConstruct idConstruct = (IdConstruct) getAvailablePrincipal(principals);
return getIOTAccount(idConstruct.getPartition(), idConstruct.getUsername());
} | @Override
protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals) {
IdConstruct idConstruct = (IdConstruct) getAvailablePrincipal(principals);
return getIOTAccount(idConstruct.getPartition(), idConstruct.getUsername());
} | @override protected authorizationinfo dogetauthorizationinfo(principalcollection principals) { idconstruct idconstruct = (idconstruct) getavailableprincipal(principals); return getiotaccount(idconstruct.getpartition(), idconstruct.getusername()); } | caricah/mq.tracah | [
0,
0,
0,
0
] |
15,338 | public static boolean yesNo(String yesNoQuestion){
int input = JOptionPane.showConfirmDialog(null, yesNoQuestion);
// 0=yes, 1=no, 2=cancel
return input==0;
//TODO remove the Cancel button. Should only be Yes and No.
} | public static boolean yesNo(String yesNoQuestion){
int input = JOptionPane.showConfirmDialog(null, yesNoQuestion);
return input==0;
} | public static boolean yesno(string yesnoquestion){ int input = joptionpane.showconfirmdialog(null, yesnoquestion); return input==0; } | benrayfield/occamsworkspace | [
1,
0,
0,
0
] |
15,390 | private void findActionVerbsAndArguments(TreeGraphNode node,
Map<Pair<Integer, Integer>, String> edge_rels,
Map<TreeGraphNode, Set<TreeGraphNode>> node_to_deps,
Map<TreeGraphNode, TreeGraphNode> dep_to_gov,
String root_type,
List<Tree> leaves,
List<String> lemmas, List<Label> tags, List<HasWord> words,
String recipe_name, int sentence_idx, int curr_sentence_char_offset,
String sentence_string, TreeMap<Integer, RecipeEvent> predicate_idx_to_event) {
String node_string = node.nodeString();
int index = node.index();
Label tag = tags.get(index - 1);
int hw_character_offset = charOffset(leaves.get(index - 1));
// Find the no-space character offset of the predicate
int no_space_character_offset = hw_character_offset;
String sentence_prefix = sentence_string.substring(0, hw_character_offset);
while (sentence_prefix.contains(" I would ")) {
no_space_character_offset -= 9;
sentence_prefix = sentence_prefix.replaceFirst(" I would ", "");
}
while (sentence_prefix.contains("I would ")) {
no_space_character_offset -= 8;
sentence_prefix = sentence_prefix.replaceFirst("I would ", "");
}
while (sentence_prefix.contains(" ")) {
no_space_character_offset--;
sentence_prefix = sentence_prefix.replaceFirst(" ", "");
}
if (tag.value().startsWith("V")) {
Set<TreeGraphNode> deps = node_to_deps.get(node);
if (deps == null) {
System.out.println(node + " " + deps);
// Create RecipeEvent
RecipeEvent event = new RecipeEvent(node_string.toLowerCase(), recipe_name, sentence_idx, curr_sentence_char_offset + no_space_character_offset);
predicate_idx_to_event.put(hw_character_offset, event);
return;
}
// check for pcomp
boolean has_pcomp = false;
TreeGraphNode pcomp = null;
for (TreeGraphNode dep3 : deps) {
String rel_string2 = edge_rels.get(new Pair<Integer, Integer>(hw_character_offset, charOffset(dep3)));
if (rel_string2.equals("pcomp")) {
has_pcomp = true;
pcomp = dep3;
break;
}
}
if (has_pcomp) {
System.out.println("has pcomp");
TreeGraphNode grandgov = dep_to_gov.get(node);
if (grandgov != null) {
int grandgov_index = grandgov.index();
RecipeEvent grandevent = predicate_idx_to_event.get(grandgov_index);
if (grandevent != null) {
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(node, node_to_deps, edge_rels, lemmas, tags, words, true);
grandevent.addPrepositionalArgument(hw_character_offset, args.getFirst(), pcomp.nodeString());
}
}
return;
}
// Create RecipeEvent
RecipeEvent event = new RecipeEvent(node_string.toLowerCase(), recipe_name, sentence_idx, curr_sentence_char_offset + no_space_character_offset);
boolean has_subj = false;
TreeGraphNode subj = null;
for (TreeGraphNode dep : deps) {
String rel_string2 = edge_rels.get(new Pair<Integer, Integer>(hw_character_offset, charOffset(dep)));
if (rel_string2.equals("nsubj") && !root_type.equals("SINV")) {
has_subj = true;
subj = dep;
break;
}
}
// If the verb has a subject other than "I", assume parse error and add the subject as an argument.
if (has_subj && !root_type.equals("SINV")) {
System.out.println(has_subj + " " + root_type);
if (!subj.nodeString().equals("I")) {
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(node, node_to_deps, edge_rels, lemmas, tags, words, false);
String arg_string = args.getFirst();
Set<TreeGraphNode> verbs = args.getSecond();
if (verbs.size() == 1) {
TreeGraphNode verb = verbs.iterator().next();
dep_to_gov.put(verb, node);
findActionVerbsAndArguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
RecipeEvent dep_event = predicate_idx_to_event.get(charOffset(leaves.get(verb.index() - 1)));
if (dep_event != null) {
String[] split = arg_string.split(" ");
int found = -1;
for (int i = 0; i < split.length; i++) {
if (split[i].equals(node.nodeString())) {
found = i;
break;
}
}
if (tags.get(index - found - 1).value().equals("IN")) {
dep_event.addPrepositionalArgument(hw_character_offset, arg_string);
} else {
dep_event.addOtherArgument(hw_character_offset, arg_string);
}
}
} else {
// TODO(chloe): do something else in this case?
for (TreeGraphNode verb : verbs) {
dep_to_gov.put(verb, node);
findActionVerbsAndArguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
}
}
return;
}
}
// Search through the dependents of the verb and add each expanded dependent as an argument.
Set<TreeGraphNode> new_deps = new HashSet<TreeGraphNode>(deps);
TreeGraphNode pobj = null;
while (new_deps.size() != 0) {
List<TreeGraphNode> deps_list = new ArrayList<TreeGraphNode>(new_deps);
new_deps.clear();
for (TreeGraphNode dep : deps_list) {
TreeGraphNode gov = dep_to_gov.get(dep);
if (gov == null) {
gov = node;
}
int gov_index = index;
int gov_hw_offset = hw_character_offset;
String gov_string = node_string;
if (gov != null) {
gov_index = gov.index();
gov_hw_offset = charOffset(leaves.get(gov_index - 1));
gov_string = gov.nodeString();
}
int dep_index = dep.index();
int dep_hw_offset = charOffset(leaves.get(dep_index - 1));
String dep_string = dep.nodeString();
if (dep.nodeString().equals("would")) {
continue;
}
String rel_string = edge_rels.get(new Pair<Integer, Integer>(gov_hw_offset, dep_hw_offset));
if (rel_string.startsWith("nsubj") && !root_type.equals("SINV") && (gov == node || dep.nodeString().equals("I"))) {
// skip subjects
continue;
}
if (rel_string.startsWith("pobj")) {
// Store the prepositional object, but ignore for now. We will deal with it when the
// prepositional phrase is found.
pobj = dep;
continue;
}
// For vmod and xcomp edges, create an argument for the current verb
if (rel_string.equals("vmod") || rel_string.equals("xcomp")) {
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(dep, node_to_deps, edge_rels, lemmas, tags, words, true);
String arg_string = args.getFirst();
String[] split = arg_string.split(" ");
int found = -1;
for (int i = 0; i < split.length; i++) {
if (split[i].equals(dep.nodeString())) {
found = i;
break;
}
}
if (found != -1) {
if (tags.get(dep_index - found - 1).value().equals("IN")) {
event.addPrepositionalArgument(dep_hw_offset, arg_string);
} else {
event.addOtherArgument(dep_hw_offset, arg_string);
}
}
continue;
}
// Deal with prepositional phrases and determining whether or not a dependent verb is a new verb or
// part of the current verb. Uses heuristics to find out.
boolean is_prep = rel_string.startsWith("prep") || tags.get(dep_index - 1).value().equals("IN");
Label dep_tag = tags.get(dep_index - 1);
if (dep_tag.value().startsWith("V") && !is_prep && !rel_string.equals("advcl")
) {
if (dep_index == index + 2 && rel_string.startsWith("conj")) {
int underscore = rel_string.indexOf('_');
String conj_string = rel_string.substring(underscore + 1);
node_string += " " + conj_string + " " + dep_string;
Set<TreeGraphNode> depdeps = node_to_deps.get(dep);
if (depdeps != null) {
new_deps.addAll(depdeps);
for (TreeGraphNode depdep : depdeps) {
dep_to_gov.put(depdep, dep);
}
}
event.addVerbToPredicate(dep.nodeString(), conj_string, (dep.index() > gov_index));
} else if (rel_string.startsWith("conj") || rel_string.equals("parataxis") || rel_string.equals("dep")) {
Set<TreeGraphNode> depdeps = node_to_deps.get(dep);
has_subj = false;
if (depdeps != null) {
for (TreeGraphNode depdep : depdeps) {
String rel_string2 = edge_rels.get(new Pair<Integer, Integer>(dep_hw_offset, charOffset(leaves.get(depdep.index() - 1))));
if (rel_string2.equals("nsubj")) {
has_subj = true;
break;
}
}
if (has_subj) {
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(dep, node_to_deps, edge_rels, lemmas, tags, words, true);
String arg_string = args.getFirst();
String[] split = arg_string.split(" ");
int found = -1;
for (int i = 0; i < split.length; i++) {
if (split[i].equals(dep.nodeString())) {
found = i;
break;
}
}
if (found != -1) {
if (tags.get(dep_index - found - 1).value().equals("IN")) {
event.addPrepositionalArgument(dep_hw_offset, arg_string);
} else {
event.addOtherArgument(dep_hw_offset, arg_string);
}
}
} else {
findActionVerbsAndArguments(dep, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
}
}
} else {
Set<TreeGraphNode> depdeps = node_to_deps.get(dep);
if (depdeps != null) {
new_deps.addAll(depdeps);
for (TreeGraphNode depdep : depdeps) {
dep_to_gov.put(depdep, dep);
}
}
event.addVerbToPredicate(dep.nodeString(), "", (dep.index() > gov_index));
}
} else { // prepositional phrase
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(dep, node_to_deps, edge_rels, lemmas, tags, words, is_prep || rel_string.equals("dobj"));
String arg_string = args.getFirst();
for (TreeGraphNode verb : args.getSecond()) {
findActionVerbsAndArguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
}
if (is_prep) {
String pobj_arg_string = null;
if (pobj != null) {
args = getTreeGraphString(pobj, node_to_deps, edge_rels, lemmas, tags, words, is_prep);
pobj_arg_string = args.getFirst();
for (TreeGraphNode verb : args.getSecond()) {
findActionVerbsAndArguments(verb, edge_rels, node_to_deps, dep_to_gov, "" , leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
}
} else {
for (TreeGraphNode dep2 : deps_list) {
TreeGraphNode gov2 = dep_to_gov.get(dep2);
if (gov2 == null) {
gov2 = gov;
}
String rel_string2 = edge_rels.get(new Pair<Integer, Integer>(charOffset(leaves.get(gov2.index() - 1)), charOffset(leaves.get(dep2.index() - 1))));
if (rel_string2.equals("pobj")) {
args = getTreeGraphString(dep2, node_to_deps, edge_rels, lemmas, tags, words, is_prep);
pobj_arg_string = args.getFirst();
for (TreeGraphNode verb : args.getSecond()) {
findActionVerbsAndArguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
}
break;
}
}
}
// Find the true preposition from the collapsed dependency relation
int underscore = rel_string.indexOf('_');
if (underscore != -1) {
String prep_string = rel_string.substring(underscore + 1);
prep_string = prep_string.replace('_', ' ');
if (pobj_arg_string != null) {
if (prep_string.contains(arg_string)) {
arg_string = prep_string + " " + pobj_arg_string;
} else {
arg_string = prep_string + " " + arg_string + " " + pobj_arg_string;
}
} else {
arg_string = prep_string + " " + arg_string;
}
}
event.addPrepositionalArgument(dep_hw_offset, arg_string);
} else if (rel_string.equals("dobj") || rel_string.equals("nsubj")) {
// event.setDirectObject(arg_string);
String[] split = arg_string.split(" ");
int found = -1;
for (int i = 0; i < split.length; i++) {
if (split[i].equals(dep.nodeString())) {
found = i;
break;
}
}
if (found != -1) {
System.out.println("DIRECTOBJECT " + arg_string);
if (tags.get(dep_index - found - 1).value().equals("IN")) {
event.addPrepositionalArgument(dep_hw_offset, arg_string);
} else {
if (arg_string.contains("minutes") || arg_string.contains("hours")) {
event.addPrepositionalArgument(dep_hw_offset, arg_string, "for");
} else {
event.setDirectObject(arg_string);
}
}
}
} else {
String[] split = arg_string.split(" ");
int found = -1;
for (int i = 0; i < split.length; i++) {
if (split[i].equals(dep.nodeString())) {
found = i;
break;
}
}
if (found != -1) {
if (tags.get(dep_index - found - 1).value().equals("IN")) {
System.out.println("prep");
event.addPrepositionalArgument(dep_hw_offset, arg_string);
} else {
event.addOtherArgument(dep_hw_offset, arg_string);
}
}
}
}
}
}
System.out.println(event);
predicate_idx_to_event.put(index, event);
} else {
// This method should not be called if the node is not a verb.
//
// Not sure if this happens, so I will currently have it crash so I can investigate it.
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(node, node_to_deps, edge_rels, lemmas, tags, words, false);
String arg_string = args.getFirst();
Set<TreeGraphNode> verbs = args.getSecond();
System.out.println(arg_string);
System.out.println("getting args error");
System.exit(1);
}
} | private void findActionVerbsAndArguments(TreeGraphNode node,
Map<Pair<Integer, Integer>, String> edge_rels,
Map<TreeGraphNode, Set<TreeGraphNode>> node_to_deps,
Map<TreeGraphNode, TreeGraphNode> dep_to_gov,
String root_type,
List<Tree> leaves,
List<String> lemmas, List<Label> tags, List<HasWord> words,
String recipe_name, int sentence_idx, int curr_sentence_char_offset,
String sentence_string, TreeMap<Integer, RecipeEvent> predicate_idx_to_event) {
String node_string = node.nodeString();
int index = node.index();
Label tag = tags.get(index - 1);
int hw_character_offset = charOffset(leaves.get(index - 1));
int no_space_character_offset = hw_character_offset;
String sentence_prefix = sentence_string.substring(0, hw_character_offset);
while (sentence_prefix.contains(" I would ")) {
no_space_character_offset -= 9;
sentence_prefix = sentence_prefix.replaceFirst(" I would ", "");
}
while (sentence_prefix.contains("I would ")) {
no_space_character_offset -= 8;
sentence_prefix = sentence_prefix.replaceFirst("I would ", "");
}
while (sentence_prefix.contains(" ")) {
no_space_character_offset--;
sentence_prefix = sentence_prefix.replaceFirst(" ", "");
}
if (tag.value().startsWith("V")) {
Set<TreeGraphNode> deps = node_to_deps.get(node);
if (deps == null) {
System.out.println(node + " " + deps);
RecipeEvent event = new RecipeEvent(node_string.toLowerCase(), recipe_name, sentence_idx, curr_sentence_char_offset + no_space_character_offset);
predicate_idx_to_event.put(hw_character_offset, event);
return;
}
boolean has_pcomp = false;
TreeGraphNode pcomp = null;
for (TreeGraphNode dep3 : deps) {
String rel_string2 = edge_rels.get(new Pair<Integer, Integer>(hw_character_offset, charOffset(dep3)));
if (rel_string2.equals("pcomp")) {
has_pcomp = true;
pcomp = dep3;
break;
}
}
if (has_pcomp) {
System.out.println("has pcomp");
TreeGraphNode grandgov = dep_to_gov.get(node);
if (grandgov != null) {
int grandgov_index = grandgov.index();
RecipeEvent grandevent = predicate_idx_to_event.get(grandgov_index);
if (grandevent != null) {
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(node, node_to_deps, edge_rels, lemmas, tags, words, true);
grandevent.addPrepositionalArgument(hw_character_offset, args.getFirst(), pcomp.nodeString());
}
}
return;
}
RecipeEvent event = new RecipeEvent(node_string.toLowerCase(), recipe_name, sentence_idx, curr_sentence_char_offset + no_space_character_offset);
boolean has_subj = false;
TreeGraphNode subj = null;
for (TreeGraphNode dep : deps) {
String rel_string2 = edge_rels.get(new Pair<Integer, Integer>(hw_character_offset, charOffset(dep)));
if (rel_string2.equals("nsubj") && !root_type.equals("SINV")) {
has_subj = true;
subj = dep;
break;
}
}
if (has_subj && !root_type.equals("SINV")) {
System.out.println(has_subj + " " + root_type);
if (!subj.nodeString().equals("I")) {
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(node, node_to_deps, edge_rels, lemmas, tags, words, false);
String arg_string = args.getFirst();
Set<TreeGraphNode> verbs = args.getSecond();
if (verbs.size() == 1) {
TreeGraphNode verb = verbs.iterator().next();
dep_to_gov.put(verb, node);
findActionVerbsAndArguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
RecipeEvent dep_event = predicate_idx_to_event.get(charOffset(leaves.get(verb.index() - 1)));
if (dep_event != null) {
String[] split = arg_string.split(" ");
int found = -1;
for (int i = 0; i < split.length; i++) {
if (split[i].equals(node.nodeString())) {
found = i;
break;
}
}
if (tags.get(index - found - 1).value().equals("IN")) {
dep_event.addPrepositionalArgument(hw_character_offset, arg_string);
} else {
dep_event.addOtherArgument(hw_character_offset, arg_string);
}
}
} else {
for (TreeGraphNode verb : verbs) {
dep_to_gov.put(verb, node);
findActionVerbsAndArguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
}
}
return;
}
}
Set<TreeGraphNode> new_deps = new HashSet<TreeGraphNode>(deps);
TreeGraphNode pobj = null;
while (new_deps.size() != 0) {
List<TreeGraphNode> deps_list = new ArrayList<TreeGraphNode>(new_deps);
new_deps.clear();
for (TreeGraphNode dep : deps_list) {
TreeGraphNode gov = dep_to_gov.get(dep);
if (gov == null) {
gov = node;
}
int gov_index = index;
int gov_hw_offset = hw_character_offset;
String gov_string = node_string;
if (gov != null) {
gov_index = gov.index();
gov_hw_offset = charOffset(leaves.get(gov_index - 1));
gov_string = gov.nodeString();
}
int dep_index = dep.index();
int dep_hw_offset = charOffset(leaves.get(dep_index - 1));
String dep_string = dep.nodeString();
if (dep.nodeString().equals("would")) {
continue;
}
String rel_string = edge_rels.get(new Pair<Integer, Integer>(gov_hw_offset, dep_hw_offset));
if (rel_string.startsWith("nsubj") && !root_type.equals("SINV") && (gov == node || dep.nodeString().equals("I"))) {
continue;
}
if (rel_string.startsWith("pobj")) {
pobj = dep;
continue;
}
if (rel_string.equals("vmod") || rel_string.equals("xcomp")) {
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(dep, node_to_deps, edge_rels, lemmas, tags, words, true);
String arg_string = args.getFirst();
String[] split = arg_string.split(" ");
int found = -1;
for (int i = 0; i < split.length; i++) {
if (split[i].equals(dep.nodeString())) {
found = i;
break;
}
}
if (found != -1) {
if (tags.get(dep_index - found - 1).value().equals("IN")) {
event.addPrepositionalArgument(dep_hw_offset, arg_string);
} else {
event.addOtherArgument(dep_hw_offset, arg_string);
}
}
continue;
}
boolean is_prep = rel_string.startsWith("prep") || tags.get(dep_index - 1).value().equals("IN");
Label dep_tag = tags.get(dep_index - 1);
if (dep_tag.value().startsWith("V") && !is_prep && !rel_string.equals("advcl")
) {
if (dep_index == index + 2 && rel_string.startsWith("conj")) {
int underscore = rel_string.indexOf('_');
String conj_string = rel_string.substring(underscore + 1);
node_string += " " + conj_string + " " + dep_string;
Set<TreeGraphNode> depdeps = node_to_deps.get(dep);
if (depdeps != null) {
new_deps.addAll(depdeps);
for (TreeGraphNode depdep : depdeps) {
dep_to_gov.put(depdep, dep);
}
}
event.addVerbToPredicate(dep.nodeString(), conj_string, (dep.index() > gov_index));
} else if (rel_string.startsWith("conj") || rel_string.equals("parataxis") || rel_string.equals("dep")) {
Set<TreeGraphNode> depdeps = node_to_deps.get(dep);
has_subj = false;
if (depdeps != null) {
for (TreeGraphNode depdep : depdeps) {
String rel_string2 = edge_rels.get(new Pair<Integer, Integer>(dep_hw_offset, charOffset(leaves.get(depdep.index() - 1))));
if (rel_string2.equals("nsubj")) {
has_subj = true;
break;
}
}
if (has_subj) {
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(dep, node_to_deps, edge_rels, lemmas, tags, words, true);
String arg_string = args.getFirst();
String[] split = arg_string.split(" ");
int found = -1;
for (int i = 0; i < split.length; i++) {
if (split[i].equals(dep.nodeString())) {
found = i;
break;
}
}
if (found != -1) {
if (tags.get(dep_index - found - 1).value().equals("IN")) {
event.addPrepositionalArgument(dep_hw_offset, arg_string);
} else {
event.addOtherArgument(dep_hw_offset, arg_string);
}
}
} else {
findActionVerbsAndArguments(dep, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
}
}
} else {
Set<TreeGraphNode> depdeps = node_to_deps.get(dep);
if (depdeps != null) {
new_deps.addAll(depdeps);
for (TreeGraphNode depdep : depdeps) {
dep_to_gov.put(depdep, dep);
}
}
event.addVerbToPredicate(dep.nodeString(), "", (dep.index() > gov_index));
}
} else {
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(dep, node_to_deps, edge_rels, lemmas, tags, words, is_prep || rel_string.equals("dobj"));
String arg_string = args.getFirst();
for (TreeGraphNode verb : args.getSecond()) {
findActionVerbsAndArguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
}
if (is_prep) {
String pobj_arg_string = null;
if (pobj != null) {
args = getTreeGraphString(pobj, node_to_deps, edge_rels, lemmas, tags, words, is_prep);
pobj_arg_string = args.getFirst();
for (TreeGraphNode verb : args.getSecond()) {
findActionVerbsAndArguments(verb, edge_rels, node_to_deps, dep_to_gov, "" , leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
}
} else {
for (TreeGraphNode dep2 : deps_list) {
TreeGraphNode gov2 = dep_to_gov.get(dep2);
if (gov2 == null) {
gov2 = gov;
}
String rel_string2 = edge_rels.get(new Pair<Integer, Integer>(charOffset(leaves.get(gov2.index() - 1)), charOffset(leaves.get(dep2.index() - 1))));
if (rel_string2.equals("pobj")) {
args = getTreeGraphString(dep2, node_to_deps, edge_rels, lemmas, tags, words, is_prep);
pobj_arg_string = args.getFirst();
for (TreeGraphNode verb : args.getSecond()) {
findActionVerbsAndArguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx,
curr_sentence_char_offset, sentence_string, predicate_idx_to_event);
}
break;
}
}
}
int underscore = rel_string.indexOf('_');
if (underscore != -1) {
String prep_string = rel_string.substring(underscore + 1);
prep_string = prep_string.replace('_', ' ');
if (pobj_arg_string != null) {
if (prep_string.contains(arg_string)) {
arg_string = prep_string + " " + pobj_arg_string;
} else {
arg_string = prep_string + " " + arg_string + " " + pobj_arg_string;
}
} else {
arg_string = prep_string + " " + arg_string;
}
}
event.addPrepositionalArgument(dep_hw_offset, arg_string);
} else if (rel_string.equals("dobj") || rel_string.equals("nsubj")) {
String[] split = arg_string.split(" ");
int found = -1;
for (int i = 0; i < split.length; i++) {
if (split[i].equals(dep.nodeString())) {
found = i;
break;
}
}
if (found != -1) {
System.out.println("DIRECTOBJECT " + arg_string);
if (tags.get(dep_index - found - 1).value().equals("IN")) {
event.addPrepositionalArgument(dep_hw_offset, arg_string);
} else {
if (arg_string.contains("minutes") || arg_string.contains("hours")) {
event.addPrepositionalArgument(dep_hw_offset, arg_string, "for");
} else {
event.setDirectObject(arg_string);
}
}
}
} else {
String[] split = arg_string.split(" ");
int found = -1;
for (int i = 0; i < split.length; i++) {
if (split[i].equals(dep.nodeString())) {
found = i;
break;
}
}
if (found != -1) {
if (tags.get(dep_index - found - 1).value().equals("IN")) {
System.out.println("prep");
event.addPrepositionalArgument(dep_hw_offset, arg_string);
} else {
event.addOtherArgument(dep_hw_offset, arg_string);
}
}
}
}
}
}
System.out.println(event);
predicate_idx_to_event.put(index, event);
} else {
Pair<String, Set<TreeGraphNode>> args = getTreeGraphString(node, node_to_deps, edge_rels, lemmas, tags, words, false);
String arg_string = args.getFirst();
Set<TreeGraphNode> verbs = args.getSecond();
System.out.println(arg_string);
System.out.println("getting args error");
System.exit(1);
}
} | private void findactionverbsandarguments(treegraphnode node, map<pair<integer, integer>, string> edge_rels, map<treegraphnode, set<treegraphnode>> node_to_deps, map<treegraphnode, treegraphnode> dep_to_gov, string root_type, list<tree> leaves, list<string> lemmas, list<label> tags, list<hasword> words, string recipe_name, int sentence_idx, int curr_sentence_char_offset, string sentence_string, treemap<integer, recipeevent> predicate_idx_to_event) { string node_string = node.nodestring(); int index = node.index(); label tag = tags.get(index - 1); int hw_character_offset = charoffset(leaves.get(index - 1)); int no_space_character_offset = hw_character_offset; string sentence_prefix = sentence_string.substring(0, hw_character_offset); while (sentence_prefix.contains(" i would ")) { no_space_character_offset -= 9; sentence_prefix = sentence_prefix.replacefirst(" i would ", ""); } while (sentence_prefix.contains("i would ")) { no_space_character_offset -= 8; sentence_prefix = sentence_prefix.replacefirst("i would ", ""); } while (sentence_prefix.contains(" ")) { no_space_character_offset--; sentence_prefix = sentence_prefix.replacefirst(" ", ""); } if (tag.value().startswith("v")) { set<treegraphnode> deps = node_to_deps.get(node); if (deps == null) { system.out.println(node + " " + deps); recipeevent event = new recipeevent(node_string.tolowercase(), recipe_name, sentence_idx, curr_sentence_char_offset + no_space_character_offset); predicate_idx_to_event.put(hw_character_offset, event); return; } boolean has_pcomp = false; treegraphnode pcomp = null; for (treegraphnode dep3 : deps) { string rel_string2 = edge_rels.get(new pair<integer, integer>(hw_character_offset, charoffset(dep3))); if (rel_string2.equals("pcomp")) { has_pcomp = true; pcomp = dep3; break; } } if (has_pcomp) { system.out.println("has pcomp"); treegraphnode grandgov = dep_to_gov.get(node); if (grandgov != null) { int grandgov_index = grandgov.index(); recipeevent grandevent = predicate_idx_to_event.get(grandgov_index); if (grandevent != null) { pair<string, set<treegraphnode>> args = gettreegraphstring(node, node_to_deps, edge_rels, lemmas, tags, words, true); grandevent.addprepositionalargument(hw_character_offset, args.getfirst(), pcomp.nodestring()); } } return; } recipeevent event = new recipeevent(node_string.tolowercase(), recipe_name, sentence_idx, curr_sentence_char_offset + no_space_character_offset); boolean has_subj = false; treegraphnode subj = null; for (treegraphnode dep : deps) { string rel_string2 = edge_rels.get(new pair<integer, integer>(hw_character_offset, charoffset(dep))); if (rel_string2.equals("nsubj") && !root_type.equals("sinv")) { has_subj = true; subj = dep; break; } } if (has_subj && !root_type.equals("sinv")) { system.out.println(has_subj + " " + root_type); if (!subj.nodestring().equals("i")) { pair<string, set<treegraphnode>> args = gettreegraphstring(node, node_to_deps, edge_rels, lemmas, tags, words, false); string arg_string = args.getfirst(); set<treegraphnode> verbs = args.getsecond(); if (verbs.size() == 1) { treegraphnode verb = verbs.iterator().next(); dep_to_gov.put(verb, node); findactionverbsandarguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx, curr_sentence_char_offset, sentence_string, predicate_idx_to_event); recipeevent dep_event = predicate_idx_to_event.get(charoffset(leaves.get(verb.index() - 1))); if (dep_event != null) { string[] split = arg_string.split(" "); int found = -1; for (int i = 0; i < split.length; 
i++) { if (split[i].equals(node.nodestring())) { found = i; break; } } if (tags.get(index - found - 1).value().equals("in")) { dep_event.addprepositionalargument(hw_character_offset, arg_string); } else { dep_event.addotherargument(hw_character_offset, arg_string); } } } else { for (treegraphnode verb : verbs) { dep_to_gov.put(verb, node); findactionverbsandarguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx, curr_sentence_char_offset, sentence_string, predicate_idx_to_event); } } return; } } set<treegraphnode> new_deps = new hashset<treegraphnode>(deps); treegraphnode pobj = null; while (new_deps.size() != 0) { list<treegraphnode> deps_list = new arraylist<treegraphnode>(new_deps); new_deps.clear(); for (treegraphnode dep : deps_list) { treegraphnode gov = dep_to_gov.get(dep); if (gov == null) { gov = node; } int gov_index = index; int gov_hw_offset = hw_character_offset; string gov_string = node_string; if (gov != null) { gov_index = gov.index(); gov_hw_offset = charoffset(leaves.get(gov_index - 1)); gov_string = gov.nodestring(); } int dep_index = dep.index(); int dep_hw_offset = charoffset(leaves.get(dep_index - 1)); string dep_string = dep.nodestring(); if (dep.nodestring().equals("would")) { continue; } string rel_string = edge_rels.get(new pair<integer, integer>(gov_hw_offset, dep_hw_offset)); if (rel_string.startswith("nsubj") && !root_type.equals("sinv") && (gov == node || dep.nodestring().equals("i"))) { continue; } if (rel_string.startswith("pobj")) { pobj = dep; continue; } if (rel_string.equals("vmod") || rel_string.equals("xcomp")) { pair<string, set<treegraphnode>> args = gettreegraphstring(dep, node_to_deps, edge_rels, lemmas, tags, words, true); string arg_string = args.getfirst(); string[] split = arg_string.split(" "); int found = -1; for (int i = 0; i < split.length; i++) { if (split[i].equals(dep.nodestring())) { found = i; break; } } if (found != -1) { if (tags.get(dep_index - found - 1).value().equals("in")) { event.addprepositionalargument(dep_hw_offset, arg_string); } else { event.addotherargument(dep_hw_offset, arg_string); } } continue; } boolean is_prep = rel_string.startswith("prep") || tags.get(dep_index - 1).value().equals("in"); label dep_tag = tags.get(dep_index - 1); if (dep_tag.value().startswith("v") && !is_prep && !rel_string.equals("advcl") ) { if (dep_index == index + 2 && rel_string.startswith("conj")) { int underscore = rel_string.indexof('_'); string conj_string = rel_string.substring(underscore + 1); node_string += " " + conj_string + " " + dep_string; set<treegraphnode> depdeps = node_to_deps.get(dep); if (depdeps != null) { new_deps.addall(depdeps); for (treegraphnode depdep : depdeps) { dep_to_gov.put(depdep, dep); } } event.addverbtopredicate(dep.nodestring(), conj_string, (dep.index() > gov_index)); } else if (rel_string.startswith("conj") || rel_string.equals("parataxis") || rel_string.equals("dep")) { set<treegraphnode> depdeps = node_to_deps.get(dep); has_subj = false; if (depdeps != null) { for (treegraphnode depdep : depdeps) { string rel_string2 = edge_rels.get(new pair<integer, integer>(dep_hw_offset, charoffset(leaves.get(depdep.index() - 1)))); if (rel_string2.equals("nsubj")) { has_subj = true; break; } } if (has_subj) { pair<string, set<treegraphnode>> args = gettreegraphstring(dep, node_to_deps, edge_rels, lemmas, tags, words, true); string arg_string = args.getfirst(); string[] split = arg_string.split(" "); int found = -1; for (int i = 0; i < split.length; i++) { if 
(split[i].equals(dep.nodestring())) { found = i; break; } } if (found != -1) { if (tags.get(dep_index - found - 1).value().equals("in")) { event.addprepositionalargument(dep_hw_offset, arg_string); } else { event.addotherargument(dep_hw_offset, arg_string); } } } else { findactionverbsandarguments(dep, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx, curr_sentence_char_offset, sentence_string, predicate_idx_to_event); } } } else { set<treegraphnode> depdeps = node_to_deps.get(dep); if (depdeps != null) { new_deps.addall(depdeps); for (treegraphnode depdep : depdeps) { dep_to_gov.put(depdep, dep); } } event.addverbtopredicate(dep.nodestring(), "", (dep.index() > gov_index)); } } else { pair<string, set<treegraphnode>> args = gettreegraphstring(dep, node_to_deps, edge_rels, lemmas, tags, words, is_prep || rel_string.equals("dobj")); string arg_string = args.getfirst(); for (treegraphnode verb : args.getsecond()) { findactionverbsandarguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx, curr_sentence_char_offset, sentence_string, predicate_idx_to_event); } if (is_prep) { string pobj_arg_string = null; if (pobj != null) { args = gettreegraphstring(pobj, node_to_deps, edge_rels, lemmas, tags, words, is_prep); pobj_arg_string = args.getfirst(); for (treegraphnode verb : args.getsecond()) { findactionverbsandarguments(verb, edge_rels, node_to_deps, dep_to_gov, "" , leaves, lemmas, tags, words, recipe_name, sentence_idx, curr_sentence_char_offset, sentence_string, predicate_idx_to_event); } } else { for (treegraphnode dep2 : deps_list) { treegraphnode gov2 = dep_to_gov.get(dep2); if (gov2 == null) { gov2 = gov; } string rel_string2 = edge_rels.get(new pair<integer, integer>(charoffset(leaves.get(gov2.index() - 1)), charoffset(leaves.get(dep2.index() - 1)))); if (rel_string2.equals("pobj")) { args = gettreegraphstring(dep2, node_to_deps, edge_rels, lemmas, tags, words, is_prep); pobj_arg_string = args.getfirst(); for (treegraphnode verb : args.getsecond()) { findactionverbsandarguments(verb, edge_rels, node_to_deps, dep_to_gov, "", leaves, lemmas, tags, words, recipe_name, sentence_idx, curr_sentence_char_offset, sentence_string, predicate_idx_to_event); } break; } } } int underscore = rel_string.indexof('_'); if (underscore != -1) { string prep_string = rel_string.substring(underscore + 1); prep_string = prep_string.replace('_', ' '); if (pobj_arg_string != null) { if (prep_string.contains(arg_string)) { arg_string = prep_string + " " + pobj_arg_string; } else { arg_string = prep_string + " " + arg_string + " " + pobj_arg_string; } } else { arg_string = prep_string + " " + arg_string; } } event.addprepositionalargument(dep_hw_offset, arg_string); } else if (rel_string.equals("dobj") || rel_string.equals("nsubj")) { string[] split = arg_string.split(" "); int found = -1; for (int i = 0; i < split.length; i++) { if (split[i].equals(dep.nodestring())) { found = i; break; } } if (found != -1) { system.out.println("directobject " + arg_string); if (tags.get(dep_index - found - 1).value().equals("in")) { event.addprepositionalargument(dep_hw_offset, arg_string); } else { if (arg_string.contains("minutes") || arg_string.contains("hours")) { event.addprepositionalargument(dep_hw_offset, arg_string, "for"); } else { event.setdirectobject(arg_string); } } } } else { string[] split = arg_string.split(" "); int found = -1; for (int i = 0; i < split.length; i++) { if (split[i].equals(dep.nodestring())) { 
found = i; break; } } if (found != -1) { if (tags.get(dep_index - found - 1).value().equals("in")) { system.out.println("prep"); event.addprepositionalargument(dep_hw_offset, arg_string); } else { event.addotherargument(dep_hw_offset, arg_string); } } } } } } system.out.println(event); predicate_idx_to_event.put(index, event); } else { pair<string, set<treegraphnode>> args = gettreegraphstring(node, node_to_deps, edge_rels, lemmas, tags, words, false); string arg_string = args.getfirst(); set<treegraphnode> verbs = args.getsecond(); system.out.println(arg_string); system.out.println("getting args error"); system.exit(1); } } | atcbosselut/recipe-interpretation | [
1,
0,
0,
0
] |
23,710 | @Provides
@Singleton
Retrofit provideCall() {
Cache cache = null;
try {
cache = new Cache(cacheFile, 10 * 1024 * 1024);
} catch (Exception e) {
e.printStackTrace();
}
/* TODO Consider moving elsewhere. */
OkHttpClient okHttpClient = new OkHttpClient.Builder()
.addInterceptor(new Interceptor() {
@Override
public okhttp3.Response intercept(Chain chain) throws IOException {
Request original = chain.request();
HttpUrl originalHttpUrl = original.url();
HttpUrl url = originalHttpUrl.newBuilder()
.addQueryParameter("APPID", BuildConfig.API_OPENWEATHERMAP_KEY)
.build();
Request request = original.newBuilder()
.header("Content-Type", "application/json")
.removeHeader("Pragma")
.header("Cache-Control", String.format("max-age=%d", BuildConfig.CACHETIME))
.url(url)
.build();
okhttp3.Response response = chain.proceed(request);
response.cacheResponse();
return response;
}
})
.cache(cache)
.build();
return new Retrofit.Builder()
.baseUrl(BuildConfig.API_OPENWEATHERMAP_BASEURL)
.client(okHttpClient)
.addConverterFactory(GsonConverterFactory.create())
.addCallAdapterFactory(RxJavaCallAdapterFactory.create())
.build();
} | @Provides
@Singleton
Retrofit provideCall() {
Cache cache = null;
try {
cache = new Cache(cacheFile, 10 * 1024 * 1024);
} catch (Exception e) {
e.printStackTrace();
}
OkHttpClient okHttpClient = new OkHttpClient.Builder()
.addInterceptor(new Interceptor() {
@Override
public okhttp3.Response intercept(Chain chain) throws IOException {
Request original = chain.request();
HttpUrl originalHttpUrl = original.url();
HttpUrl url = originalHttpUrl.newBuilder()
.addQueryParameter("APPID", BuildConfig.API_OPENWEATHERMAP_KEY)
.build();
Request request = original.newBuilder()
.header("Content-Type", "application/json")
.removeHeader("Pragma")
.header("Cache-Control", String.format("max-age=%d", BuildConfig.CACHETIME))
.url(url)
.build();
okhttp3.Response response = chain.proceed(request);
response.cacheResponse();
return response;
}
})
.cache(cache)
.build();
return new Retrofit.Builder()
.baseUrl(BuildConfig.API_OPENWEATHERMAP_BASEURL)
.client(okHttpClient)
.addConverterFactory(GsonConverterFactory.create())
.addCallAdapterFactory(RxJavaCallAdapterFactory.create())
.build();
} | @provides @singleton retrofit providecall() { cache cache = null; try { cache = new cache(cachefile, 10 * 1024 * 1024); } catch (exception e) { e.printstacktrace(); } okhttpclient okhttpclient = new okhttpclient.builder() .addinterceptor(new interceptor() { @override public okhttp3.response intercept(chain chain) throws ioexception { request original = chain.request(); httpurl originalhttpurl = original.url(); httpurl url = originalhttpurl.newbuilder() .addqueryparameter("appid", buildconfig.api_openweathermap_key) .build(); request request = original.newbuilder() .header("content-type", "application/json") .removeheader("pragma") .header("cache-control", string.format("max-age=%d", buildconfig.cachetime)) .url(url) .build(); okhttp3.response response = chain.proceed(request); response.cacheresponse(); return response; } }) .cache(cache) .build(); return new retrofit.builder() .baseurl(buildconfig.api_openweathermap_baseurl) .client(okhttpclient) .addconverterfactory(gsonconverterfactory.create()) .addcalladapterfactory(rxjavacalladapterfactory.create()) .build(); } | arenaq/weather-app | [
0,
1,
0,
0
] |
32,099 | protected List<Rectangle> createHighlightAreas2(String fullText, String clippedText, FontMetrics fm, int xOffset, int yOffset, int height) {
if (matchPattern == null) {
return Collections.emptyList();
}
Matcher matcher = matchPattern.matcher(clippedText);
List<Rectangle> highlightAreas = null;
int startFrom = 0;
while (startFrom < clippedText.length() && matcher.find(startFrom)) {
if (highlightAreas == null) {
highlightAreas = new ArrayList<Rectangle>();
}
int start = matcher.start();
int end = matcher.end();
if (start == end) {
// empty matcher will cause infinite loop
break;
}
startFrom = end;
int highlightx;
int highlightWidth;
if (start == 0) {
// start highlight from the start of the field
highlightx = xOffset;
} else {
// Calculate the width of the unhighlighted text to get the
// start of the highlighted region.
String strToStart = clippedText.substring(0, start);
highlightx = fm.stringWidth(strToStart) + xOffset;
}
// Get the width of the highlighted region
String highlightText = clippedText.substring(start, end);
highlightWidth = fm.stringWidth(highlightText);
highlightAreas.add(new Rectangle(highlightx, yOffset, highlightWidth, height));
}// while ( startFrom < text.length() && matcher.find( startFrom ) )
if (highlightAreas == null) {
highlightAreas = Collections.emptyList();
} else {
coalesceHighlightAreas(highlightAreas);
}
return highlightAreas;
} | protected List<Rectangle> createHighlightAreas2(String fullText, String clippedText, FontMetrics fm, int xOffset, int yOffset, int height) {
if (matchPattern == null) {
return Collections.emptyList();
}
Matcher matcher = matchPattern.matcher(clippedText);
List<Rectangle> highlightAreas = null;
int startFrom = 0;
while (startFrom < clippedText.length() && matcher.find(startFrom)) {
if (highlightAreas == null) {
highlightAreas = new ArrayList<Rectangle>();
}
int start = matcher.start();
int end = matcher.end();
if (start == end) {
break;
}
startFrom = end;
int highlightx;
int highlightWidth;
if (start == 0) {
highlightx = xOffset;
} else {
String strToStart = clippedText.substring(0, start);
highlightx = fm.stringWidth(strToStart) + xOffset;
}
String highlightText = clippedText.substring(start, end);
highlightWidth = fm.stringWidth(highlightText);
highlightAreas.add(new Rectangle(highlightx, yOffset, highlightWidth, height));
        }
        if (highlightAreas == null) {
highlightAreas = Collections.emptyList();
} else {
coalesceHighlightAreas(highlightAreas);
}
return highlightAreas;
    } | protected list<rectangle> createhighlightareas2(string fulltext, string clippedtext, fontmetrics fm, int xoffset, int yoffset, int height) { if (matchpattern == null) { return collections.emptylist(); } matcher matcher = matchpattern.matcher(clippedtext); list<rectangle> highlightareas = null; int startfrom = 0; while (startfrom < clippedtext.length() && matcher.find(startfrom)) { if (highlightareas == null) { highlightareas = new arraylist<rectangle>(); } int start = matcher.start(); int end = matcher.end(); if (start == end) { break; } startfrom = end; int highlightx; int highlightwidth; if (start == 0) { highlightx = xoffset; } else { string strtostart = clippedtext.substring(0, start); highlightx = fm.stringwidth(strtostart) + xoffset; } string highlighttext = clippedtext.substring(start, end); highlightwidth = fm.stringwidth(highlighttext); highlightareas.add(new rectangle(highlightx, yoffset, highlightwidth, height)); } if (highlightareas == null) { highlightareas = collections.emptylist(); } else { coalescehighlightareas(highlightareas); } return highlightareas; } | aragozin/stackviewer | [
1,
0,
0,
0
] |
32,113 | protected List<DoubleObjPair<RdKNNEntry>> getSortedEntries(AbstractRStarTreeNode<?, ?> node, SpatialComparable q, SpatialPrimitiveDistanceFunction<?> distanceFunction) {
List<DoubleObjPair<RdKNNEntry>> result = new ArrayList<>();
for(int i = 0; i < node.getNumEntries(); i++) {
RdKNNEntry entry = (RdKNNEntry) node.getEntry(i);
double minDist = distanceFunction.minDist(entry, q);
result.add(new DoubleObjPair<>(minDist, entry));
}
Collections.sort(result);
return result;
} | protected List<DoubleObjPair<RdKNNEntry>> getSortedEntries(AbstractRStarTreeNode<?, ?> node, SpatialComparable q, SpatialPrimitiveDistanceFunction<?> distanceFunction) {
List<DoubleObjPair<RdKNNEntry>> result = new ArrayList<>();
for(int i = 0; i < node.getNumEntries(); i++) {
RdKNNEntry entry = (RdKNNEntry) node.getEntry(i);
double minDist = distanceFunction.minDist(entry, q);
result.add(new DoubleObjPair<>(minDist, entry));
}
Collections.sort(result);
return result;
} | protected list<doubleobjpair<rdknnentry>> getsortedentries(abstractrstartreenode<?, ?> node, spatialcomparable q, spatialprimitivedistancefunction<?> distancefunction) { list<doubleobjpair<rdknnentry>> result = new arraylist<>(); for(int i = 0; i < node.getnumentries(); i++) { rdknnentry entry = (rdknnentry) node.getentry(i); double mindist = distancefunction.mindist(entry, q); result.add(new doubleobjpair<>(mindist, entry)); } collections.sort(result); return result; } | bertl4398/ssdbm2020 | [
1,
0,
0,
0
] |
23,927 | public void recordCorpusData (Corpus c, Learner l){
if (!isRecording) return;
this.c=c;
Vocabulary v = c.getVocabulary();
Set<Map.Entry<String, Integer>> entrySet = v.getWordIndicesEntrySet();
Set<Map.Entry<Integer, Word>> wordSet = v.getWordEntrySet();
StringBuilder s = new StringBuilder(v.getNumOfWords()*30);
corpusFile = new File(mainPath, "corpus_"+name+".js");
s.append(String.format("corpus_%1$s = \n{\n\tname:\"%1$s\",\n", name));
s.append(String.format("\ttokenCount: %1$s,\n", c.tokenCount));
s.append(String.format("\ttypeCount: %1$s,\n", v.getNumOfWords()));
s.append(String.format("\tcommonTypes: %1$s,\n", (v.getNumOfWords()-v.countWordsBelowThreshold(l.RARE_WORD_THRESHOLD))));
s.append(String.format("\tclusterCount: %1$s,\n", l.NUMBER_OF_CLUSTERS));
//TODO add proper number formatters
s.append(String.format("\tidentityEps: %1$s,\n", l.IDENTITY_EPSILON));
s.append(String.format("\titerationCount: %1$s,\n", iterationCounter));
s.append("\ttypeToId: \n\t{\n\t");
for (Map.Entry<String, Integer> e: entrySet){
if (v.getWord(e.getValue()).frequency >= l.RARE_WORD_THRESHOLD){ //TODO see if necessary
s.append(String.format("\t\t\"%1$s\": %2$s,\n", e.getKey(), e.getValue()));
}
}
s.append("},\n");
s.append("\tidToType: {");
for (Map.Entry<Integer, Word> e: wordSet){ //No rare word check here, to keep vector status
if (e.getValue().frequency >= l.RARE_WORD_THRESHOLD){
s.append(String.format("\t\t%1$s: \"%2$s\",\n",e.getKey(), e.getValue().name));
}
}
s.append("}");
s.append("\n};");
try {
corpusFile.createNewFile();
BufferedWriter out = new BufferedWriter(new FileWriter(corpusFile));
out.append(s);
out.close();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
} | public void recordCorpusData (Corpus c, Learner l){
if (!isRecording) return;
this.c=c;
Vocabulary v = c.getVocabulary();
Set<Map.Entry<String, Integer>> entrySet = v.getWordIndicesEntrySet();
Set<Map.Entry<Integer, Word>> wordSet = v.getWordEntrySet();
StringBuilder s = new StringBuilder(v.getNumOfWords()*30);
corpusFile = new File(mainPath, "corpus_"+name+".js");
s.append(String.format("corpus_%1$s = \n{\n\tname:\"%1$s\",\n", name));
s.append(String.format("\ttokenCount: %1$s,\n", c.tokenCount));
s.append(String.format("\ttypeCount: %1$s,\n", v.getNumOfWords()));
s.append(String.format("\tcommonTypes: %1$s,\n", (v.getNumOfWords()-v.countWordsBelowThreshold(l.RARE_WORD_THRESHOLD))));
s.append(String.format("\tclusterCount: %1$s,\n", l.NUMBER_OF_CLUSTERS));
s.append(String.format("\tidentityEps: %1$s,\n", l.IDENTITY_EPSILON));
s.append(String.format("\titerationCount: %1$s,\n", iterationCounter));
s.append("\ttypeToId: \n\t{\n\t");
for (Map.Entry<String, Integer> e: entrySet){
if (v.getWord(e.getValue()).frequency >= l.RARE_WORD_THRESHOLD){
s.append(String.format("\t\t\"%1$s\": %2$s,\n", e.getKey(), e.getValue()));
}
}
s.append("},\n");
s.append("\tidToType: {");
for (Map.Entry<Integer, Word> e: wordSet){
if (e.getValue().frequency >= l.RARE_WORD_THRESHOLD){
s.append(String.format("\t\t%1$s: \"%2$s\",\n",e.getKey(), e.getValue().name));
}
}
s.append("}");
s.append("\n};");
try {
corpusFile.createNewFile();
BufferedWriter out = new BufferedWriter(new FileWriter(corpusFile));
out.append(s);
out.close();
} catch (IOException e1) {
e1.printStackTrace();
}
} | public void recordcorpusdata (corpus c, learner l){ if (!isrecording) return; this.c=c; vocabulary v = c.getvocabulary(); set<map.entry<string, integer>> entryset = v.getwordindicesentryset(); set<map.entry<integer, word>> wordset = v.getwordentryset(); stringbuilder s = new stringbuilder(v.getnumofwords()*30); corpusfile = new file(mainpath, "corpus_"+name+".js"); s.append(string.format("corpus_%1$s = \n{\n\tname:\"%1$s\",\n", name)); s.append(string.format("\ttokencount: %1$s,\n", c.tokencount)); s.append(string.format("\ttypecount: %1$s,\n", v.getnumofwords())); s.append(string.format("\tcommontypes: %1$s,\n", (v.getnumofwords()-v.countwordsbelowthreshold(l.rare_word_threshold)))); s.append(string.format("\tclustercount: %1$s,\n", l.number_of_clusters)); s.append(string.format("\tidentityeps: %1$s,\n", l.identity_epsilon)); s.append(string.format("\titerationcount: %1$s,\n", iterationcounter)); s.append("\ttypetoid: \n\t{\n\t"); for (map.entry<string, integer> e: entryset){ if (v.getword(e.getvalue()).frequency >= l.rare_word_threshold){ s.append(string.format("\t\t\"%1$s\": %2$s,\n", e.getkey(), e.getvalue())); } } s.append("},\n"); s.append("\tidtotype: {"); for (map.entry<integer, word> e: wordset){ if (e.getvalue().frequency >= l.rare_word_threshold){ s.append(string.format("\t\t%1$s: \"%2$s\",\n",e.getkey(), e.getvalue().name)); } } s.append("}"); s.append("\n};"); try { corpusfile.createnewfile(); bufferedwriter out = new bufferedwriter(new filewriter(corpusfile)); out.append(s); out.close(); } catch (ioexception e1) { e1.printstacktrace(); } } | aureatesting/Syntactic | [
1,
1,
0,
0
] |
15,912 | @Test
public void testSpendInputs() throws Throwable {
ExplorerCoinsRepository repo = TestWallet.app().explorerCoinsRepo();
GateEstimateRepository estimateRepo = TestWallet.app().estimateRepo();
final int tabPos = 0;
// spend coins
mActivityTestRule.runOnUiThread(() -> {
mActivityTestRule.getActivity().setCurrentPage(tabPos);
});
// wait while balance be a 100
waitForBalance(100);
// wait while balance isn't update
waitForBalanceUpdate();
final String balanceString = String.format("%s (%s)", MinterSDK.DEFAULT_COIN, bdHuman(new BigDecimal(100.d)));
ViewInteraction actionBtn = onView(allOf(
withId(R.id.action),
withText("EXCHANGE"),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction actionUseMax = onView(allOf(
withId(R.id.action_maximum),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction amountInput = onView(allOf(withId(R.id.input_amount), inViewPager(tabPos, R.id.pager)));
ViewInteraction coinInput = onView(allOf(withId(R.id.input_incoming_coin), inViewPager(tabPos, R.id.pager)));
ViewInteraction coinOutput = onView(allOf(withId(R.id.input_outgoing_coin), inViewPager(tabPos, R.id.pager)));
ViewInteraction layoutCalculation = onView(allOf(
withId(R.id.layout_calculation),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction calculationSum = onView(allOf(withId(R.id.calculation), inViewPager(tabPos, R.id.pager)));
coinOutput.check(matches(withText(balanceString)));
actionUseMax.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE)));
// submit disabled
actionBtn.check(matches(not(isEnabled())));
amountInput.check(matches(withInputLayoutHint(R.string.label_amount)));
amountInput.check(matches(withInputLayoutError(null)));
actionBtn.check(matches(not(isEnabled())));
amountInput.perform(replaceText("0"));
amountInput.check(matches(withInputLayoutError("Amount must be greater than 0")));
actionBtn.check(matches(not(isEnabled())));
// empty string is valid zero
amountInput.perform(replaceText(""));
amountInput.check(matches(withInputLayoutError("Amount must be greater than 0")));
actionBtn.check(matches(not(isEnabled())));
final String amount = "1";
// valid value
amountInput.perform(replaceText(amount));
amountInput.check(matches(withInputLayoutError(null)));
actionBtn.check(matches(not(isEnabled())));
// @TODO potentially, may break test, as coin can be created
coinInput.perform(replaceText("DOSNTEXIST"));
coinInput.check(matches(withInputLayoutError("Coin to buy not exists")));
// we can send transaction even if coin does not exists, because it can be no a true
actionBtn.check(matches(isEnabled()));
layoutCalculation.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.GONE)));
coinInput.perform(scrollTo(), replaceText(mExchangeCoin.getSymbol()));
layoutCalculation.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE)));
onView(allOf(withId(R.id.calculation_title), inViewPager(tabPos, R.id.pager)))
.check(matches(withText(R.string.label_you_will_get_approximately)));
calculationSum.perform(scrollTo());
Response<GateResult<ExchangeSellValue>> estimate1 = estimateRepo.getCoinExchangeCurrencyToSell(MinterSDK.DEFAULT_COIN, new BigDecimal(amount), mExchangeCoin.getSymbol()).execute();
assertTrue(estimate1.body().isOk());
// amount without commission
String expectEstimate3 = bdHuman(estimate1.body().result.getAmount());
calculationSum.check(matches(withText(String.format("%s %s", expectEstimate3, mExchangeCoin.getSymbol()))));
// visible "use max" for spend tab
actionUseMax.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE)));
actionUseMax.perform(click());
amountInput.check(matches(withText("100")));
Response<GateResult<ExchangeSellValue>> estimate2 = estimateRepo.getCoinExchangeCurrencyToSell(MinterSDK.DEFAULT_COIN, new BigDecimal("100"), mExchangeCoin.getSymbol()).execute();
assertTrue(estimate2.body().isOk());
// amount without commission
String expectEstimate2 = bdHuman(estimate2.body().result.getAmount());
calculationSum.check(matches(withText(String.format("%s %s", expectEstimate2, mExchangeCoin.getSymbol()))));
} | @Test
public void testSpendInputs() throws Throwable {
ExplorerCoinsRepository repo = TestWallet.app().explorerCoinsRepo();
GateEstimateRepository estimateRepo = TestWallet.app().estimateRepo();
final int tabPos = 0;
mActivityTestRule.runOnUiThread(() -> {
mActivityTestRule.getActivity().setCurrentPage(tabPos);
});
waitForBalance(100);
waitForBalanceUpdate();
final String balanceString = String.format("%s (%s)", MinterSDK.DEFAULT_COIN, bdHuman(new BigDecimal(100.d)));
ViewInteraction actionBtn = onView(allOf(
withId(R.id.action),
withText("EXCHANGE"),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction actionUseMax = onView(allOf(
withId(R.id.action_maximum),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction amountInput = onView(allOf(withId(R.id.input_amount), inViewPager(tabPos, R.id.pager)));
ViewInteraction coinInput = onView(allOf(withId(R.id.input_incoming_coin), inViewPager(tabPos, R.id.pager)));
ViewInteraction coinOutput = onView(allOf(withId(R.id.input_outgoing_coin), inViewPager(tabPos, R.id.pager)));
ViewInteraction layoutCalculation = onView(allOf(
withId(R.id.layout_calculation),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction calculationSum = onView(allOf(withId(R.id.calculation), inViewPager(tabPos, R.id.pager)));
coinOutput.check(matches(withText(balanceString)));
actionUseMax.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE)));
actionBtn.check(matches(not(isEnabled())));
amountInput.check(matches(withInputLayoutHint(R.string.label_amount)));
amountInput.check(matches(withInputLayoutError(null)));
actionBtn.check(matches(not(isEnabled())));
amountInput.perform(replaceText("0"));
amountInput.check(matches(withInputLayoutError("Amount must be greater than 0")));
actionBtn.check(matches(not(isEnabled())));
amountInput.perform(replaceText(""));
amountInput.check(matches(withInputLayoutError("Amount must be greater than 0")));
actionBtn.check(matches(not(isEnabled())));
final String amount = "1";
amountInput.perform(replaceText(amount));
amountInput.check(matches(withInputLayoutError(null)));
actionBtn.check(matches(not(isEnabled())));
coinInput.perform(replaceText("DOSNTEXIST"));
coinInput.check(matches(withInputLayoutError("Coin to buy not exists")));
actionBtn.check(matches(isEnabled()));
layoutCalculation.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.GONE)));
coinInput.perform(scrollTo(), replaceText(mExchangeCoin.getSymbol()));
layoutCalculation.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE)));
onView(allOf(withId(R.id.calculation_title), inViewPager(tabPos, R.id.pager)))
.check(matches(withText(R.string.label_you_will_get_approximately)));
calculationSum.perform(scrollTo());
Response<GateResult<ExchangeSellValue>> estimate1 = estimateRepo.getCoinExchangeCurrencyToSell(MinterSDK.DEFAULT_COIN, new BigDecimal(amount), mExchangeCoin.getSymbol()).execute();
assertTrue(estimate1.body().isOk());
String expectEstimate3 = bdHuman(estimate1.body().result.getAmount());
calculationSum.check(matches(withText(String.format("%s %s", expectEstimate3, mExchangeCoin.getSymbol()))));
actionUseMax.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE)));
actionUseMax.perform(click());
amountInput.check(matches(withText("100")));
Response<GateResult<ExchangeSellValue>> estimate2 = estimateRepo.getCoinExchangeCurrencyToSell(MinterSDK.DEFAULT_COIN, new BigDecimal("100"), mExchangeCoin.getSymbol()).execute();
assertTrue(estimate2.body().isOk());
String expectEstimate2 = bdHuman(estimate2.body().result.getAmount());
calculationSum.check(matches(withText(String.format("%s %s", expectEstimate2, mExchangeCoin.getSymbol()))));
} | @test public void testspendinputs() throws throwable { explorercoinsrepository repo = testwallet.app().explorercoinsrepo(); gateestimaterepository estimaterepo = testwallet.app().estimaterepo(); final int tabpos = 0; mactivitytestrule.runonuithread(() -> { mactivitytestrule.getactivity().setcurrentpage(tabpos); }); waitforbalance(100); waitforbalanceupdate(); final string balancestring = string.format("%s (%s)", mintersdk.default_coin, bdhuman(new bigdecimal(100.d))); viewinteraction actionbtn = onview(allof( withid(r.id.action), withtext("exchange"), inviewpager(tabpos, r.id.pager) )); viewinteraction actionusemax = onview(allof( withid(r.id.action_maximum), inviewpager(tabpos, r.id.pager) )); viewinteraction amountinput = onview(allof(withid(r.id.input_amount), inviewpager(tabpos, r.id.pager))); viewinteraction coininput = onview(allof(withid(r.id.input_incoming_coin), inviewpager(tabpos, r.id.pager))); viewinteraction coinoutput = onview(allof(withid(r.id.input_outgoing_coin), inviewpager(tabpos, r.id.pager))); viewinteraction layoutcalculation = onview(allof( withid(r.id.layout_calculation), inviewpager(tabpos, r.id.pager) )); viewinteraction calculationsum = onview(allof(withid(r.id.calculation), inviewpager(tabpos, r.id.pager))); coinoutput.check(matches(withtext(balancestring))); actionusemax.check(matches(witheffectivevisibility(viewmatchers.visibility.visible))); actionbtn.check(matches(not(isenabled()))); amountinput.check(matches(withinputlayouthint(r.string.label_amount))); amountinput.check(matches(withinputlayouterror(null))); actionbtn.check(matches(not(isenabled()))); amountinput.perform(replacetext("0")); amountinput.check(matches(withinputlayouterror("amount must be greater than 0"))); actionbtn.check(matches(not(isenabled()))); amountinput.perform(replacetext("")); amountinput.check(matches(withinputlayouterror("amount must be greater than 0"))); actionbtn.check(matches(not(isenabled()))); final string amount = "1"; amountinput.perform(replacetext(amount)); amountinput.check(matches(withinputlayouterror(null))); actionbtn.check(matches(not(isenabled()))); coininput.perform(replacetext("dosntexist")); coininput.check(matches(withinputlayouterror("coin to buy not exists"))); actionbtn.check(matches(isenabled())); layoutcalculation.check(matches(witheffectivevisibility(viewmatchers.visibility.gone))); coininput.perform(scrollto(), replacetext(mexchangecoin.getsymbol())); layoutcalculation.check(matches(witheffectivevisibility(viewmatchers.visibility.visible))); onview(allof(withid(r.id.calculation_title), inviewpager(tabpos, r.id.pager))) .check(matches(withtext(r.string.label_you_will_get_approximately))); calculationsum.perform(scrollto()); response<gateresult<exchangesellvalue>> estimate1 = estimaterepo.getcoinexchangecurrencytosell(mintersdk.default_coin, new bigdecimal(amount), mexchangecoin.getsymbol()).execute(); asserttrue(estimate1.body().isok()); string expectestimate3 = bdhuman(estimate1.body().result.getamount()); calculationsum.check(matches(withtext(string.format("%s %s", expectestimate3, mexchangecoin.getsymbol())))); actionusemax.check(matches(witheffectivevisibility(viewmatchers.visibility.visible))); actionusemax.perform(click()); amountinput.check(matches(withtext("100"))); response<gateresult<exchangesellvalue>> estimate2 = estimaterepo.getcoinexchangecurrencytosell(mintersdk.default_coin, new bigdecimal("100"), mexchangecoin.getsymbol()).execute(); asserttrue(estimate2.body().isok()); string expectestimate2 = 
bdhuman(estimate2.body().result.getamount()); calculationsum.check(matches(withtext(string.format("%s %s", expectestimate2, mexchangecoin.getsymbol())))); } | bogdyak/Android-Bip-Wallet-crypttp | [
0,
0,
0,
1
] |
15,913 | @Test
public void testGetInputs() throws Throwable {
ExplorerCoinsRepository repo = TestWallet.app().explorerCoinsRepo();
GateEstimateRepository estimateRepo = TestWallet.app().estimateRepo();
final int tabPos = 1;
// spend coins
mActivityTestRule.runOnUiThread(() -> {
mActivityTestRule.getActivity().setCurrentPage(tabPos);
});
// wait while balance be a 100
waitForBalance(100);
// wait while balance isn't update
waitForBalanceUpdate();
final String balanceString = String.format("%s (%s)", MinterSDK.DEFAULT_COIN, bdHuman(new BigDecimal("100")));
ViewInteraction actionBtn = onView(allOf(
withId(R.id.action),
withText("EXCHANGE"),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction actionUseMax = onView(allOf(
withId(R.id.action_maximum),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction amountInput = onView(allOf(withId(R.id.input_amount), inViewPager(tabPos, R.id.pager)));
ViewInteraction coinInput = onView(allOf(withId(R.id.input_incoming_coin), inViewPager(tabPos, R.id.pager)));
ViewInteraction coinOutput = onView(allOf(withId(R.id.input_outgoing_coin), inViewPager(tabPos, R.id.pager)));
ViewInteraction layoutCalculation = onView(allOf(
withId(R.id.layout_calculation),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction calculationSum = onView(allOf(withId(R.id.calculation), inViewPager(tabPos, R.id.pager)));
// invisible "use max" for spend tab
actionUseMax.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.GONE)));
coinOutput.check(matches(withText(balanceString)));
// submit disabled
actionBtn.check(matches(not(isEnabled())));
amountInput.check(matches(withInputLayoutHint(R.string.label_amount)));
amountInput.check(matches(withInputLayoutError(null)));
actionBtn.check(matches(not(isEnabled())));
amountInput.perform(replaceText("0"));
amountInput.check(matches(withInputLayoutError("Amount must be greater than 0")));
actionBtn.check(matches(not(isEnabled())));
// empty string is valid zero
amountInput.perform(replaceText(""));
amountInput.check(matches(withInputLayoutError("Amount must be greater than 0")));
actionBtn.check(matches(not(isEnabled())));
final String amount = "1";
// valid value
amountInput.perform(replaceText(amount));
amountInput.check(matches(withInputLayoutError(null)));
actionBtn.check(matches(not(isEnabled())));
// @TODO potentially, may break test, as coin can be created
coinInput.perform(replaceText("DOSNTEXIST"));
coinInput.check(matches(withInputLayoutError("Coin to buy not exists")));
// we can send transaction even if coin does not exists, because it can be no a true
actionBtn.check(matches(isEnabled()));
layoutCalculation.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.GONE)));
coinInput.perform(scrollTo(), replaceText(mExchangeCoin.getSymbol()));
layoutCalculation.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE)));
onView(allOf(withId(R.id.calculation_title), inViewPager(tabPos, R.id.pager)))
.check(matches(withText(R.string.label_you_will_pay_approximately)));
calculationSum.perform(scrollTo());
Response<GateResult<ExchangeBuyValue>> estimate1 = estimateRepo.getCoinExchangeCurrencyToBuy(MinterSDK.DEFAULT_COIN, new BigDecimal(amount), mExchangeCoin.getSymbol()).execute();
assertTrue(estimate1.body().isOk());
// amount without commission
String expectEstimate1 = bdHuman(estimate1.body().result.getAmount());
calculationSum.check(matches(withText(String.format("%s %s", expectEstimate1, MinterSDK.DEFAULT_COIN))));
} | @Test
public void testGetInputs() throws Throwable {
ExplorerCoinsRepository repo = TestWallet.app().explorerCoinsRepo();
GateEstimateRepository estimateRepo = TestWallet.app().estimateRepo();
final int tabPos = 1;
mActivityTestRule.runOnUiThread(() -> {
mActivityTestRule.getActivity().setCurrentPage(tabPos);
});
waitForBalance(100);
waitForBalanceUpdate();
final String balanceString = String.format("%s (%s)", MinterSDK.DEFAULT_COIN, bdHuman(new BigDecimal("100")));
ViewInteraction actionBtn = onView(allOf(
withId(R.id.action),
withText("EXCHANGE"),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction actionUseMax = onView(allOf(
withId(R.id.action_maximum),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction amountInput = onView(allOf(withId(R.id.input_amount), inViewPager(tabPos, R.id.pager)));
ViewInteraction coinInput = onView(allOf(withId(R.id.input_incoming_coin), inViewPager(tabPos, R.id.pager)));
ViewInteraction coinOutput = onView(allOf(withId(R.id.input_outgoing_coin), inViewPager(tabPos, R.id.pager)));
ViewInteraction layoutCalculation = onView(allOf(
withId(R.id.layout_calculation),
inViewPager(tabPos, R.id.pager)
));
ViewInteraction calculationSum = onView(allOf(withId(R.id.calculation), inViewPager(tabPos, R.id.pager)));
actionUseMax.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.GONE)));
coinOutput.check(matches(withText(balanceString)));
actionBtn.check(matches(not(isEnabled())));
amountInput.check(matches(withInputLayoutHint(R.string.label_amount)));
amountInput.check(matches(withInputLayoutError(null)));
actionBtn.check(matches(not(isEnabled())));
amountInput.perform(replaceText("0"));
amountInput.check(matches(withInputLayoutError("Amount must be greater than 0")));
actionBtn.check(matches(not(isEnabled())));
amountInput.perform(replaceText(""));
amountInput.check(matches(withInputLayoutError("Amount must be greater than 0")));
actionBtn.check(matches(not(isEnabled())));
final String amount = "1";
amountInput.perform(replaceText(amount));
amountInput.check(matches(withInputLayoutError(null)));
actionBtn.check(matches(not(isEnabled())));
coinInput.perform(replaceText("DOSNTEXIST"));
coinInput.check(matches(withInputLayoutError("Coin to buy not exists")));
actionBtn.check(matches(isEnabled()));
layoutCalculation.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.GONE)));
coinInput.perform(scrollTo(), replaceText(mExchangeCoin.getSymbol()));
layoutCalculation.check(matches(withEffectiveVisibility(ViewMatchers.Visibility.VISIBLE)));
onView(allOf(withId(R.id.calculation_title), inViewPager(tabPos, R.id.pager)))
.check(matches(withText(R.string.label_you_will_pay_approximately)));
calculationSum.perform(scrollTo());
Response<GateResult<ExchangeBuyValue>> estimate1 = estimateRepo.getCoinExchangeCurrencyToBuy(MinterSDK.DEFAULT_COIN, new BigDecimal(amount), mExchangeCoin.getSymbol()).execute();
assertTrue(estimate1.body().isOk());
String expectEstimate1 = bdHuman(estimate1.body().result.getAmount());
calculationSum.check(matches(withText(String.format("%s %s", expectEstimate1, MinterSDK.DEFAULT_COIN))));
} | @test public void testgetinputs() throws throwable { explorercoinsrepository repo = testwallet.app().explorercoinsrepo(); gateestimaterepository estimaterepo = testwallet.app().estimaterepo(); final int tabpos = 1; mactivitytestrule.runonuithread(() -> { mactivitytestrule.getactivity().setcurrentpage(tabpos); }); waitforbalance(100); waitforbalanceupdate(); final string balancestring = string.format("%s (%s)", mintersdk.default_coin, bdhuman(new bigdecimal("100"))); viewinteraction actionbtn = onview(allof( withid(r.id.action), withtext("exchange"), inviewpager(tabpos, r.id.pager) )); viewinteraction actionusemax = onview(allof( withid(r.id.action_maximum), inviewpager(tabpos, r.id.pager) )); viewinteraction amountinput = onview(allof(withid(r.id.input_amount), inviewpager(tabpos, r.id.pager))); viewinteraction coininput = onview(allof(withid(r.id.input_incoming_coin), inviewpager(tabpos, r.id.pager))); viewinteraction coinoutput = onview(allof(withid(r.id.input_outgoing_coin), inviewpager(tabpos, r.id.pager))); viewinteraction layoutcalculation = onview(allof( withid(r.id.layout_calculation), inviewpager(tabpos, r.id.pager) )); viewinteraction calculationsum = onview(allof(withid(r.id.calculation), inviewpager(tabpos, r.id.pager))); actionusemax.check(matches(witheffectivevisibility(viewmatchers.visibility.gone))); coinoutput.check(matches(withtext(balancestring))); actionbtn.check(matches(not(isenabled()))); amountinput.check(matches(withinputlayouthint(r.string.label_amount))); amountinput.check(matches(withinputlayouterror(null))); actionbtn.check(matches(not(isenabled()))); amountinput.perform(replacetext("0")); amountinput.check(matches(withinputlayouterror("amount must be greater than 0"))); actionbtn.check(matches(not(isenabled()))); amountinput.perform(replacetext("")); amountinput.check(matches(withinputlayouterror("amount must be greater than 0"))); actionbtn.check(matches(not(isenabled()))); final string amount = "1"; amountinput.perform(replacetext(amount)); amountinput.check(matches(withinputlayouterror(null))); actionbtn.check(matches(not(isenabled()))); coininput.perform(replacetext("dosntexist")); coininput.check(matches(withinputlayouterror("coin to buy not exists"))); actionbtn.check(matches(isenabled())); layoutcalculation.check(matches(witheffectivevisibility(viewmatchers.visibility.gone))); coininput.perform(scrollto(), replacetext(mexchangecoin.getsymbol())); layoutcalculation.check(matches(witheffectivevisibility(viewmatchers.visibility.visible))); onview(allof(withid(r.id.calculation_title), inviewpager(tabpos, r.id.pager))) .check(matches(withtext(r.string.label_you_will_pay_approximately))); calculationsum.perform(scrollto()); response<gateresult<exchangebuyvalue>> estimate1 = estimaterepo.getcoinexchangecurrencytobuy(mintersdk.default_coin, new bigdecimal(amount), mexchangecoin.getsymbol()).execute(); asserttrue(estimate1.body().isok()); string expectestimate1 = bdhuman(estimate1.body().result.getamount()); calculationsum.check(matches(withtext(string.format("%s %s", expectestimate1, mintersdk.default_coin)))); } | bogdyak/Android-Bip-Wallet-crypttp | [
0,
0,
0,
1
] |
24,399 | protected List<LP2Rule> createContextStartRulesForStartRule(final LP2Rule aStartRule) {
List<LP2Rule> result = new ArrayList<LP2Rule>();
// TODO make all other tags contextual tags here. for now we take only
// the counterpart
// tag of the current learning process: (opening/closing tags)
LP2RuleItem ctxItem = new LP2RuleItem();
MLLP2ContextConstraint ctxConstraint = new MLLP2ContextConstraint(
slotMaximumTokenCountMap.get(aStartRule.getTarget().getSingleSlotRawTypeName()),
aStartRule);
ctxItem.setContextConstraint(ctxConstraint);
LP2Rule ctxStartRule = new LP2Rule(this, aStartRule.getTarget());
ctxStartRule.setIsContextualRule(true);
if (aStartRule.getTarget().type == MLTargetType.SINGLE_LEFT_BOUNDARY)
ctxStartRule.addPostFillerItem(ctxItem);
else
ctxStartRule.addPreFillerItem(ctxItem);
result.add(ctxStartRule);
return result;
} | protected List<LP2Rule> createContextStartRulesForStartRule(final LP2Rule aStartRule) {
List<LP2Rule> result = new ArrayList<LP2Rule>();
LP2RuleItem ctxItem = new LP2RuleItem();
MLLP2ContextConstraint ctxConstraint = new MLLP2ContextConstraint(
slotMaximumTokenCountMap.get(aStartRule.getTarget().getSingleSlotRawTypeName()),
aStartRule);
ctxItem.setContextConstraint(ctxConstraint);
LP2Rule ctxStartRule = new LP2Rule(this, aStartRule.getTarget());
ctxStartRule.setIsContextualRule(true);
if (aStartRule.getTarget().type == MLTargetType.SINGLE_LEFT_BOUNDARY)
ctxStartRule.addPostFillerItem(ctxItem);
else
ctxStartRule.addPreFillerItem(ctxItem);
result.add(ctxStartRule);
return result;
} | protected list<lp2rule> createcontextstartrulesforstartrule(final lp2rule astartrule) { list<lp2rule> result = new arraylist<lp2rule>(); lp2ruleitem ctxitem = new lp2ruleitem(); mllp2contextconstraint ctxconstraint = new mllp2contextconstraint( slotmaximumtokencountmap.get(astartrule.gettarget().getsingleslotrawtypename()), astartrule); ctxitem.setcontextconstraint(ctxconstraint); lp2rule ctxstartrule = new lp2rule(this, astartrule.gettarget()); ctxstartrule.setiscontextualrule(true); if (astartrule.gettarget().type == mltargettype.single_left_boundary) ctxstartrule.addpostfilleritem(ctxitem); else ctxstartrule.addprefilleritem(ctxitem); result.add(ctxstartrule); return result; } | apache/uima-ruta | [
0,
1,
0,
0
] |
24,503 | @Override
public void runOpMode(){
//When the robot begins, it is in initialized state
//In this state, initialize the robot sensors and actuators
//and wait for the event: driver pushes START
//************************************************************
//*** INITIALIZE THE ROBOT
//************************************************************
autonParameters = AutonParameters.DEBUG_TWO_WHEEL;
autonParameters.setSpeed(Speed.FAST);
autonParameters.getSpeed().setK_i(0.3);
robot = new Robot(Pose.PresetPose.INNER_START_LINE, Alliance.RED, autonParameters);
targetZone = new TargetZone(robot.getAlliance(),TargetZone.Zone.B);
launchLine = new LaunchLine();
autonState = AutonState.INITIALIZE;
stateStopWatch = new StopWatch();
//Initialize the wheels
robot.initializeStandardDriveWheels(hardwareMap);
//Initialize the imu
robot.initializeImu(hardwareMap);
//Initialize the color sensors
robot.initializeColorSensors(hardwareMap);
//Initialize the digitalTouch sensors
robot.initializeEbotsDigitalTouches(hardwareMap);
//Initialize the LED lights
robot.initializeRevBlinkinLedDriver(hardwareMap);
//Initialize the Rev2mDistance Sensors
robot.initializeEbotsRev2mDistanceSensors(hardwareMap);
//Prepare the expansion hubs for bulk reads
robot.initializeExpansionHubsForBulkRead(hardwareMap);
telemetry.addLine(robot.getActualPose().toString());
telemetry.addLine("Initialization Complete!");
telemetry.update();
Pose newPose = robot.getActualPose();
newPose.getX();
robot.setTargetPose(newPose);
waitForStart();
telemetry.clearAll();
long stateTimeLimit = 0L;
while(opModeIsActive()){
switch (autonState) {
case INITIALIZE:
if (this.isStarted()) { //EVENT TO TRIGGER NEXT STATE IS PUSHING START BUTTON
//Perform the transitional actions
//Initialize the encoders
robot.initializeEncoderTrackers(autonParameters);
//Set the target pose (where the robot should drive to + heading)
Pose targetPose = new Pose(targetZone.getFieldPosition(), 0);
robot.setTargetPose(targetPose);
stateTimeLimit = robot.getEbotsMotionController().calculateTimeLimitMillis(robot);
//Set the new state
autonState = AutonState.MOVE_TO_TARGET_ZONE;
standardStateTransitionActions();
} else {
telemetry.addLine("Stuck in INITIALIZED state, something is wrong");
telemetry.update();
}
break;
case MOVE_TO_TARGET_ZONE:
if (robot.getEbotsMotionController().isTargetPoseReached(robot) //check if trigger event occurred
| stateStopWatch.getElapsedTimeMillis() > stateTimeLimit) {
//Perform transitional actions
robot.stop();
//Set the new state
autonState = AutonState.PLACE_WOBBLE_GOAL;
standardStateTransitionActions();
stateTimeLimit = 5000; //set a timelimit of 5 seconds for next state
} else {
//Perform the state actions
robot.getEbotsMotionController().moveToTargetPose(robot, stateStopWatch);
//Report telemetry
telemetry.addData("Current State", autonState.toString());
telemetry.addLine(stateStopWatch.toString(robot.getEbotsMotionController().getLoopCount()));
telemetry.addData("Actual Pose: ", robot.getActualPose().toString());
telemetry.addData("Target Pose: ", robot.getTargetPose().toString());
telemetry.addData("Error: ", robot.getPoseError().toString());
telemetry.update();
}
break;
case PLACE_WOBBLE_GOAL:
if(stateStopWatch.getElapsedTimeMillis() > stateTimeLimit){ //trigger event?
//Perform transitional actions
//TBD code to fold Wobble Arm
//Create a new target pose on the launch line in center of field
double xCoord = launchLine.getX()-(robot.getSizeCoordinate(CsysDirection.X)/2);
Pose targetPose = new Pose(xCoord, robot.getActualPose().getY(), 0);
robot.setTargetPose(targetPose);
stateTimeLimit = robot.getEbotsMotionController().calculateTimeLimitMillis(robot);
//Set the new state
autonState = AutonState.MOVE_TO_LAUNCH_LINE;
standardStateTransitionActions();
} else { //perform the state actions
//TBD code to place the wobble goal
telemetry.addData("Current State", autonState.toString());
telemetry.addLine(stateStopWatch.toString() + " time limit " + stateTimeLimit);
}
break;
case MOVE_TO_LAUNCH_LINE:
if(robot.getEbotsMotionController().isTargetPoseReached(robot) //trigger event?
| stateStopWatch.getElapsedTimeMillis() > stateTimeLimit){ //or timed out?
//Perform transitional actions
robot.stop();
//TBD spin up the ring launcher
//Set the new state
autonState = AutonState.SHOOT_POWER_SHOTS;
standardStateTransitionActions();
stateTimeLimit = 5000L;
} else { //perform the state actions
robot.getEbotsMotionController().moveToTargetPose(robot,stateStopWatch);
//Report telemetry
telemetry.addData("Current State", autonState.toString());
telemetry.addLine(stateStopWatch.toString(robot.getEbotsMotionController().getLoopCount()));
telemetry.addData("Actual Pose: ", robot.getActualPose().toString());
telemetry.addData("Target Pose: ", robot.getTargetPose().toString());
telemetry.addData("Error: ", robot.getPoseError().toString());
telemetry.update();
}
break;
case SHOOT_POWER_SHOTS:
if(stateStopWatch.getElapsedTimeMillis() > stateTimeLimit){ //trigger event?
robot.stop();
//Create a new target pose on the launch line in center of field
Pose targetPose = new Pose(launchLine.getX(), robot.getActualPose().getY(), 180);
robot.setTargetPose(targetPose);
stateTimeLimit = robot.getEbotsMotionController().calculateTimeLimitMillis(robot);
//Set the new state
autonState = AutonState.PARK_ON_LAUNCH_LINE;
standardStateTransitionActions();
} else { //perform state actions
//TBD action to launch rings at powershots
telemetry.addData("Current State", autonState.toString());
telemetry.addLine(stateStopWatch.toString() + " time limit " + stateTimeLimit);
}
break;
case PARK_ON_LAUNCH_LINE:
if(!opModeIsActive()){ //check for trigger event
robot.stop();
} else { //perform state actions
robot.getEbotsMotionController().moveToTargetPose(robot, stateStopWatch);
//Report telemetry
telemetry.addData("Current State", autonState.toString());
telemetry.addLine(stateStopWatch.toString(robot.getEbotsMotionController().getLoopCount()));
telemetry.addData("Actual Pose: ", robot.getActualPose().toString());
telemetry.addData("Target Pose: ", robot.getTargetPose().toString());
telemetry.addData("Error: ", robot.getPoseError().toString());
telemetry.update();
}
break;
}
}
//Exit opmode
robot.stop();
//State debug info
// if (debugOn) {
// Log.d(logTag, "Transitioning out of state " + autonState.toString());
// if (robot.getEbotsMotionController().isTargetPoseReached(robot)) {
// Log.d(logTag, "Pose Achieved in " + format("%.2f", stateStopWatch.getElapsedTimeSeconds()));
// } else {
// Log.d(logTag, "Failed to reach target, timed out!!! " + robot.getPoseError().toString());
// }
// }
} | @Override
public void runOpMode(){
autonParameters = AutonParameters.DEBUG_TWO_WHEEL;
autonParameters.setSpeed(Speed.FAST);
autonParameters.getSpeed().setK_i(0.3);
robot = new Robot(Pose.PresetPose.INNER_START_LINE, Alliance.RED, autonParameters);
targetZone = new TargetZone(robot.getAlliance(),TargetZone.Zone.B);
launchLine = new LaunchLine();
autonState = AutonState.INITIALIZE;
stateStopWatch = new StopWatch();
robot.initializeStandardDriveWheels(hardwareMap);
robot.initializeImu(hardwareMap);
robot.initializeColorSensors(hardwareMap);
robot.initializeEbotsDigitalTouches(hardwareMap);
robot.initializeRevBlinkinLedDriver(hardwareMap);
robot.initializeEbotsRev2mDistanceSensors(hardwareMap);
robot.initializeExpansionHubsForBulkRead(hardwareMap);
telemetry.addLine(robot.getActualPose().toString());
telemetry.addLine("Initialization Complete!");
telemetry.update();
Pose newPose = robot.getActualPose();
newPose.getX();
robot.setTargetPose(newPose);
waitForStart();
telemetry.clearAll();
long stateTimeLimit = 0L;
while(opModeIsActive()){
switch (autonState) {
case INITIALIZE:
if (this.isStarted()) {
robot.initializeEncoderTrackers(autonParameters);
Pose targetPose = new Pose(targetZone.getFieldPosition(), 0);
robot.setTargetPose(targetPose);
stateTimeLimit = robot.getEbotsMotionController().calculateTimeLimitMillis(robot);
autonState = AutonState.MOVE_TO_TARGET_ZONE;
standardStateTransitionActions();
} else {
telemetry.addLine("Stuck in INITIALIZED state, something is wrong");
telemetry.update();
}
break;
case MOVE_TO_TARGET_ZONE:
if (robot.getEbotsMotionController().isTargetPoseReached(robot)
| stateStopWatch.getElapsedTimeMillis() > stateTimeLimit) {
robot.stop();
autonState = AutonState.PLACE_WOBBLE_GOAL;
standardStateTransitionActions();
stateTimeLimit = 5000;
} else {
robot.getEbotsMotionController().moveToTargetPose(robot, stateStopWatch);
telemetry.addData("Current State", autonState.toString());
telemetry.addLine(stateStopWatch.toString(robot.getEbotsMotionController().getLoopCount()));
telemetry.addData("Actual Pose: ", robot.getActualPose().toString());
telemetry.addData("Target Pose: ", robot.getTargetPose().toString());
telemetry.addData("Error: ", robot.getPoseError().toString());
telemetry.update();
}
break;
case PLACE_WOBBLE_GOAL:
if(stateStopWatch.getElapsedTimeMillis() > stateTimeLimit){
double xCoord = launchLine.getX()-(robot.getSizeCoordinate(CsysDirection.X)/2);
Pose targetPose = new Pose(xCoord, robot.getActualPose().getY(), 0);
robot.setTargetPose(targetPose);
stateTimeLimit = robot.getEbotsMotionController().calculateTimeLimitMillis(robot);
autonState = AutonState.MOVE_TO_LAUNCH_LINE;
standardStateTransitionActions();
} else {
telemetry.addData("Current State", autonState.toString());
telemetry.addLine(stateStopWatch.toString() + " time limit " + stateTimeLimit);
}
break;
case MOVE_TO_LAUNCH_LINE:
if(robot.getEbotsMotionController().isTargetPoseReached(robot)
| stateStopWatch.getElapsedTimeMillis() > stateTimeLimit){
robot.stop();
autonState = AutonState.SHOOT_POWER_SHOTS;
standardStateTransitionActions();
stateTimeLimit = 5000L;
} else {
robot.getEbotsMotionController().moveToTargetPose(robot,stateStopWatch);
telemetry.addData("Current State", autonState.toString());
telemetry.addLine(stateStopWatch.toString(robot.getEbotsMotionController().getLoopCount()));
telemetry.addData("Actual Pose: ", robot.getActualPose().toString());
telemetry.addData("Target Pose: ", robot.getTargetPose().toString());
telemetry.addData("Error: ", robot.getPoseError().toString());
telemetry.update();
}
break;
case SHOOT_POWER_SHOTS:
if(stateStopWatch.getElapsedTimeMillis() > stateTimeLimit){
robot.stop();
Pose targetPose = new Pose(launchLine.getX(), robot.getActualPose().getY(), 180);
robot.setTargetPose(targetPose);
stateTimeLimit = robot.getEbotsMotionController().calculateTimeLimitMillis(robot);
autonState = AutonState.PARK_ON_LAUNCH_LINE;
standardStateTransitionActions();
} else {
telemetry.addData("Current State", autonState.toString());
telemetry.addLine(stateStopWatch.toString() + " time limit " + stateTimeLimit);
}
break;
case PARK_ON_LAUNCH_LINE:
if(!opModeIsActive()){
robot.stop();
} else {
robot.getEbotsMotionController().moveToTargetPose(robot, stateStopWatch);
telemetry.addData("Current State", autonState.toString());
telemetry.addLine(stateStopWatch.toString(robot.getEbotsMotionController().getLoopCount()));
telemetry.addData("Actual Pose: ", robot.getActualPose().toString());
telemetry.addData("Target Pose: ", robot.getTargetPose().toString());
telemetry.addData("Error: ", robot.getPoseError().toString());
telemetry.update();
}
break;
}
}
robot.stop();
} | @override public void runopmode(){ autonparameters = autonparameters.debug_two_wheel; autonparameters.setspeed(speed.fast); autonparameters.getspeed().setk_i(0.3); robot = new robot(pose.presetpose.inner_start_line, alliance.red, autonparameters); targetzone = new targetzone(robot.getalliance(),targetzone.zone.b); launchline = new launchline(); autonstate = autonstate.initialize; statestopwatch = new stopwatch(); robot.initializestandarddrivewheels(hardwaremap); robot.initializeimu(hardwaremap); robot.initializecolorsensors(hardwaremap); robot.initializeebotsdigitaltouches(hardwaremap); robot.initializerevblinkinleddriver(hardwaremap); robot.initializeebotsrev2mdistancesensors(hardwaremap); robot.initializeexpansionhubsforbulkread(hardwaremap); telemetry.addline(robot.getactualpose().tostring()); telemetry.addline("initialization complete!"); telemetry.update(); pose newpose = robot.getactualpose(); newpose.getx(); robot.settargetpose(newpose); waitforstart(); telemetry.clearall(); long statetimelimit = 0l; while(opmodeisactive()){ switch (autonstate) { case initialize: if (this.isstarted()) { robot.initializeencodertrackers(autonparameters); pose targetpose = new pose(targetzone.getfieldposition(), 0); robot.settargetpose(targetpose); statetimelimit = robot.getebotsmotioncontroller().calculatetimelimitmillis(robot); autonstate = autonstate.move_to_target_zone; standardstatetransitionactions(); } else { telemetry.addline("stuck in initialized state, something is wrong"); telemetry.update(); } break; case move_to_target_zone: if (robot.getebotsmotioncontroller().istargetposereached(robot) | statestopwatch.getelapsedtimemillis() > statetimelimit) { robot.stop(); autonstate = autonstate.place_wobble_goal; standardstatetransitionactions(); statetimelimit = 5000; } else { robot.getebotsmotioncontroller().movetotargetpose(robot, statestopwatch); telemetry.adddata("current state", autonstate.tostring()); telemetry.addline(statestopwatch.tostring(robot.getebotsmotioncontroller().getloopcount())); telemetry.adddata("actual pose: ", robot.getactualpose().tostring()); telemetry.adddata("target pose: ", robot.gettargetpose().tostring()); telemetry.adddata("error: ", robot.getposeerror().tostring()); telemetry.update(); } break; case place_wobble_goal: if(statestopwatch.getelapsedtimemillis() > statetimelimit){ double xcoord = launchline.getx()-(robot.getsizecoordinate(csysdirection.x)/2); pose targetpose = new pose(xcoord, robot.getactualpose().gety(), 0); robot.settargetpose(targetpose); statetimelimit = robot.getebotsmotioncontroller().calculatetimelimitmillis(robot); autonstate = autonstate.move_to_launch_line; standardstatetransitionactions(); } else { telemetry.adddata("current state", autonstate.tostring()); telemetry.addline(statestopwatch.tostring() + " time limit " + statetimelimit); } break; case move_to_launch_line: if(robot.getebotsmotioncontroller().istargetposereached(robot) | statestopwatch.getelapsedtimemillis() > statetimelimit){ robot.stop(); autonstate = autonstate.shoot_power_shots; standardstatetransitionactions(); statetimelimit = 5000l; } else { robot.getebotsmotioncontroller().movetotargetpose(robot,statestopwatch); telemetry.adddata("current state", autonstate.tostring()); telemetry.addline(statestopwatch.tostring(robot.getebotsmotioncontroller().getloopcount())); telemetry.adddata("actual pose: ", robot.getactualpose().tostring()); telemetry.adddata("target pose: ", robot.gettargetpose().tostring()); telemetry.adddata("error: ", robot.getposeerror().tostring()); 
telemetry.update(); } break; case shoot_power_shots: if(statestopwatch.getelapsedtimemillis() > statetimelimit){ robot.stop(); pose targetpose = new pose(launchline.getx(), robot.getactualpose().gety(), 180); robot.settargetpose(targetpose); statetimelimit = robot.getebotsmotioncontroller().calculatetimelimitmillis(robot); autonstate = autonstate.park_on_launch_line; standardstatetransitionactions(); } else { telemetry.adddata("current state", autonstate.tostring()); telemetry.addline(statestopwatch.tostring() + " time limit " + statetimelimit); } break; case park_on_launch_line: if(!opmodeisactive()){ robot.stop(); } else { robot.getebotsmotioncontroller().movetotargetpose(robot, statestopwatch); telemetry.adddata("current state", autonstate.tostring()); telemetry.addline(statestopwatch.tostring(robot.getebotsmotioncontroller().getloopcount())); telemetry.adddata("actual pose: ", robot.getactualpose().tostring()); telemetry.adddata("target pose: ", robot.gettargetpose().tostring()); telemetry.adddata("error: ", robot.getposeerror().tostring()); telemetry.update(); } break; } } robot.stop(); } | bignaczak/eBots2020_V2 | [
0,
1,
0,
0
] |
28 | @Override
public void process(JCas jCas) throws AnalysisEngineProcessException {
long startTime = System.currentTimeMillis();
FeatureExtractor fe = new NYTEntitySalienceFeatureExtractor();
List<EntityInstance> entityInstances;
try {
entityInstances = fe.getEntityInstances(jCas, TrainingSettings.FeatureExtractor.ENTITY_SALIENCE);
final int featureVectorSize = FeatureSetFactory.createFeatureSet(TrainingSettings.FeatureExtractor.ENTITY_SALIENCE).getFeatureVectorSize();
//TODO: For each model create separate implementation.
RandomForestClassificationModel rfm = (RandomForestClassificationModel)trainingModel.stages()[2];
for(EntityInstance ei : entityInstances) {
Vector vei = FeatureValueInstanceUtils.convertToSparkMLVector(ei, featureVectorSize);
double label = rfm.predict(vei);
Vector probabilities = rfm.predictProbability(vei);
double salience = probabilities.toArray()[1];
SalientEntity salientEntity = new SalientEntity(jCas, 0, 0);
salientEntity.setLabel(label);
salientEntity.setID(ei.getEntityId());
salientEntity.setSalience(salience);
salientEntity.addToIndexes();
}
long endTime = System.currentTimeMillis() - startTime;
logger.debug("Annotating salient entities finished in {}ms.", endTime);
} catch (Exception e) {
throw new AnalysisEngineProcessException(e);
}
} | @Override
public void process(JCas jCas) throws AnalysisEngineProcessException {
long startTime = System.currentTimeMillis();
FeatureExtractor fe = new NYTEntitySalienceFeatureExtractor();
List<EntityInstance> entityInstances;
try {
entityInstances = fe.getEntityInstances(jCas, TrainingSettings.FeatureExtractor.ENTITY_SALIENCE);
final int featureVectorSize = FeatureSetFactory.createFeatureSet(TrainingSettings.FeatureExtractor.ENTITY_SALIENCE).getFeatureVectorSize();
RandomForestClassificationModel rfm = (RandomForestClassificationModel)trainingModel.stages()[2];
for(EntityInstance ei : entityInstances) {
Vector vei = FeatureValueInstanceUtils.convertToSparkMLVector(ei, featureVectorSize);
double label = rfm.predict(vei);
Vector probabilities = rfm.predictProbability(vei);
double salience = probabilities.toArray()[1];
SalientEntity salientEntity = new SalientEntity(jCas, 0, 0);
salientEntity.setLabel(label);
salientEntity.setID(ei.getEntityId());
salientEntity.setSalience(salience);
salientEntity.addToIndexes();
}
long endTime = System.currentTimeMillis() - startTime;
logger.debug("Annotating salient entities finished in {}ms.", endTime);
} catch (Exception e) {
throw new AnalysisEngineProcessException(e);
}
} | @override public void process(jcas jcas) throws analysisengineprocessexception { long starttime = system.currenttimemillis(); featureextractor fe = new nytentitysaliencefeatureextractor(); list<entityinstance> entityinstances; try { entityinstances = fe.getentityinstances(jcas, trainingsettings.featureextractor.entity_salience); final int featurevectorsize = featuresetfactory.createfeatureset(trainingsettings.featureextractor.entity_salience).getfeaturevectorsize(); randomforestclassificationmodel rfm = (randomforestclassificationmodel)trainingmodel.stages()[2]; for(entityinstance ei : entityinstances) { vector vei = featurevalueinstanceutils.converttosparkmlvector(ei, featurevectorsize); double label = rfm.predict(vei); vector probabilities = rfm.predictprobability(vei); double salience = probabilities.toarray()[1]; saliententity saliententity = new saliententity(jcas, 0, 0); saliententity.setlabel(label); saliententity.setid(ei.getentityid()); saliententity.setsalience(salience); saliententity.addtoindexes(); } long endtime = system.currenttimemillis() - starttime; logger.debug("annotating salient entities finished in {}ms.", endtime); } catch (exception e) { throw new analysisengineprocessexception(e); } } | dmilcevski/ambiverse-nlu | [
0,
1,
0,
0
] |
32 | private void generateArttribute() {
String temp = "";
for (String s : connectedElements)
temp = temp + s + " ";
// TODO: check here, may be necessary to remove the last space.
linkingResourceElement.setAttribute(
"connectedResourceContainers_LinkingResource", temp);
} | private void generateArttribute() {
String temp = "";
for (String s : connectedElements)
temp = temp + s + " ";
linkingResourceElement.setAttribute(
"connectedResourceContainers_LinkingResource", temp);
} | private void generatearttribute() { string temp = ""; for (string s : connectedelements) temp = temp + s + " "; linkingresourceelement.setattribute( "connectedresourcecontainers_linkingresource", temp); } | deib-polimi/modaclouds-space4cloud | [
1,
0,
0,
0
] |
100 | @Override
public boolean hasRole(final PrincipalCollection principal, final String roleIdentifier) {
// TODO Auto-generated method stub
// return super.hasRole(principal, roleIdentifier);
for (final Object p : principal.fromRealm(REALM_NAME)) {
if (p instanceof GenericPrincipal) {
final GenericPrincipal gp = (GenericPrincipal) p;
for (final String r : gp.getRoles()) {
if (r.equals(roleIdentifier)) {
return true;
}
}
}
}
return false;
} | @Override
public boolean hasRole(final PrincipalCollection principal, final String roleIdentifier) {
for (final Object p : principal.fromRealm(REALM_NAME)) {
if (p instanceof GenericPrincipal) {
final GenericPrincipal gp = (GenericPrincipal) p;
for (final String r : gp.getRoles()) {
if (r.equals(roleIdentifier)) {
return true;
}
}
}
}
return false;
} | @override public boolean hasrole(final principalcollection principal, final string roleidentifier) { for (final object p : principal.fromrealm(realm_name)) { if (p instanceof genericprincipal) { final genericprincipal gp = (genericprincipal) p; for (final string r : gp.getroles()) { if (r.equals(roleidentifier)) { return true; } } } } return false; } | danieljue/graphene | [
1,
0,
0,
0
] |
8,408 | private synchronized static void initializeImplClasses() {
log.trace("exec");
// TODO load all expected impl classes, allowing fail fast rather than waiting for user to hit a certain use case
} | private synchronized static void initializeImplClasses() {
log.trace("exec");
} | private synchronized static void initializeimplclasses() { log.trace("exec"); } | esasiela/hc-log4tri | [
0,
1,
0,
0
] |
8,468 | private ProgressTracker createProgressTracker() {
ReadOperation readOperation;
RemoteGrpcPortWriteOperation grpcWriteOperation;
RegisterAndProcessBundleOperation bundleProcessOperation;
try {
readOperation = getReadOperation();
} catch (Exception exn) {
readOperation = null;
LOG.info("Unable to get read operation.", exn);
return new NullProgressTracker();
}
// If there is a exactly one of each of RemoteGrpcPortWriteOperation and
// RegisterAndProcessBundleOperation we know they have the right topology.
try {
grpcWriteOperation =
Iterables.getOnlyElement(
Iterables.filter(operations, RemoteGrpcPortWriteOperation.class));
bundleProcessOperation =
Iterables.getOnlyElement(
Iterables.filter(operations, RegisterAndProcessBundleOperation.class));
} catch (IllegalArgumentException | NoSuchElementException exn) {
// TODO: Handle more than one sdk worker processing a single bundle.
grpcWriteOperation = null;
bundleProcessOperation = null;
LOG.debug("Does not have exactly one grpcWRite and bundleProcess operation.", exn);
}
if (grpcWriteOperation != null && bundleProcessOperation != null) {
return new SingularProcessBundleProgressTracker(
readOperation, grpcWriteOperation, bundleProcessOperation);
} else {
return new ReadOperationProgressTracker(readOperation);
}
} | private ProgressTracker createProgressTracker() {
ReadOperation readOperation;
RemoteGrpcPortWriteOperation grpcWriteOperation;
RegisterAndProcessBundleOperation bundleProcessOperation;
try {
readOperation = getReadOperation();
} catch (Exception exn) {
readOperation = null;
LOG.info("Unable to get read operation.", exn);
return new NullProgressTracker();
}
try {
grpcWriteOperation =
Iterables.getOnlyElement(
Iterables.filter(operations, RemoteGrpcPortWriteOperation.class));
bundleProcessOperation =
Iterables.getOnlyElement(
Iterables.filter(operations, RegisterAndProcessBundleOperation.class));
} catch (IllegalArgumentException | NoSuchElementException exn) {
grpcWriteOperation = null;
bundleProcessOperation = null;
LOG.debug("Does not have exactly one grpcWRite and bundleProcess operation.", exn);
}
if (grpcWriteOperation != null && bundleProcessOperation != null) {
return new SingularProcessBundleProgressTracker(
readOperation, grpcWriteOperation, bundleProcessOperation);
} else {
return new ReadOperationProgressTracker(readOperation);
}
} | private progresstracker createprogresstracker() { readoperation readoperation; remotegrpcportwriteoperation grpcwriteoperation; registerandprocessbundleoperation bundleprocessoperation; try { readoperation = getreadoperation(); } catch (exception exn) { readoperation = null; log.info("unable to get read operation.", exn); return new nullprogresstracker(); } try { grpcwriteoperation = iterables.getonlyelement( iterables.filter(operations, remotegrpcportwriteoperation.class)); bundleprocessoperation = iterables.getonlyelement( iterables.filter(operations, registerandprocessbundleoperation.class)); } catch (illegalargumentexception | nosuchelementexception exn) { grpcwriteoperation = null; bundleprocessoperation = null; log.debug("does not have exactly one grpcwrite and bundleprocess operation.", exn); } if (grpcwriteoperation != null && bundleprocessoperation != null) { return new singularprocessbundleprogresstracker( readoperation, grpcwriteoperation, bundleprocessoperation); } else { return new readoperationprogresstracker(readoperation); } } | elwinarens/beam | [
0,
1,
0,
0
] |
16,671 | @Override
public void execute() {
double y = -Constants.joystick.getY();
double z = Constants.joystick.getZ();
//TODO - Could we also check if the elevator is up and limit speed?
if(y < Constants.REVERSE_MAX_SPEED) {
y = Constants.REVERSE_MAX_SPEED;
}
if(z < Constants.LEFT_MAX_SPEED ) {
z = Constants.LEFT_MAX_SPEED;
} else if(z > Constants.RIGHT_MAX_SPEED){
z = Constants.RIGHT_MAX_SPEED;
}
driveSubsystem.getDiffDrive().arcadeDrive(y, z);
} | @Override
public void execute() {
double y = -Constants.joystick.getY();
double z = Constants.joystick.getZ();
if(y < Constants.REVERSE_MAX_SPEED) {
y = Constants.REVERSE_MAX_SPEED;
}
if(z < Constants.LEFT_MAX_SPEED ) {
z = Constants.LEFT_MAX_SPEED;
} else if(z > Constants.RIGHT_MAX_SPEED){
z = Constants.RIGHT_MAX_SPEED;
}
driveSubsystem.getDiffDrive().arcadeDrive(y, z);
} | @override public void execute() { double y = -constants.joystick.gety(); double z = constants.joystick.getz(); if(y < constants.reverse_max_speed) { y = constants.reverse_max_speed; } if(z < constants.left_max_speed ) { z = constants.left_max_speed; } else if(z > constants.right_max_speed){ z = constants.right_max_speed; } drivesubsystem.getdiffdrive().arcadedrive(y, z); } | frc5826/2022-production | [
0,
1,
0,
0
] |
8,547 | private void deleteInternal(List<FileInfo> itemsToDelete, List<FileInfo> bucketsToDelete)
throws IOException {
// TODO(user): We might need to separate out children into separate batches from parents to
// avoid deleting a parent before somehow failing to delete a child.
// Delete children before their parents.
//
// Note: we modify the input list, which is ok for current usage.
// We should make a copy in case that changes in future.
itemsToDelete.sort(FILE_INFO_PATH_COMPARATOR.reversed());
if (!itemsToDelete.isEmpty()) {
List<StorageResourceId> objectsToDelete = new ArrayList<>(itemsToDelete.size());
for (FileInfo fileInfo : itemsToDelete) {
// TODO(b/110833109): populate generation ID in StorageResourceId when listing infos?
objectsToDelete.add(
new StorageResourceId(
fileInfo.getItemInfo().getBucketName(),
fileInfo.getItemInfo().getObjectName(),
fileInfo.getItemInfo().getContentGeneration()));
}
gcs.deleteObjects(objectsToDelete);
}
if (!bucketsToDelete.isEmpty()) {
List<String> bucketNames = new ArrayList<>(bucketsToDelete.size());
for (FileInfo bucketInfo : bucketsToDelete) {
bucketNames.add(bucketInfo.getItemInfo().getResourceId().getBucketName());
}
if (options.isBucketDeleteEnabled()) {
gcs.deleteBuckets(bucketNames);
} else {
logger.atInfo().log(
"Skipping deletion of buckets because enableBucketDelete is false: %s", bucketNames);
}
}
} | private void deleteInternal(List<FileInfo> itemsToDelete, List<FileInfo> bucketsToDelete)
throws IOException {
itemsToDelete.sort(FILE_INFO_PATH_COMPARATOR.reversed());
if (!itemsToDelete.isEmpty()) {
List<StorageResourceId> objectsToDelete = new ArrayList<>(itemsToDelete.size());
for (FileInfo fileInfo : itemsToDelete) {
objectsToDelete.add(
new StorageResourceId(
fileInfo.getItemInfo().getBucketName(),
fileInfo.getItemInfo().getObjectName(),
fileInfo.getItemInfo().getContentGeneration()));
}
gcs.deleteObjects(objectsToDelete);
}
if (!bucketsToDelete.isEmpty()) {
List<String> bucketNames = new ArrayList<>(bucketsToDelete.size());
for (FileInfo bucketInfo : bucketsToDelete) {
bucketNames.add(bucketInfo.getItemInfo().getResourceId().getBucketName());
}
if (options.isBucketDeleteEnabled()) {
gcs.deleteBuckets(bucketNames);
} else {
logger.atInfo().log(
"Skipping deletion of buckets because enableBucketDelete is false: %s", bucketNames);
}
}
} | private void deleteinternal(list<fileinfo> itemstodelete, list<fileinfo> bucketstodelete) throws ioexception { itemstodelete.sort(file_info_path_comparator.reversed()); if (!itemstodelete.isempty()) { list<storageresourceid> objectstodelete = new arraylist<>(itemstodelete.size()); for (fileinfo fileinfo : itemstodelete) { objectstodelete.add( new storageresourceid( fileinfo.getiteminfo().getbucketname(), fileinfo.getiteminfo().getobjectname(), fileinfo.getiteminfo().getcontentgeneration())); } gcs.deleteobjects(objectstodelete); } if (!bucketstodelete.isempty()) { list<string> bucketnames = new arraylist<>(bucketstodelete.size()); for (fileinfo bucketinfo : bucketstodelete) { bucketnames.add(bucketinfo.getiteminfo().getresourceid().getbucketname()); } if (options.isbucketdeleteenabled()) { gcs.deletebuckets(bucketnames); } else { logger.atinfo().log( "skipping deletion of buckets because enablebucketdelete is false: %s", bucketnames); } } } | danielxjd/hadoop-connectors | [
1,
1,
0,
0
] |
16,844 | public void testEmpty() throws IOException {
final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG);
testCase(
stats("_name").field(ft.name()),
iw -> {},
stats -> {
assertEquals(0d, stats.getCount(), 0);
assertEquals(0d, stats.getSum(), 0);
assertEquals(Float.NaN, stats.getAvg(), 0);
assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0);
assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0);
assertFalse(AggregationInspectionHelper.hasValue(stats));
},
singleton(ft)
);
} | public void testEmpty() throws IOException {
final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG);
testCase(
stats("_name").field(ft.name()),
iw -> {},
stats -> {
assertEquals(0d, stats.getCount(), 0);
assertEquals(0d, stats.getSum(), 0);
assertEquals(Float.NaN, stats.getAvg(), 0);
assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0);
assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0);
assertFalse(AggregationInspectionHelper.hasValue(stats));
},
singleton(ft)
);
} | public void testempty() throws ioexception { final mappedfieldtype ft = new numberfieldmapper.numberfieldtype("field", numbertype.long); testcase( stats("_name").field(ft.name()), iw -> {}, stats -> { assertequals(0d, stats.getcount(), 0); assertequals(0d, stats.getsum(), 0); assertequals(float.nan, stats.getavg(), 0); assertequals(double.positive_infinity, stats.getmin(), 0); assertequals(double.negative_infinity, stats.getmax(), 0); assertfalse(aggregationinspectionhelper.hasvalue(stats)); }, singleton(ft) ); } | dial-workable/elasticsearch | [
0,
0,
0,
1
] |
521 | public void testEvilGroovyScripts() throws Exception {
int nodes = randomIntBetween(1, 3);
Settings nodeSettings = Settings.builder()
.put("script.inline", true)
.put("script.indexed", true)
.build();
internalCluster().startNodesAsync(nodes, nodeSettings).get();
client().admin().cluster().prepareHealth().setWaitForNodes(nodes + "").get();
client().prepareIndex("test", "doc", "1").setSource("foo", 5, "bar", "baz").setRefresh(true).get();
// Plain test
assertSuccess("");
// numeric field access
assertSuccess("def foo = doc['foo'].value; if (foo == null) { return 5; }");
// string field access
assertSuccess("def bar = doc['bar'].value; if (bar == null) { return 5; }");
// List
assertSuccess("def list = [doc['foo'].value, 3, 4]; def v = list.get(1); list.add(10)");
// Ranges
assertSuccess("def range = 1..doc['foo'].value; def v = range.get(0)");
// Maps
assertSuccess("def v = doc['foo'].value; def m = [:]; m.put(\"value\", v)");
// Times
assertSuccess("def t = Instant.now().getMillis()");
// GroovyCollections
assertSuccess("def n = [1,2,3]; GroovyCollections.max(n)");
// Fail cases:
// AccessControlException[access denied ("java.io.FilePermission" "<<ALL FILES>>" "execute")]
assertFailure("pr = Runtime.getRuntime().exec(\"touch /tmp/gotcha\"); pr.waitFor()");
// AccessControlException[access denied ("java.lang.RuntimePermission" "accessClassInPackage.sun.reflect")]
assertFailure("d = new DateTime(); d.getClass().getDeclaredMethod(\"year\").setAccessible(true)");
assertFailure("d = new DateTime(); d.\"${'get' + 'Class'}\"()." +
"\"${'getDeclared' + 'Method'}\"(\"year\").\"${'set' + 'Accessible'}\"(false)");
assertFailure("Class.forName(\"org.joda.time.DateTime\").getDeclaredMethod(\"year\").setAccessible(true)");
// AccessControlException[access denied ("groovy.security.GroovyCodeSourcePermission" "/groovy/shell")]
assertFailure("Eval.me('2 + 2')");
assertFailure("Eval.x(5, 'x + 2')");
// AccessControlException[access denied ("java.lang.RuntimePermission" "accessDeclaredMembers")]
assertFailure("d = new Date(); java.lang.reflect.Field f = Date.class.getDeclaredField(\"fastTime\");" +
" f.setAccessible(true); f.get(\"fastTime\")");
// AccessControlException[access denied ("java.io.FilePermission" "<<ALL FILES>>" "execute")]
assertFailure("def methodName = 'ex'; Runtime.\"${'get' + 'Runtime'}\"().\"${methodName}ec\"(\"touch /tmp/gotcha2\")");
// AccessControlException[access denied ("java.lang.RuntimePermission" "modifyThreadGroup")]
assertFailure("t = new Thread({ println 3 });");
// test a directory we normally have access to, but the groovy script does not.
Path dir = createTempDir();
// TODO: figure out the necessary escaping for windows paths here :)
if (!Constants.WINDOWS) {
// access denied ("java.io.FilePermission" ".../tempDir-00N" "read")
assertFailure("new File(\"" + dir + "\").exists()");
}
} | public void testEvilGroovyScripts() throws Exception {
int nodes = randomIntBetween(1, 3);
Settings nodeSettings = Settings.builder()
.put("script.inline", true)
.put("script.indexed", true)
.build();
internalCluster().startNodesAsync(nodes, nodeSettings).get();
client().admin().cluster().prepareHealth().setWaitForNodes(nodes + "").get();
client().prepareIndex("test", "doc", "1").setSource("foo", 5, "bar", "baz").setRefresh(true).get();
assertSuccess("");
assertSuccess("def foo = doc['foo'].value; if (foo == null) { return 5; }");
assertSuccess("def bar = doc['bar'].value; if (bar == null) { return 5; }");
assertSuccess("def list = [doc['foo'].value, 3, 4]; def v = list.get(1); list.add(10)");
assertSuccess("def range = 1..doc['foo'].value; def v = range.get(0)");
assertSuccess("def v = doc['foo'].value; def m = [:]; m.put(\"value\", v)");
assertSuccess("def t = Instant.now().getMillis()");
assertSuccess("def n = [1,2,3]; GroovyCollections.max(n)");
assertFailure("pr = Runtime.getRuntime().exec(\"touch /tmp/gotcha\"); pr.waitFor()");
assertFailure("d = new DateTime(); d.getClass().getDeclaredMethod(\"year\").setAccessible(true)");
assertFailure("d = new DateTime(); d.\"${'get' + 'Class'}\"()." +
"\"${'getDeclared' + 'Method'}\"(\"year\").\"${'set' + 'Accessible'}\"(false)");
assertFailure("Class.forName(\"org.joda.time.DateTime\").getDeclaredMethod(\"year\").setAccessible(true)");
assertFailure("Eval.me('2 + 2')");
assertFailure("Eval.x(5, 'x + 2')");
assertFailure("d = new Date(); java.lang.reflect.Field f = Date.class.getDeclaredField(\"fastTime\");" +
" f.setAccessible(true); f.get(\"fastTime\")");
assertFailure("def methodName = 'ex'; Runtime.\"${'get' + 'Runtime'}\"().\"${methodName}ec\"(\"touch /tmp/gotcha2\")");
assertFailure("t = new Thread({ println 3 });");
Path dir = createTempDir();
if (!Constants.WINDOWS) {
assertFailure("new File(\"" + dir + "\").exists()");
}
} | public void testevilgroovyscripts() throws exception { int nodes = randomintbetween(1, 3); settings nodesettings = settings.builder() .put("script.inline", true) .put("script.indexed", true) .build(); internalcluster().startnodesasync(nodes, nodesettings).get(); client().admin().cluster().preparehealth().setwaitfornodes(nodes + "").get(); client().prepareindex("test", "doc", "1").setsource("foo", 5, "bar", "baz").setrefresh(true).get(); assertsuccess(""); assertsuccess("def foo = doc['foo'].value; if (foo == null) { return 5; }"); assertsuccess("def bar = doc['bar'].value; if (bar == null) { return 5; }"); assertsuccess("def list = [doc['foo'].value, 3, 4]; def v = list.get(1); list.add(10)"); assertsuccess("def range = 1..doc['foo'].value; def v = range.get(0)"); assertsuccess("def v = doc['foo'].value; def m = [:]; m.put(\"value\", v)"); assertsuccess("def t = instant.now().getmillis()"); assertsuccess("def n = [1,2,3]; groovycollections.max(n)"); assertfailure("pr = runtime.getruntime().exec(\"touch /tmp/gotcha\"); pr.waitfor()"); assertfailure("d = new datetime(); d.getclass().getdeclaredmethod(\"year\").setaccessible(true)"); assertfailure("d = new datetime(); d.\"${'get' + 'class'}\"()." + "\"${'getdeclared' + 'method'}\"(\"year\").\"${'set' + 'accessible'}\"(false)"); assertfailure("class.forname(\"org.joda.time.datetime\").getdeclaredmethod(\"year\").setaccessible(true)"); assertfailure("eval.me('2 + 2')"); assertfailure("eval.x(5, 'x + 2')"); assertfailure("d = new date(); java.lang.reflect.field f = date.class.getdeclaredfield(\"fasttime\");" + " f.setaccessible(true); f.get(\"fasttime\")"); assertfailure("def methodname = 'ex'; runtime.\"${'get' + 'runtime'}\"().\"${methodname}ec\"(\"touch /tmp/gotcha2\")"); assertfailure("t = new thread({ println 3 });"); path dir = createtempdir(); if (!constants.windows) { assertfailure("new file(\"" + dir + "\").exists()"); } } | drewr/elasticsearch | [
1,
0,
0,
0
] |
33,296 | private byte[] md5(String data)
{
try {
return this.getMd5Digest().digest(data.getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
/* This really shouldn't happen */
throw new RuntimeException(e);
}
} | private byte[] md5(String data)
{
try {
return this.getMd5Digest().digest(data.getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
} | private byte[] md5(string data) { try { return this.getmd5digest().digest(data.getbytes("utf-8")); } catch (unsupportedencodingexception e) { throw new runtimeexception(e); } } | dontdrinkandroot/cache.java | [
1,
0,
0,
0
] |
574 | @Before
public void setUp() throws IOException {
event = TestUtils.parse("/s3-event.put.json", S3Event.class);
// TODO: customize your mock logic for s3 client
ObjectMetadata objectMetadata = new ObjectMetadata();
objectMetadata.setContentType(CONTENT_TYPE);
when(s3Object.getObjectMetadata()).thenReturn(objectMetadata);
when(s3Client.getObject(getObjectRequest.capture())).thenReturn(s3Object);
} | @Before
public void setUp() throws IOException {
event = TestUtils.parse("/s3-event.put.json", S3Event.class);
ObjectMetadata objectMetadata = new ObjectMetadata();
objectMetadata.setContentType(CONTENT_TYPE);
when(s3Object.getObjectMetadata()).thenReturn(objectMetadata);
when(s3Client.getObject(getObjectRequest.capture())).thenReturn(s3Object);
} | @before public void setup() throws ioexception { event = testutils.parse("/s3-event.put.json", s3event.class); objectmetadata objectmetadata = new objectmetadata(); objectmetadata.setcontenttype(content_type); when(s3object.getobjectmetadata()).thenreturn(objectmetadata); when(s3client.getobject(getobjectrequest.capture())).thenreturn(s3object); } | cpsloSecondScreen/SecondScreen | [
1,
0,
0,
0
] |
16,997 | protected void processEmbeddedMember(List<AbstractMemberMetaData> mmds, AbstractClassMetaData embCmd, ClassLoaderResolver clr, EmbeddedMetaData embmd, boolean ownerNested)
{
TypeManager typeMgr = storeMgr.getNucleusContext().getTypeManager();
MetaDataManager mmgr = storeMgr.getMetaDataManager();
NamingFactory namingFactory = storeMgr.getNamingFactory();
AbstractMemberMetaData lastMmd = mmds.get(mmds.size()-1);
// Go through all members of the embedded type
int[] memberPositions = embCmd.getAllMemberPositions();
for (int i=0;i<memberPositions.length;i++)
{
AbstractMemberMetaData mmd = embCmd.getMetaDataForManagedMemberAtAbsolutePosition(memberPositions[i]);
if (mmd.getPersistenceModifier() != FieldPersistenceModifier.PERSISTENT)
{
// Don't need column if not persistent
continue;
}
if (mmds.size() == 1 && embmd != null && embmd.getOwnerMember() != null && embmd.getOwnerMember().equals(mmd.getName()))
{
// Special case of this being a link back to the owner. TODO Repeat this for nested and their owners
continue;
}
AbstractMemberMetaData embmdMmd = null;
if (embmd != null)
{
AbstractMemberMetaData[] embmdMmds = embmd.getMemberMetaData();
if (embmdMmds != null)
{
for (AbstractMemberMetaData thisMmd : embmdMmds)
{
if (thisMmd.getName().equals(mmd.getName()))
{
embmdMmd = thisMmd;
break;
}
}
}
}
RelationType relationType = mmd.getRelationType(clr);
if (relationType != RelationType.NONE && MetaDataUtils.getInstance().isMemberEmbedded(mmgr, clr, mmd, relationType, lastMmd))
{
if (RelationType.isRelationSingleValued(relationType))
{
// Nested embedded PC, so recurse
boolean nested = false;
if (storeMgr.getSupportedOptions().contains(StoreManager.OPTION_ORM_EMBEDDED_PC_NESTED))
{
nested = !storeMgr.getNucleusContext().getConfiguration().getBooleanProperty(PropertyNames.PROPERTY_METADATA_EMBEDDED_PC_FLAT);
String nestedStr = mmd.getValueForExtension("nested");
if (nestedStr != null && nestedStr.equalsIgnoreCase("" + !nested))
{
nested = !nested;
}
}
List<AbstractMemberMetaData> embMmds = new ArrayList<AbstractMemberMetaData>(mmds);
embMmds.add(mmd);
if (nested)
{
// Embedded object stored as nested under this in the owner table (where the datastore supports that)
// Add column for the owner of the embedded object, typically for the column name only
ColumnMetaData[] colmds = mmd.getColumnMetaData();
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, null);
col.setNested(true);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
// TODO Create mapping for the related info under the above column
}
// Recurse through the embedded member
processEmbeddedMember(embMmds, mmgr.getMetaDataForClass(mmd.getType(), clr), clr, embmdMmd != null ? embmdMmd.getEmbeddedMetaData() : null, nested);
}
else
{
if (mmd.hasCollection())
{
// Nested embedded collection, so recurse
if (storeMgr.getSupportedOptions().contains(StoreManager.OPTION_ORM_EMBEDDED_COLLECTION_NESTED))
{
List<AbstractMemberMetaData> embMmds = new ArrayList<AbstractMemberMetaData>(mmds);
embMmds.add(mmd);
// Add column for the collection (since the store needs a name to reference it by)
ColumnMetaData[] colmds = mmd.getColumnMetaData();
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, null);
col.setNested(true);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
// TODO Create mapping for the related info under the above column
// Recurse through the embedded collection element
processEmbeddedMember(embMmds, mmgr.getMetaDataForClass(mmd.getCollection().getElementType(), clr), clr, embmdMmd != null ? embmdMmd.getEmbeddedMetaData() : null, true);
}
else
{
NucleusLogger.DATASTORE_SCHEMA.warn("Member " + mmd.getFullFieldName() + " is a (nested) embedded collection. Not supported for this datastore so ignoring");
continue;
}
}
else if (mmd.hasMap())
{
// Nested embedded map, so recurse
if (storeMgr.getSupportedOptions().contains(StoreManager.OPTION_ORM_EMBEDDED_MAP_NESTED))
{
// TODO Support nested embedded map key/value
NucleusLogger.DATASTORE_SCHEMA.warn("Member " + mmd.getFullFieldName() + " is a (nested) embedded map. Not yet supported so ignoring");
}
else
{
NucleusLogger.DATASTORE_SCHEMA.warn("Member " + mmd.getFullFieldName() + " is a (nested) embedded map. Not supported for this datastore so ignoring");
continue;
}
}
else if (mmd.hasArray())
{
// Nested embedded array, so recurse
if (storeMgr.getSupportedOptions().contains(StoreManager.OPTION_ORM_EMBEDDED_ARRAY_NESTED))
{
List<AbstractMemberMetaData> embMmds = new ArrayList<AbstractMemberMetaData>(mmds);
embMmds.add(mmd);
// Add column for the array (since the store needs a name to reference it by) TODO Extract this block out and reuse it
ColumnMetaData[] colmds = mmd.getColumnMetaData();
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, null);
col.setNested(true);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
// TODO Create mapping for the related info under the above column
// Recurse through the embedded array element
processEmbeddedMember(embMmds, mmgr.getMetaDataForClass(mmd.getArray().getElementType(), clr), clr, embmdMmd != null ? embmdMmd.getEmbeddedMetaData() : null, true);
}
else
{
NucleusLogger.DATASTORE_SCHEMA.warn("Member " + mmd.getFullFieldName() + " is a (nested) embedded array. Not supported for this datastore so ignoring");
continue;
}
}
}
}
else
{
List<AbstractMemberMetaData> embMmds = new ArrayList<AbstractMemberMetaData>(mmds);
embMmds.add(mmd);
ColumnMetaData[] colmds = mmd.getColumnMetaData();
if (relationType != RelationType.NONE)
{
// 1-1/N-1 stored as single column with persistable-id
// 1-N/M-N stored as single column with collection<persistable-id>
// Create column for basic type
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, null);
col.setNested(ownerNested);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
}
else
{
TypeConverter typeConv = getTypeConverterForMember(mmd, colmds, typeMgr); // TODO Pass in embedded colmds if they have jdbcType info?
if (typeConv != null)
{
// Create column(s) for this type using a TypeConverter
if (typeConv instanceof MultiColumnConverter)
{
Class[] colJavaTypes = ((MultiColumnConverter)typeConv).getDatastoreColumnTypes();
Column[] cols = new Column[colJavaTypes.length];
for (int j=0;j<colJavaTypes.length;j++)
{
String colName = namingFactory.getColumnName(embMmds, j);
ColumnImpl col = addEmbeddedColumn(colName, typeConv);
col.setNested(ownerNested);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == colJavaTypes.length && embmdMmd.getColumnMetaData()[j].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[j].getPosition());
}
else if (colmds != null && colmds.length == colJavaTypes.length && colmds[j].getPosition() != null)
{
col.setPosition(colmds[j].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == colJavaTypes.length && embmdMmd.getColumnMetaData()[j].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[j].getJdbcType());
}
else if (colmds != null && colmds.length == colJavaTypes.length && colmds[j].getJdbcType() != null)
{
col.setJdbcType(colmds[j].getJdbcType());
}
cols[j] = col;
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, cols, typeConv);
for (int j=0;j<colJavaTypes.length;j++)
{
((ColumnImpl)cols[j]).setMemberColumnMapping(mapping);
}
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
}
else
{
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, typeConv);
col.setNested(ownerNested);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
mapping.setTypeConverter(typeConv);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
}
}
else
{
// Create column for basic type
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, null);
col.setNested(ownerNested);
AbstractMemberMetaData theMmd = embMmds.get(0);
if (theMmd.isPrimaryKey())
{
col.setPrimaryKey();
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
}
}
}
}
} | protected void processEmbeddedMember(List<AbstractMemberMetaData> mmds, AbstractClassMetaData embCmd, ClassLoaderResolver clr, EmbeddedMetaData embmd, boolean ownerNested)
{
TypeManager typeMgr = storeMgr.getNucleusContext().getTypeManager();
MetaDataManager mmgr = storeMgr.getMetaDataManager();
NamingFactory namingFactory = storeMgr.getNamingFactory();
AbstractMemberMetaData lastMmd = mmds.get(mmds.size()-1);
int[] memberPositions = embCmd.getAllMemberPositions();
for (int i=0;i<memberPositions.length;i++)
{
AbstractMemberMetaData mmd = embCmd.getMetaDataForManagedMemberAtAbsolutePosition(memberPositions[i]);
if (mmd.getPersistenceModifier() != FieldPersistenceModifier.PERSISTENT)
{
continue;
}
if (mmds.size() == 1 && embmd != null && embmd.getOwnerMember() != null && embmd.getOwnerMember().equals(mmd.getName()))
{
continue;
}
AbstractMemberMetaData embmdMmd = null;
if (embmd != null)
{
AbstractMemberMetaData[] embmdMmds = embmd.getMemberMetaData();
if (embmdMmds != null)
{
for (AbstractMemberMetaData thisMmd : embmdMmds)
{
if (thisMmd.getName().equals(mmd.getName()))
{
embmdMmd = thisMmd;
break;
}
}
}
}
RelationType relationType = mmd.getRelationType(clr);
if (relationType != RelationType.NONE && MetaDataUtils.getInstance().isMemberEmbedded(mmgr, clr, mmd, relationType, lastMmd))
{
if (RelationType.isRelationSingleValued(relationType))
{
boolean nested = false;
if (storeMgr.getSupportedOptions().contains(StoreManager.OPTION_ORM_EMBEDDED_PC_NESTED))
{
nested = !storeMgr.getNucleusContext().getConfiguration().getBooleanProperty(PropertyNames.PROPERTY_METADATA_EMBEDDED_PC_FLAT);
String nestedStr = mmd.getValueForExtension("nested");
if (nestedStr != null && nestedStr.equalsIgnoreCase("" + !nested))
{
nested = !nested;
}
}
List<AbstractMemberMetaData> embMmds = new ArrayList<AbstractMemberMetaData>(mmds);
embMmds.add(mmd);
if (nested)
{
ColumnMetaData[] colmds = mmd.getColumnMetaData();
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, null);
col.setNested(true);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
}
processEmbeddedMember(embMmds, mmgr.getMetaDataForClass(mmd.getType(), clr), clr, embmdMmd != null ? embmdMmd.getEmbeddedMetaData() : null, nested);
}
else
{
if (mmd.hasCollection())
{
if (storeMgr.getSupportedOptions().contains(StoreManager.OPTION_ORM_EMBEDDED_COLLECTION_NESTED))
{
List<AbstractMemberMetaData> embMmds = new ArrayList<AbstractMemberMetaData>(mmds);
embMmds.add(mmd);
ColumnMetaData[] colmds = mmd.getColumnMetaData();
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, null);
col.setNested(true);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
processEmbeddedMember(embMmds, mmgr.getMetaDataForClass(mmd.getCollection().getElementType(), clr), clr, embmdMmd != null ? embmdMmd.getEmbeddedMetaData() : null, true);
}
else
{
NucleusLogger.DATASTORE_SCHEMA.warn("Member " + mmd.getFullFieldName() + " is a (nested) embedded collection. Not supported for this datastore so ignoring");
continue;
}
}
else if (mmd.hasMap())
{
if (storeMgr.getSupportedOptions().contains(StoreManager.OPTION_ORM_EMBEDDED_MAP_NESTED))
{
NucleusLogger.DATASTORE_SCHEMA.warn("Member " + mmd.getFullFieldName() + " is a (nested) embedded map. Not yet supported so ignoring");
}
else
{
NucleusLogger.DATASTORE_SCHEMA.warn("Member " + mmd.getFullFieldName() + " is a (nested) embedded map. Not supported for this datastore so ignoring");
continue;
}
}
else if (mmd.hasArray())
{
if (storeMgr.getSupportedOptions().contains(StoreManager.OPTION_ORM_EMBEDDED_ARRAY_NESTED))
{
List<AbstractMemberMetaData> embMmds = new ArrayList<AbstractMemberMetaData>(mmds);
embMmds.add(mmd);
ColumnMetaData[] colmds = mmd.getColumnMetaData();
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, null);
col.setNested(true);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
processEmbeddedMember(embMmds, mmgr.getMetaDataForClass(mmd.getArray().getElementType(), clr), clr, embmdMmd != null ? embmdMmd.getEmbeddedMetaData() : null, true);
}
else
{
NucleusLogger.DATASTORE_SCHEMA.warn("Member " + mmd.getFullFieldName() + " is a (nested) embedded array. Not supported for this datastore so ignoring");
continue;
}
}
}
}
else
{
List<AbstractMemberMetaData> embMmds = new ArrayList<AbstractMemberMetaData>(mmds);
embMmds.add(mmd);
ColumnMetaData[] colmds = mmd.getColumnMetaData();
if (relationType != RelationType.NONE)
{
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, null);
col.setNested(ownerNested);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
}
else
{
TypeConverter typeConv = getTypeConverterForMember(mmd, colmds, typeMgr);
if (typeConv != null)
{
if (typeConv instanceof MultiColumnConverter)
{
Class[] colJavaTypes = ((MultiColumnConverter)typeConv).getDatastoreColumnTypes();
Column[] cols = new Column[colJavaTypes.length];
for (int j=0;j<colJavaTypes.length;j++)
{
String colName = namingFactory.getColumnName(embMmds, j);
ColumnImpl col = addEmbeddedColumn(colName, typeConv);
col.setNested(ownerNested);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == colJavaTypes.length && embmdMmd.getColumnMetaData()[j].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[j].getPosition());
}
else if (colmds != null && colmds.length == colJavaTypes.length && colmds[j].getPosition() != null)
{
col.setPosition(colmds[j].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == colJavaTypes.length && embmdMmd.getColumnMetaData()[j].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[j].getJdbcType());
}
else if (colmds != null && colmds.length == colJavaTypes.length && colmds[j].getJdbcType() != null)
{
col.setJdbcType(colmds[j].getJdbcType());
}
cols[j] = col;
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, cols, typeConv);
for (int j=0;j<colJavaTypes.length;j++)
{
((ColumnImpl)cols[j]).setMemberColumnMapping(mapping);
}
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
}
else
{
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, typeConv);
col.setNested(ownerNested);
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
mapping.setTypeConverter(typeConv);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
}
}
else
{
String colName = namingFactory.getColumnName(embMmds, 0);
ColumnImpl col = addEmbeddedColumn(colName, null);
col.setNested(ownerNested);
AbstractMemberMetaData theMmd = embMmds.get(0);
if (theMmd.isPrimaryKey())
{
col.setPrimaryKey();
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getPosition() != null)
{
col.setPosition(embmdMmd.getColumnMetaData()[0].getPosition());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getPosition() != null)
{
col.setPosition(colmds[0].getPosition());
}
if (embmdMmd != null && embmdMmd.getColumnMetaData() != null && embmdMmd.getColumnMetaData().length == 1 && embmdMmd.getColumnMetaData()[0].getJdbcType() != null)
{
col.setJdbcType(embmdMmd.getColumnMetaData()[0].getJdbcType());
}
else if (colmds != null && colmds.length == 1 && colmds[0].getJdbcType() != null)
{
col.setJdbcType(colmds[0].getJdbcType());
}
MemberColumnMapping mapping = new MemberColumnMappingImpl(mmd, col);
col.setMemberColumnMapping(mapping);
if (schemaVerifier != null)
{
schemaVerifier.attributeEmbeddedMember(mapping, embMmds);
}
mappingByEmbeddedMember.put(getEmbeddedMemberNavigatedPath(embMmds), mapping);
}
}
}
}
} | protected void processembeddedmember(list<abstractmembermetadata> mmds, abstractclassmetadata embcmd, classloaderresolver clr, embeddedmetadata embmd, boolean ownernested) { typemanager typemgr = storemgr.getnucleuscontext().gettypemanager(); metadatamanager mmgr = storemgr.getmetadatamanager(); namingfactory namingfactory = storemgr.getnamingfactory(); abstractmembermetadata lastmmd = mmds.get(mmds.size()-1); int[] memberpositions = embcmd.getallmemberpositions(); for (int i=0;i<memberpositions.length;i++) { abstractmembermetadata mmd = embcmd.getmetadataformanagedmemberatabsoluteposition(memberpositions[i]); if (mmd.getpersistencemodifier() != fieldpersistencemodifier.persistent) { continue; } if (mmds.size() == 1 && embmd != null && embmd.getownermember() != null && embmd.getownermember().equals(mmd.getname())) { continue; } abstractmembermetadata embmdmmd = null; if (embmd != null) { abstractmembermetadata[] embmdmmds = embmd.getmembermetadata(); if (embmdmmds != null) { for (abstractmembermetadata thismmd : embmdmmds) { if (thismmd.getname().equals(mmd.getname())) { embmdmmd = thismmd; break; } } } } relationtype relationtype = mmd.getrelationtype(clr); if (relationtype != relationtype.none && metadatautils.getinstance().ismemberembedded(mmgr, clr, mmd, relationtype, lastmmd)) { if (relationtype.isrelationsinglevalued(relationtype)) { boolean nested = false; if (storemgr.getsupportedoptions().contains(storemanager.option_orm_embedded_pc_nested)) { nested = !storemgr.getnucleuscontext().getconfiguration().getbooleanproperty(propertynames.property_metadata_embedded_pc_flat); string nestedstr = mmd.getvalueforextension("nested"); if (nestedstr != null && nestedstr.equalsignorecase("" + !nested)) { nested = !nested; } } list<abstractmembermetadata> embmmds = new arraylist<abstractmembermetadata>(mmds); embmmds.add(mmd); if (nested) { columnmetadata[] colmds = mmd.getcolumnmetadata(); string colname = namingfactory.getcolumnname(embmmds, 0); columnimpl col = addembeddedcolumn(colname, null); col.setnested(true); if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getposition() != null) { col.setposition(embmdmmd.getcolumnmetadata()[0].getposition()); } else if (colmds != null && colmds.length == 1 && colmds[0].getposition() != null) { col.setposition(colmds[0].getposition()); } if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getjdbctype() != null) { col.setjdbctype(embmdmmd.getcolumnmetadata()[0].getjdbctype()); } else if (colmds != null && colmds.length == 1 && colmds[0].getjdbctype() != null) { col.setjdbctype(colmds[0].getjdbctype()); } membercolumnmapping mapping = new membercolumnmappingimpl(mmd, col); col.setmembercolumnmapping(mapping); if (schemaverifier != null) { schemaverifier.attributeembeddedmember(mapping, embmmds); } mappingbyembeddedmember.put(getembeddedmembernavigatedpath(embmmds), mapping); } processembeddedmember(embmmds, mmgr.getmetadataforclass(mmd.gettype(), clr), clr, embmdmmd != null ? 
embmdmmd.getembeddedmetadata() : null, nested); } else { if (mmd.hascollection()) { if (storemgr.getsupportedoptions().contains(storemanager.option_orm_embedded_collection_nested)) { list<abstractmembermetadata> embmmds = new arraylist<abstractmembermetadata>(mmds); embmmds.add(mmd); columnmetadata[] colmds = mmd.getcolumnmetadata(); string colname = namingfactory.getcolumnname(embmmds, 0); columnimpl col = addembeddedcolumn(colname, null); col.setnested(true); if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getposition() != null) { col.setposition(embmdmmd.getcolumnmetadata()[0].getposition()); } else if (colmds != null && colmds.length == 1 && colmds[0].getposition() != null) { col.setposition(colmds[0].getposition()); } if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getjdbctype() != null) { col.setjdbctype(embmdmmd.getcolumnmetadata()[0].getjdbctype()); } else if (colmds != null && colmds.length == 1 && colmds[0].getjdbctype() != null) { col.setjdbctype(colmds[0].getjdbctype()); } membercolumnmapping mapping = new membercolumnmappingimpl(mmd, col); col.setmembercolumnmapping(mapping); if (schemaverifier != null) { schemaverifier.attributeembeddedmember(mapping, embmmds); } mappingbyembeddedmember.put(getembeddedmembernavigatedpath(embmmds), mapping); processembeddedmember(embmmds, mmgr.getmetadataforclass(mmd.getcollection().getelementtype(), clr), clr, embmdmmd != null ? embmdmmd.getembeddedmetadata() : null, true); } else { nucleuslogger.datastore_schema.warn("member " + mmd.getfullfieldname() + " is a (nested) embedded collection. not supported for this datastore so ignoring"); continue; } } else if (mmd.hasmap()) { if (storemgr.getsupportedoptions().contains(storemanager.option_orm_embedded_map_nested)) { nucleuslogger.datastore_schema.warn("member " + mmd.getfullfieldname() + " is a (nested) embedded map. not yet supported so ignoring"); } else { nucleuslogger.datastore_schema.warn("member " + mmd.getfullfieldname() + " is a (nested) embedded map. 
not supported for this datastore so ignoring"); continue; } } else if (mmd.hasarray()) { if (storemgr.getsupportedoptions().contains(storemanager.option_orm_embedded_array_nested)) { list<abstractmembermetadata> embmmds = new arraylist<abstractmembermetadata>(mmds); embmmds.add(mmd); columnmetadata[] colmds = mmd.getcolumnmetadata(); string colname = namingfactory.getcolumnname(embmmds, 0); columnimpl col = addembeddedcolumn(colname, null); col.setnested(true); if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getposition() != null) { col.setposition(embmdmmd.getcolumnmetadata()[0].getposition()); } else if (colmds != null && colmds.length == 1 && colmds[0].getposition() != null) { col.setposition(colmds[0].getposition()); } if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getjdbctype() != null) { col.setjdbctype(embmdmmd.getcolumnmetadata()[0].getjdbctype()); } else if (colmds != null && colmds.length == 1 && colmds[0].getjdbctype() != null) { col.setjdbctype(colmds[0].getjdbctype()); } membercolumnmapping mapping = new membercolumnmappingimpl(mmd, col); col.setmembercolumnmapping(mapping); if (schemaverifier != null) { schemaverifier.attributeembeddedmember(mapping, embmmds); } mappingbyembeddedmember.put(getembeddedmembernavigatedpath(embmmds), mapping); processembeddedmember(embmmds, mmgr.getmetadataforclass(mmd.getarray().getelementtype(), clr), clr, embmdmmd != null ? embmdmmd.getembeddedmetadata() : null, true); } else { nucleuslogger.datastore_schema.warn("member " + mmd.getfullfieldname() + " is a (nested) embedded array. not supported for this datastore so ignoring"); continue; } } } } else { list<abstractmembermetadata> embmmds = new arraylist<abstractmembermetadata>(mmds); embmmds.add(mmd); columnmetadata[] colmds = mmd.getcolumnmetadata(); if (relationtype != relationtype.none) { string colname = namingfactory.getcolumnname(embmmds, 0); columnimpl col = addembeddedcolumn(colname, null); col.setnested(ownernested); if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getposition() != null) { col.setposition(embmdmmd.getcolumnmetadata()[0].getposition()); } else if (colmds != null && colmds.length == 1 && colmds[0].getposition() != null) { col.setposition(colmds[0].getposition()); } if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getjdbctype() != null) { col.setjdbctype(embmdmmd.getcolumnmetadata()[0].getjdbctype()); } else if (colmds != null && colmds.length == 1 && colmds[0].getjdbctype() != null) { col.setjdbctype(colmds[0].getjdbctype()); } membercolumnmapping mapping = new membercolumnmappingimpl(mmd, col); col.setmembercolumnmapping(mapping); if (schemaverifier != null) { schemaverifier.attributeembeddedmember(mapping, embmmds); } mappingbyembeddedmember.put(getembeddedmembernavigatedpath(embmmds), mapping); } else { typeconverter typeconv = gettypeconverterformember(mmd, colmds, typemgr); if (typeconv != null) { if (typeconv instanceof multicolumnconverter) { class[] coljavatypes = ((multicolumnconverter)typeconv).getdatastorecolumntypes(); column[] cols = new column[coljavatypes.length]; for (int j=0;j<coljavatypes.length;j++) { string colname = namingfactory.getcolumnname(embmmds, j); columnimpl col = 
addembeddedcolumn(colname, typeconv); col.setnested(ownernested); if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == coljavatypes.length && embmdmmd.getcolumnmetadata()[j].getposition() != null) { col.setposition(embmdmmd.getcolumnmetadata()[j].getposition()); } else if (colmds != null && colmds.length == coljavatypes.length && colmds[j].getposition() != null) { col.setposition(colmds[j].getposition()); } if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == coljavatypes.length && embmdmmd.getcolumnmetadata()[j].getjdbctype() != null) { col.setjdbctype(embmdmmd.getcolumnmetadata()[j].getjdbctype()); } else if (colmds != null && colmds.length == coljavatypes.length && colmds[j].getjdbctype() != null) { col.setjdbctype(colmds[j].getjdbctype()); } cols[j] = col; } membercolumnmapping mapping = new membercolumnmappingimpl(mmd, cols, typeconv); for (int j=0;j<coljavatypes.length;j++) { ((columnimpl)cols[j]).setmembercolumnmapping(mapping); } if (schemaverifier != null) { schemaverifier.attributeembeddedmember(mapping, embmmds); } mappingbyembeddedmember.put(getembeddedmembernavigatedpath(embmmds), mapping); } else { string colname = namingfactory.getcolumnname(embmmds, 0); columnimpl col = addembeddedcolumn(colname, typeconv); col.setnested(ownernested); if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getposition() != null) { col.setposition(embmdmmd.getcolumnmetadata()[0].getposition()); } else if (colmds != null && colmds.length == 1 && colmds[0].getposition() != null) { col.setposition(colmds[0].getposition()); } if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getjdbctype() != null) { col.setjdbctype(embmdmmd.getcolumnmetadata()[0].getjdbctype()); } else if (colmds != null && colmds.length == 1 && colmds[0].getjdbctype() != null) { col.setjdbctype(colmds[0].getjdbctype()); } membercolumnmapping mapping = new membercolumnmappingimpl(mmd, col); col.setmembercolumnmapping(mapping); mapping.settypeconverter(typeconv); if (schemaverifier != null) { schemaverifier.attributeembeddedmember(mapping, embmmds); } mappingbyembeddedmember.put(getembeddedmembernavigatedpath(embmmds), mapping); } } else { string colname = namingfactory.getcolumnname(embmmds, 0); columnimpl col = addembeddedcolumn(colname, null); col.setnested(ownernested); abstractmembermetadata themmd = embmmds.get(0); if (themmd.isprimarykey()) { col.setprimarykey(); } if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getposition() != null) { col.setposition(embmdmmd.getcolumnmetadata()[0].getposition()); } else if (colmds != null && colmds.length == 1 && colmds[0].getposition() != null) { col.setposition(colmds[0].getposition()); } if (embmdmmd != null && embmdmmd.getcolumnmetadata() != null && embmdmmd.getcolumnmetadata().length == 1 && embmdmmd.getcolumnmetadata()[0].getjdbctype() != null) { col.setjdbctype(embmdmmd.getcolumnmetadata()[0].getjdbctype()); } else if (colmds != null && colmds.length == 1 && colmds[0].getjdbctype() != null) { col.setjdbctype(colmds[0].getjdbctype()); } membercolumnmapping mapping = new membercolumnmappingimpl(mmd, col); col.setmembercolumnmapping(mapping); if (schemaverifier != null) { schemaverifier.attributeembeddedmember(mapping, 
embmmds); } mappingbyembeddedmember.put(getembeddedmembernavigatedpath(embmmds), mapping); } } } } } | dcheung2/datanucleus-core | [
1,
1,
0,
0
] |
17,020 | String probeGemHomeForTesting() {
return this.gemHome;
} | String probeGemHomeForTesting() {
return this.gemHome;
} | string probegemhomefortesting() { return this.gemhome; } | dmikurube/embulk | [
0,
1,
0,
0
] |
8,997 | protected void visit(final RuleDescr descr) {
// This is the NAME of the rule, not a reference to it!!
String ruleName = getPackagePrefix() + descr.getName();
addResource(ruleName,
ResourceType.RULE);
// This is, on other hand, is a reference to the parent rule (because it's used in inheritance)
String parentRuleName = descr.getParentName();
if (parentRuleName != null) {
addResourceReference(parentRuleName,
ResourceType.RULE);
}
for (AttributeDescr d : descr.getAttributes().values()) {
visit(d);
}
visit(descr.getLhs());
visitConsequence(descr.getConsequence()); // need compilation for this..
for (String namedConsequence : descr.getNamedConsequences().keySet()) {
// TODO
// ? addResourceReference(namedConsequence, PartType.NAMED_CONSEQUENCE);
}
visitAnnos(descr);
} | protected void visit(final RuleDescr descr) {
String ruleName = getPackagePrefix() + descr.getName();
addResource(ruleName,
ResourceType.RULE);
String parentRuleName = descr.getParentName();
if (parentRuleName != null) {
addResourceReference(parentRuleName,
ResourceType.RULE);
}
for (AttributeDescr d : descr.getAttributes().values()) {
visit(d);
}
visit(descr.getLhs());
visitConsequence(descr.getConsequence());
for (String namedConsequence : descr.getNamedConsequences().keySet()) {
}
visitAnnos(descr);
} | protected void visit(final ruledescr descr) { string rulename = getpackageprefix() + descr.getname(); addresource(rulename, resourcetype.rule); string parentrulename = descr.getparentname(); if (parentrulename != null) { addresourcereference(parentrulename, resourcetype.rule); } for (attributedescr d : descr.getattributes().values()) { visit(d); } visit(descr.getlhs()); visitconsequence(descr.getconsequence()); for (string namedconsequence : descr.getnamedconsequences().keyset()) { } visitannos(descr); } | etirelli/kie-wb-common | [
0,
0,
0,
0
] |
832 | public Raster getRaster(int xOffset, int yOffset, int w, int h) {
ColorModel cm = getColorModel();
if (raster == null) createRaster();
// TODO: eventually use caching here
WritableRaster childRaster = cm.createCompatibleWritableRaster(w, h);
Rectangle2D childRect = new Rectangle2D.Double(xOffset, yOffset, w, h);
if (!childRect.intersects(deviceBounds)) {
// usually doesn't happen ...
return childRaster;
}
Rectangle2D destRect = new Rectangle2D.Double();
Rectangle2D.intersect(childRect, deviceBounds, destRect);
int dx = (int)(destRect.getX()-deviceBounds.getX());
int dy = (int)(destRect.getY()-deviceBounds.getY());
int dw = (int)destRect.getWidth();
int dh = (int)destRect.getHeight();
Object data = raster.getDataElements(dx, dy, dw, dh, null);
dx = (int)(destRect.getX()-childRect.getX());
dy = (int)(destRect.getY()-childRect.getY());
childRaster.setDataElements(dx, dy, dw, dh, data);
return childRaster;
} | public Raster getRaster(int xOffset, int yOffset, int w, int h) {
ColorModel cm = getColorModel();
if (raster == null) createRaster();
WritableRaster childRaster = cm.createCompatibleWritableRaster(w, h);
Rectangle2D childRect = new Rectangle2D.Double(xOffset, yOffset, w, h);
if (!childRect.intersects(deviceBounds)) {
return childRaster;
}
Rectangle2D destRect = new Rectangle2D.Double();
Rectangle2D.intersect(childRect, deviceBounds, destRect);
int dx = (int)(destRect.getX()-deviceBounds.getX());
int dy = (int)(destRect.getY()-deviceBounds.getY());
int dw = (int)destRect.getWidth();
int dh = (int)destRect.getHeight();
Object data = raster.getDataElements(dx, dy, dw, dh, null);
dx = (int)(destRect.getX()-childRect.getX());
dy = (int)(destRect.getY()-childRect.getY());
childRaster.setDataElements(dx, dy, dw, dh, data);
return childRaster;
} | public raster getraster(int xoffset, int yoffset, int w, int h) { colormodel cm = getcolormodel(); if (raster == null) createraster(); writableraster childraster = cm.createcompatiblewritableraster(w, h); rectangle2d childrect = new rectangle2d.double(xoffset, yoffset, w, h); if (!childrect.intersects(devicebounds)) { return childraster; } rectangle2d destrect = new rectangle2d.double(); rectangle2d.intersect(childrect, devicebounds, destrect); int dx = (int)(destrect.getx()-devicebounds.getx()); int dy = (int)(destrect.gety()-devicebounds.gety()); int dw = (int)destrect.getwidth(); int dh = (int)destrect.getheight(); object data = raster.getdataelements(dx, dy, dw, dh, null); dx = (int)(destrect.getx()-childrect.getx()); dy = (int)(destrect.gety()-childrect.gety()); childraster.setdataelements(dx, dy, dw, dh, data); return childraster; } | fomich-artem/poi | [
1,
0,
0,
0
] |
17,336 | @Test(expected = TaskExecutionException.class)
@Ignore("Test execution is depending on server technical characteristics.")
// ToDo: Need to rewrite.
public void Should_throwWhenAnotherTaskIsBeingExecuted()
throws Exception {
thread.execute(() -> {
try {
Thread.sleep(200);
} catch (InterruptedException e) {
thread.interrupt();
}
});
thread.execute(mock(ITask.class));
} | @Test(expected = TaskExecutionException.class)
@Ignore("Test execution is depending on server technical characteristics.")
public void Should_throwWhenAnotherTaskIsBeingExecuted()
throws Exception {
thread.execute(() -> {
try {
Thread.sleep(200);
} catch (InterruptedException e) {
thread.interrupt();
}
});
thread.execute(mock(ITask.class));
} | @test(expected = taskexecutionexception.class) @ignore("test execution is depending on server technical characteristics.") public void should_throwwhenanothertaskisbeingexecuted() throws exception { thread.execute(() -> { try { thread.sleep(200); } catch (interruptedexception e) { thread.interrupt(); } }); thread.execute(mock(itask.class)); } | d-protsenko/smartactors-core | [
0,
1,
0,
0
] |
17,337 | @Test(expected = TaskExecutionException.class)
@Ignore("Test execution is depending on server technical characteristics.")
// ToDo: Need to rewrite.
public void Should_throwWhenThreadIsNotAlive()
throws Exception {
ITask taskMock = mock(ITask.class);
thread.interrupt();
thread.execute(taskMock);
fail();
} | @Test(expected = TaskExecutionException.class)
@Ignore("Test execution is depending on server technical characteristics.")
public void Should_throwWhenThreadIsNotAlive()
throws Exception {
ITask taskMock = mock(ITask.class);
thread.interrupt();
thread.execute(taskMock);
fail();
} | @test(expected = taskexecutionexception.class) @ignore("test execution is depending on server technical characteristics.") public void should_throwwhenthreadisnotalive() throws exception { itask taskmock = mock(itask.class); thread.interrupt(); thread.execute(taskmock); fail(); } | d-protsenko/smartactors-core | [
0,
1,
0,
0
] |