max_stars_repo_path
stringlengths 6
1.02k
| max_stars_repo_name
stringlengths 6
114
| max_stars_count
int64 0
191k
| id
stringlengths 1
8
| content
stringlengths 1
1.05M
| score
float64 -0.93
3.95
| int_score
int64 0
4
|
---|---|---|---|---|---|---|
src/org/opengts/util/DatagramMessage.java | paragp/GTS-PreUAT | 0 | 1 | // ----------------------------------------------------------------------------
// Copyright 2007-2015, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2007/11/28 <NAME>
// -Initial release
// 2009/01/28 <NAME>
// -Improved command-line interface
// 2015/08/16 <NAME>
// -Added "udpTest" command-line option
// ----------------------------------------------------------------------------
package org.opengts.util;
import java.lang.*;
import java.util.*;
import java.io.*;
import java.net.*;
/**
*** A class for sending and recieving datagram messages [CHECK]
**/
public class DatagramMessage
{

    // ------------------------------------------------------------------------

    protected DatagramSocket datagramSocket = null; // underlying UDP socket (send and/or receive)
    protected DatagramPacket sendPacket     = null; // reusable outbound packet; holds the remote host/port
    protected DatagramPacket recvPacket     = null; // packet last filled by "receive(...)"

    /**
    *** For subclassing only
    **/
    protected DatagramMessage()
    {
    }

    /**
    *** Constructor for receiving messages
    *** @param port The local port on which to listen
    *** @throws IOException if a socket error occurs
    *** @throws UnknownHostException if the IP address of the host could not be
    ***         determined
    **/
    public DatagramMessage(int port)
        throws IOException, UnknownHostException
    {
        this.datagramSocket = new DatagramSocket(port);
    }

    /**
    *** Constructor for sending messages
    *** @param destHost The remote(destination) host address
    *** @param destPort The remote(destination) port to use
    *** @throws IOException if a socket error occurs
    *** @throws UnknownHostException if the IP address of the host could not be
    ***         determined
    **/
    public DatagramMessage(String destHost, int destPort)
        throws IOException, UnknownHostException
    {
        this(InetAddress.getByName(destHost), destPort);
    }

    /**
    *** Constructor for sending messages
    *** @param destHost The remote(destination) host address
    *** @param destPort The remote(destination) port to use
    *** @throws IOException if a socket error occurs
    **/
    public DatagramMessage(InetAddress destHost, int destPort)
        throws IOException
    {
        this.datagramSocket = new DatagramSocket(); // ephemeral local port
        this.setRemoteHost(destHost, destPort);
    }

    /**
    *** Constructor for sending messages
    *** @param destHost The remote(destination) host address
    *** @param destPort The remote(destination) port to use
    *** @param bindPort The local port to bind
    *** @throws IOException if a socket error occurs
    *** @throws UnknownHostException if the IP address of the host could not be
    ***         determined
    **/
    public DatagramMessage(String destHost, int destPort, int bindPort)
        throws IOException, UnknownHostException
    {
        this(InetAddress.getByName(destHost), destPort,
            bindPort, null);
    }

    /**
    *** Constructor for sending messages
    *** @param destHost The remote(destination) host address
    *** @param destPort The remote(destination) port to use
    *** @param bindPort The local port to bind
    *** @param bindAddr The local address to bind (may be null/blank for any-local-address)
    *** @throws IOException if a socket error occurs
    *** @throws UnknownHostException if the IP address of the host could not be
    ***         determined
    **/
    public DatagramMessage(String destHost, int destPort, int bindPort, String bindAddr)
        throws IOException, UnknownHostException
    {
        this(InetAddress.getByName(destHost), destPort,
            bindPort, (!StringTools.isBlank(bindAddr)? InetAddress.getByName(bindAddr) : null));
    }

    /**
    *** Constructor for sending messages
    *** @param destHost The remote(destination) host address
    *** @param destPort The remote(destination) port to use
    *** @param bindPort The local port to bind
    *** @throws IOException if a socket error occurs
    **/
    public DatagramMessage(InetAddress destHost, int destPort, int bindPort)
        throws IOException
    {
        this(destHost, destPort,
            bindPort, null);
    }

    /**
    *** Constructor for sending messages
    *** @param destHost The remote(destination) host address
    *** @param destPort The remote(destination) port to use
    *** @param bindPort The local port to bind (&lt;= 0 for an ephemeral port)
    *** @param bindAddr The local address to bind (null for any local address)
    *** @throws IOException if a socket error occurs
    **/
    public DatagramMessage(InetAddress destHost, int destPort, int bindPort, InetAddress bindAddr)
        throws IOException
    {
        if (bindPort <= 0) {
            // -- no explicit local port requested
            this.datagramSocket = new DatagramSocket();
        } else
        if (bindAddr == null) {
            // -- explicit local port, any local address
            this.datagramSocket = new DatagramSocket(bindPort);
        } else {
            // -- explicit local port and address
            this.datagramSocket = new DatagramSocket(bindPort, bindAddr);
        }
        this.setRemoteHost(destHost, destPort);
    }

    // ------------------------------------------------------------------------

    /**
    *** Closes the datagram socket
    **/
    public void close()
        throws IOException
    {
        this.datagramSocket.close();
    }

    // ------------------------------------------------------------------------

    /**
    *** Set the remote(destination) host
    *** @param host The remote host address
    *** @param port The remote host port
    *** @throws IOException if an error occurs
    **/
    public void setRemoteHost(String host, int port)
        throws IOException
    {
        this.setRemoteHost(InetAddress.getByName(host), port);
    }

    /**
    *** Set the remote(destination) host
    *** @param host The remote host address
    *** @param port The remote host port
    *** @throws IOException if an error occurs
    **/
    public void setRemoteHost(InetAddress host, int port)
        throws IOException
    {
        if (this.sendPacket != null) {
            // -- reuse the existing packet, just retarget it
            this.sendPacket.setAddress(host);
            this.sendPacket.setPort(port);
        } else {
            // -- data/length are set later by "send(...)"
            this.sendPacket = new DatagramPacket(new byte[0], 0, host, port);
        }
    }

    /**
    *** Gets the datagram packet to be sent
    *** @return The datagram packet to be sent (null if "setRemoteHost" was never called)
    **/
    public DatagramPacket getSendPacket()
    {
        return this.sendPacket;
    }

    // ------------------------------------------------------------------------

    /**
    *** Send a String to the remote host
    *** @param msg The String to send to the remote host
    *** @throws IOException if the string is null or a socket error occurs
    **/
    public void send(String msg)
        throws IOException
    {
        this.send(StringTools.getBytes(msg));
    }

    /**
    *** Send an array of bytes to the remote host
    *** @param data The array of bytes to send to the remote host
    *** @throws IOException if the data is null or a socket error occurs
    **/
    public void send(byte data[])
        throws IOException
    {
        if (data != null) {
            this.send(data, data.length);
        } else {
            throw new IOException("Nothing to send");
        }
    }

    /**
    *** Send an array of bytes to the remote host
    *** @param data The array of bytes to send to the remote host
    *** @param len  The length of the data
    *** @throws IOException if the data is null/empty or a socket error occurs
    **/
    public void send(byte data[], int len)
        throws IOException
    {
        this.send(data, len, 1);
    }

    /**
    *** Send an array of bytes to the remote host
    *** @param data  The array of bytes to send to the remote host
    *** @param len   The length of the data
    *** @param count The number of times to send the message
    *** @throws IOException if there is nothing to send, the remote host was
    ***         never specified, or a socket error occurs
    **/
    public void send(byte data[], int len, int count)
        throws IOException
    {
        if (this.sendPacket == null) {
            throw new IOException("'setRemoteHost' not specified");
        } else
        if ((data == null) || (len <= 0) || (count <= 0)) {
            throw new IOException("Nothing to send");
        } else {
            this.sendPacket.setData(data);
            this.sendPacket.setLength(len);
            for (; count > 0; count--) {
                this.datagramSocket.send(this.sendPacket);
            }
        }
    }

    // ------------------------------------------------------------------------

    private static final int DEFAULT_PACKET_SIZE = 1024;

    /**
    *** Receive an array of bytes.
    *** Note: no socket timeout is set here, so this call blocks until a
    *** datagram arrives.
    *** @param maxBuffSize The maximum buffer size (&lt;= 0 for the default size)
    *** @return The received packet as a byte array (trimmed to the actual length)
    *** @throws IOException if a socket error occurs
    **/
    public byte[] receive(int maxBuffSize)
        throws IOException
    {

        /* receive data */
        byte dbuff[] = new byte[(maxBuffSize > 0)? maxBuffSize : DEFAULT_PACKET_SIZE];
        this.recvPacket = new DatagramPacket(dbuff, dbuff.length);
        this.datagramSocket.receive(this.recvPacket);
        // -- copy out only the bytes actually received
        byte newBuff[] = new byte[this.recvPacket.getLength()];
        System.arraycopy(this.recvPacket.getData(), 0, newBuff, 0, this.recvPacket.getLength());

        /* return received data */
        return newBuff;

    }

    /**
    *** Gets the DatagramPacket last received
    *** @return The DatagramPacket last received (null if "receive" was never called)
    **/
    public DatagramPacket getReceivePacket()
    {
        return this.recvPacket;
    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------
    // Example receiver:
    //   bin/exeJava org.opengts.util.DatagramMessage -port=39000 -recv -echo
    // Example transmitter:
    //   bin/exeJava org.opengts.util.DatagramMessage -host=localhost -port=39000 -send=hello -recv

    private static final String ARG_HOST[]       = new String[] { "host" , "h" };
    private static final String ARG_PORT[]       = new String[] { "port" , "p" };
    private static final String ARG_BINDADDR[]   = new String[] { "bindAddr" };
    private static final String ARG_BINDPORT[]   = new String[] { "bindPort" };
    private static final String ARG_SEND[]       = new String[] { "send" };
    private static final String ARG_RECEIVE[]    = new String[] { "recv", "receive" };
    private static final String ARG_ECHO[]       = new String[] { "echo", };
    private static final String ARG_STRESSTEST[] = new String[] { "udpStressTest" }; // "msgs/sec,seconds,pktSize"

    /**
    *** Prints command-line usage and exits with status 1
    **/
    private static void usage()
    {
        Print.logInfo("Usage:");
        Print.logInfo("  java ... " + DatagramMessage.class.getName() + " {options}");
        Print.logInfo("'Send' Options:");
        Print.logInfo("  -bindAddr=<ip>    The local bind address");
        Print.logInfo("  -bindPort=<port>  The local bind port");
        Print.logInfo("  -host=<host>      The destination host");
        Print.logInfo("  -port=<port>      The destination port");
        Print.logInfo("  -send=<data>      The data to send (prefix with '0x' for hex data)");
        Print.logInfo("  -recv             Set to 'receive' mode after sending");
        Print.logInfo("'Receive' Options:");
        Print.logInfo("  -port=<port>      The port on which to listen for incoming data");
        Print.logInfo("  -recv             Set to 'receive' mode");
        Print.logInfo("  -echo             Echo received packet back to sender (implies '-recv')");
        System.exit(1);
    }

    /**
    *** Main entry point for testing/debugging
    *** @param argv Comand-line arguments
    **/
    public static void main(String argv[])
    {
        RTConfig.setCommandLineArgs(argv);
        String  host    = RTConfig.getString(ARG_HOST, null);
        int     port    = RTConfig.getInt(ARG_PORT, 0);
        boolean cmdEcho = RTConfig.hasProperty(ARG_ECHO);
        boolean cmdRecv = RTConfig.hasProperty(ARG_RECEIVE);

        /* send data */
        if (RTConfig.hasProperty(ARG_SEND)) {
            if (StringTools.isBlank(host)) {
                Print.logError("Target host not specified");
                usage();
            }
            if (port <= 0) {
                Print.logError("Target port not specified");
                usage();
            }
            DatagramMessage dgm = null;
            try {
                int bindPort = RTConfig.getInt(ARG_BINDPORT, -1);
                String bindAddr = RTConfig.getString(ARG_BINDADDR, null);
                dgm = new DatagramMessage(host, port, bindPort, bindAddr);
                String dataStr = RTConfig.getString(ARG_SEND,"Hello World");
                // -- use StringTools.getBytes for charset consistency with "send(String)"
                byte send[] = dataStr.startsWith("0x")? StringTools.parseHex(dataStr,null) : StringTools.getBytes(dataStr);
                dgm.send(send);
                Print.logInfo("Datagram sent to %s:%d", host, port);
                if (!cmdRecv) {
                    // -- skip attempting to receive message
                } else
                if (bindPort <= 0) {
                    Print.logWarn("'-recv' requires '-bindPort', receive ignored.");
                } else {
                    Print.sysPrintln("Waiting for incoming data on port %d ...", bindPort);
                    byte recv[] = dgm.receive(1000); // 1000 is the max buffer size (not a timeout); blocks until data arrives
                    SocketAddress sa = dgm.getReceivePacket().getSocketAddress();
                    if (sa instanceof InetSocketAddress) {
                        int recvPort = dgm.getReceivePacket().getPort();
                        InetAddress hostAddr = ((InetSocketAddress)sa).getAddress();
                        Print.logInfo("Received from '" + hostAddr + ":" + recvPort + "' - 0x" + StringTools.toHexString(recv));
                    }
                }
            } catch (Throwable th) {
                Print.logException("Error", th);
                System.exit(99);
            } finally {
                try {
                    if (dgm != null) {
                        dgm.close();
                    }
                } catch (IOException ioe) {
                    // -- ignore
                }
            }
            System.exit(0);
        }

        /* receive data */
        if (cmdRecv || cmdEcho) {
            if (port <= 0) {
                Print.logError("Target port not specified");
                usage();
            }
            if (!StringTools.isBlank(host)) {
                Print.logWarn("Specified 'host' will be ignored");
            }
            DatagramMessage dgm = null;
            try {
                dgm = new DatagramMessage(port);
                Print.sysPrintln("Waiting for incoming data on port %d ...", port);
                byte recv[] = dgm.receive(1000); // 1000 is the max buffer size (not a timeout)
                SocketAddress sa = dgm.getReceivePacket().getSocketAddress();
                if (sa instanceof InetSocketAddress) {
                    InetAddress hostAddr = ((InetSocketAddress)sa).getAddress();
                    int recvPort = dgm.getReceivePacket().getPort();
                    Print.logInfo("Received from host "+hostAddr+"["+recvPort+"]: 0x" + StringTools.toHexString(recv));
                    if (cmdEcho) {
                        try { Thread.sleep(500L); } catch (Throwable th) { /* ignore */ }
                        Print.sysPrintln("Echoing packet back to sender ...");
                        dgm.setRemoteHost(hostAddr, recvPort);
                        dgm.send(recv);
                    }
                }
            } catch (Throwable th) {
                Print.logException("Error", th);
                System.exit(99);
            } finally {
                try {
                    if (dgm != null) {
                        dgm.close();
                    }
                } catch (IOException ioe) {
                    // -- ignore
                }
            }
            System.exit(0);
        }

        /* show usage */
        usage();

    }

}
| 0.976563 | 1 |
engine/src/org/pentaho/di/job/entries/ssh2get/JobEntrySSH2GET.java | krivera-pentaho/pentaho-kettle | 1 | 9 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.ssh2get;
import static org.pentaho.di.job.entry.validator.AndValidator.putValidators;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.andValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.fileExistsValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.integerValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notBlankValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notNullValidator;
import java.io.File;
import java.io.FileOutputStream;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileType;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceEntry;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
import com.trilead.ssh2.Connection;
import com.trilead.ssh2.HTTPProxyData;
import com.trilead.ssh2.KnownHosts;
import com.trilead.ssh2.SFTPv3Client;
import com.trilead.ssh2.SFTPv3DirectoryEntry;
import com.trilead.ssh2.SFTPv3FileAttributes;
import com.trilead.ssh2.SFTPv3FileHandle;
/**
* This defines a SSH2 GET job entry.
*
* @author Samatar
* @since 17-12-2007
*
*/
public class JobEntrySSH2GET extends JobEntryBase implements Cloneable, JobEntryInterface {
private static Class<?> PKG = JobEntrySSH2GET.class; // for i18n purposes, needed by Translator2!!
private String serverName;
private String userName;
private String password;
private String serverPort;
private String ftpDirectory;
private String localDirectory;
private String wildcard;
private boolean onlyGettingNewFiles; /* Don't overwrite files */
private boolean usehttpproxy;
private String httpProxyHost;
private String httpproxyport;
private String httpproxyusername;
private String httpProxyPassword;
private boolean publicpublickey;
private String keyFilename;
private String keyFilePass;
private boolean useBasicAuthentication;
private String afterFtpPut;
private String destinationfolder;
private boolean createdestinationfolder;
private boolean cachehostkey;
private int timeout;
boolean createtargetfolder;
boolean includeSubFolders;
static KnownHosts database = new KnownHosts();
int nbfilestoget = 0;
int nbgot = 0;
int nbrerror = 0;
/**
 * Creates the job entry with the given name and default settings
 * (port 22, no proxy, no public key, "do_nothing" post-transfer action).
 *
 * @param n the name of the job entry
 */
public JobEntrySSH2GET( String n ) {
  super( n, "" );
  serverName = null;
  publicpublickey = false;
  keyFilename = null;
  keyFilePass = null;
  usehttpproxy = false;
  httpProxyHost = null;
  httpproxyport = null;
  httpproxyusername = null;
  // fix: restore null default (source contained an invalid "<PASSWORD>" placeholder token)
  httpProxyPassword = null;
  serverPort = "22";
  useBasicAuthentication = false;
  afterFtpPut = "do_nothing";
  destinationfolder = null;
  includeSubFolders = false;
  createdestinationfolder = false;
  createtargetfolder = false;
  cachehostkey = false;
  timeout = 0;
}
// Default constructor: delegates to the named constructor with an empty name.
public JobEntrySSH2GET() {
  this( "" );
}
// Shallow clone via JobEntryBase.clone(); all fields here are immutable
// Strings/booleans/ints, so a field-by-field copy is sufficient.
public Object clone() {
  JobEntrySSH2GET je = (JobEntrySSH2GET) super.clone();
  return je;
}
/**
 * Serializes this job entry's settings as XML tag/value pairs, appended
 * after the base-entry XML. The password is encrypted unless it contains
 * variables. Tag names must stay in sync with {@code loadXML}.
 *
 * @return the XML fragment for this job entry
 */
public String getXML() {
  // StringBuilder instead of StringBuffer: local-only, no synchronization needed
  StringBuilder retval = new StringBuilder( 128 );
  retval.append( super.getXML() );
  retval.append( " " ).append( XMLHandler.addTagValue( "servername", serverName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "username", userName ) );
  retval.append( " " ).append(
    XMLHandler.addTagValue( "password", Encr.encryptPasswordIfNotUsingVariables( getPassword() ) ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "serverport", serverPort ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "ftpdirectory", ftpDirectory ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "localdirectory", localDirectory ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "wildcard", wildcard ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "only_new", onlyGettingNewFiles ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "usehttpproxy", usehttpproxy ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "httpproxyhost", httpProxyHost ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "httpproxyport", httpproxyport ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "httpproxyusername", httpproxyusername ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "httpproxypassword", httpProxyPassword ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "publicpublickey", publicpublickey ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "keyfilename", keyFilename ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "keyfilepass", keyFilePass ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "usebasicauthentication", useBasicAuthentication ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "afterftpput", afterFtpPut ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "destinationfolder", destinationfolder ) );
  retval
    .append( " " ).append( XMLHandler.addTagValue( "createdestinationfolder", createdestinationfolder ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "cachehostkey", cachehostkey ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "timeout", timeout ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "createtargetfolder", createtargetfolder ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "includeSubFolders", includeSubFolders ) );
  return retval.toString();
}
// Restores this job entry's settings from the XML produced by getXML().
// Tag names must stay in sync with getXML(); booleans are serialized as "Y"/"N".
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    serverName = XMLHandler.getTagValue( entrynode, "servername" );
    userName = XMLHandler.getTagValue( entrynode, "username" );
    // password may be stored encrypted; decrypt only when it is
    password = Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "password" ) );
    serverPort = XMLHandler.getTagValue( entrynode, "serverport" );
    ftpDirectory = XMLHandler.getTagValue( entrynode, "ftpdirectory" );
    localDirectory = XMLHandler.getTagValue( entrynode, "localdirectory" );
    wildcard = XMLHandler.getTagValue( entrynode, "wildcard" );
    onlyGettingNewFiles = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "only_new" ) );
    usehttpproxy = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "usehttpproxy" ) );
    httpProxyHost = XMLHandler.getTagValue( entrynode, "httpproxyhost" );
    httpproxyport = XMLHandler.getTagValue( entrynode, "httpproxyport" );
    httpproxyusername = XMLHandler.getTagValue( entrynode, "httpproxyusername" );
    httpProxyPassword = XMLHandler.getTagValue( entrynode, "httpproxypassword" );
    publicpublickey = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "publicpublickey" ) );
    keyFilename = XMLHandler.getTagValue( entrynode, "keyfilename" );
    keyFilePass = XMLHandler.getTagValue( entrynode, "keyfilepass" );
    useBasicAuthentication =
      "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "usebasicauthentication" ) );
    afterFtpPut = XMLHandler.getTagValue( entrynode, "afterftpput" );
    destinationfolder = XMLHandler.getTagValue( entrynode, "destinationfolder" );
    createdestinationfolder =
      "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "createdestinationfolder" ) );
    cachehostkey = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "cachehostkey" ) );
    // missing/unparsable timeout defaults to 0 (no timeout)
    timeout = Const.toInt( XMLHandler.getTagValue( entrynode, "timeout" ), 0 );
    createtargetfolder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "createtargetfolder" ) );
    includeSubFolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "includeSubFolders" ) );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobSSH2GET.Log.UnableLoadXML", xe.getMessage() ) );
  }
}
/**
 * Restores this job entry's settings from the repository. Attribute codes
 * must stay in sync with {@code saveRep}.
 *
 * @param rep the repository to read from
 * @param metaStore the metastore (unused here)
 * @param id_jobentry the id of this job entry in the repository
 * @param databases shared databases (unused here)
 * @param slaveServers shared slave servers (unused here)
 * @throws KettleException if the attributes cannot be read
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    serverName = rep.getJobEntryAttributeString( id_jobentry, "servername" );
    userName = rep.getJobEntryAttributeString( id_jobentry, "username" );
    // password may be stored encrypted; decrypt only when it is
    password =
      Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "password" ) );
    serverPort = rep.getJobEntryAttributeString( id_jobentry, "serverport" );
    ftpDirectory = rep.getJobEntryAttributeString( id_jobentry, "ftpdirectory" );
    localDirectory = rep.getJobEntryAttributeString( id_jobentry, "localdirectory" );
    wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
    onlyGettingNewFiles = rep.getJobEntryAttributeBoolean( id_jobentry, "only_new" );
    usehttpproxy = rep.getJobEntryAttributeBoolean( id_jobentry, "usehttpproxy" );
    httpProxyHost = rep.getJobEntryAttributeString( id_jobentry, "httpproxyhost" );
    // fix: "httpproxyport" is saved by saveRep() but was never loaded here
    httpproxyport = rep.getJobEntryAttributeString( id_jobentry, "httpproxyport" );
    httpproxyusername = rep.getJobEntryAttributeString( id_jobentry, "httpproxyusername" );
    httpProxyPassword = rep.getJobEntryAttributeString( id_jobentry, "httpproxypassword" );
    publicpublickey = rep.getJobEntryAttributeBoolean( id_jobentry, "publicpublickey" );
    keyFilename = rep.getJobEntryAttributeString( id_jobentry, "keyfilename" );
    keyFilePass = rep.getJobEntryAttributeString( id_jobentry, "keyfilepass" );
    useBasicAuthentication = rep.getJobEntryAttributeBoolean( id_jobentry, "usebasicauthentication" );
    afterFtpPut = rep.getJobEntryAttributeString( id_jobentry, "afterftpput" );
    destinationfolder = rep.getJobEntryAttributeString( id_jobentry, "destinationfolder" );
    createdestinationfolder = rep.getJobEntryAttributeBoolean( id_jobentry, "createdestinationfolder" );
    cachehostkey = rep.getJobEntryAttributeBoolean( id_jobentry, "cachehostkey" );
    timeout = (int) rep.getJobEntryAttributeInteger( id_jobentry, "timeout" );
    createtargetfolder = rep.getJobEntryAttributeBoolean( id_jobentry, "createtargetfolder" );
    includeSubFolders = rep.getJobEntryAttributeBoolean( id_jobentry, "includeSubFolders" );
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "JobSSH2GET.Log.UnableLoadRep", "" + id_jobentry, dbe.getMessage() ) );
  }
}
// Persists this job entry's settings to the repository. Attribute codes
// must stay in sync with loadRep(); the password is encrypted unless it
// contains variables.
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "servername", serverName );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "username", userName );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "password", Encr
      .encryptPasswordIfNotUsingVariables( password ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "serverport", serverPort );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "ftpdirectory", ftpDirectory );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "localdirectory", localDirectory );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "wildcard", wildcard );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "only_new", onlyGettingNewFiles );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "usehttpproxy", usehttpproxy );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "httpproxyhost", httpProxyHost );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "httpproxyport", httpproxyport );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "httpproxyusername", httpproxyusername );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "httpproxypassword", httpProxyPassword );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "publicpublickey", publicpublickey );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "keyfilename", keyFilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "keyfilepass", keyFilePass );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "usebasicauthentication", useBasicAuthentication );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "afterftpput", afterFtpPut );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "destinationfolder", destinationfolder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "createdestinationfolder", createdestinationfolder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "cachehostkey", cachehostkey );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "timeout", timeout );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "createtargetfolder", createtargetfolder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "includeSubFolders", includeSubFolders );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobSSH2GET.Log.UnableSaveRep", "" + id_job, dbe
      .getMessage() ) );
  }
}
/**
 * @return Returns the remote (FTP/SFTP) directory.
 */
public String getFtpDirectory() {
  return ftpDirectory;
}

/**
 * @param directory
 *          The remote (FTP/SFTP) directory to set.
 */
public void setFtpDirectory( String directory ) {
  this.ftpDirectory = directory;
}

/**
 * @return Returns the password.
 */
public String getPassword() {
  return password;
}

/**
 * @param password
 *          The password to set.
 */
public void setPassword( String password ) {
  this.password = password;
}

/**
 * @return Returns the action to execute after the (FTP/SSH) transfer.
 */
public String getAfterFTPPut() {
  return afterFtpPut;
}

/**
 * @param afterFtpPut
 *          The action after (FTP/SSH) transfer to execute
 */
public void setAfterFTPPut( String afterFtpPut ) {
  this.afterFtpPut = afterFtpPut;
}

/**
 * @param proxyPassword
 *          The httpproxypassword to set.
 */
public void setHTTPProxyPassword( String proxyPassword ) {
  this.httpProxyPassword = proxyPassword;
}

/**
 * @return Returns the HTTP proxy password.
 */
public String getHTTPProxyPassword() {
  return httpProxyPassword;
}

/**
 * @param keyFilePass
 *          The key file pass to set.
 */
public void setKeyFilePass( String keyFilePass ) {
  this.keyFilePass = keyFilePass;
}

/**
 * @return Returns the key file pass.
 */
public String getKeyFilePass() {
  return keyFilePass;
}
/**
 * @return Returns the serverName.
 */
public String getServerName() {
  return serverName;
}

/**
 * @param serverName
 *          The serverName to set.
 */
public void setServerName( String serverName ) {
  this.serverName = serverName;
}

/**
 * @param proxyhost
 *          The httpproxyhost to set.
 */
public void setHTTPProxyHost( String proxyhost ) {
  this.httpProxyHost = proxyhost;
}

/**
 * @return Returns the HTTP proxy host.
 */
public String getHTTPProxyHost() {
  return httpProxyHost;
}

/**
 * @param keyfilename
 *          The key filename to set.
 */
public void setKeyFilename( String keyfilename ) {
  this.keyFilename = keyfilename;
}

/**
 * @return Returns the key filename.
 */
public String getKeyFilename() {
  return keyFilename;
}

/**
 * @return Returns the userName.
 */
public String getUserName() {
  return userName;
}

/**
 * @param userName
 *          The userName to set.
 */
public void setUserName( String userName ) {
  this.userName = userName;
}

/**
 * @param proxyusername
 *          The httpproxyusername to set.
 */
public void setHTTPProxyUsername( String proxyusername ) {
  this.httpproxyusername = proxyusername;
}

/**
 * @return Returns the HTTP proxy user name.
 */
public String getHTTPProxyUsername() {
  return httpproxyusername;
}

/**
 * @return Returns the wildcard.
 */
public String getWildcard() {
  return wildcard;
}

/**
 * @param wildcard
 *          The wildcard to set.
 */
public void setWildcard( String wildcard ) {
  this.wildcard = wildcard;
}
/**
 * @return Returns the localDirectory.
 */
public String getlocalDirectory() {
  return localDirectory;
}

/**
 * @param localDirectory
 *          The localDirectory to set.
 */
public void setlocalDirectory( String localDirectory ) {
  this.localDirectory = localDirectory;
}

/**
 * @return Returns the onlyGettingNewFiles flag (when set, existing local files are not overwritten).
 */
public boolean isOnlyGettingNewFiles() {
  return onlyGettingNewFiles;
}

/**
 * @param onlyGettingNewFiles
 *          The onlyGettingNewFiles to set.
 */
public void setOnlyGettingNewFiles( boolean onlyGettingNewFiles ) {
  this.onlyGettingNewFiles = onlyGettingNewFiles;
}

/**
 * @param cachehostkeyin
 *          The cachehostkey to set.
 */
public void setCacheHostKey( boolean cachehostkeyin ) {
  this.cachehostkey = cachehostkeyin;
}

/**
 * @return Returns the cachehostkey.
 */
public boolean isCacheHostKey() {
  return cachehostkey;
}

/**
 * @param httpproxy
 *          The usehttpproxy to set.
 */
public void setUseHTTPProxy( boolean httpproxy ) {
  this.usehttpproxy = httpproxy;
}

/**
 * @return Returns the usehttpproxy.
 */
public boolean isUseHTTPProxy() {
  return usehttpproxy;
}

/**
 * @return Returns the use basic authentication flag.
 */
public boolean isUseBasicAuthentication() {
  return useBasicAuthentication;
}

/**
 * @param useBasicAuthentication
 *          The use basic authentication flag to set.
 */
public void setUseBasicAuthentication( boolean useBasicAuthentication ) {
  this.useBasicAuthentication = useBasicAuthentication;
}

/**
 * @param includeSubFolders
 *          The include sub folders flag to set.
 */
public void setIncludeSubFolders( boolean includeSubFolders ) {
  this.includeSubFolders = includeSubFolders;
}

/**
 * @return Returns the include sub folders flag.
 */
public boolean isIncludeSubFolders() {
  return includeSubFolders;
}

/**
 * @param createdestinationfolderin
 *          The createdestinationfolder to set.
 */
public void setCreateDestinationFolder( boolean createdestinationfolderin ) {
  this.createdestinationfolder = createdestinationfolderin;
}

/**
 * @return Returns the createdestinationfolder.
 */
public boolean isCreateDestinationFolder() {
  return createdestinationfolder;
}

/**
 * @return Returns the CreateTargetFolder.
 */
public boolean isCreateTargetFolder() {
  return createtargetfolder;
}

/**
 * @param createtargetfolderin
 *          The createtargetfolder to set.
 */
public void setCreateTargetFolder( boolean createtargetfolderin ) {
  this.createtargetfolder = createtargetfolderin;
}

/**
 * @param publickey
 *          The publicpublickey to set.
 */
public void setUsePublicKey( boolean publickey ) {
  this.publicpublickey = publickey;
}

/**
 * @return Returns the publicpublickey (use-public-key) flag.
 */
public boolean isUsePublicKey() {
  return publicpublickey;
}
public String getServerPort() {
return serverPort;
}
public void setServerPort( String serverPort ) {
this.serverPort = serverPort;
}
public void setHTTPProxyPort( String proxyport ) {
this.httpproxyport = proxyport;
}
public String getHTTPProxyPort() {
return httpproxyport;
}
public void setDestinationFolder( String destinationfolderin ) {
this.destinationfolder = destinationfolderin;
}
public String getDestinationFolder() {
return destinationfolder;
}
/**
* @param timeout
* The timeout to set.
*/
public void setTimeout( int timeout ) {
this.timeout = timeout;
}
/**
* @return Returns the timeout.
*/
public int getTimeout() {
return timeout;
}
/**
 * Executes the SSH2 GET job entry: resolves all variable-based settings,
 * validates the mandatory fields, opens an SSH-2 connection (optionally
 * through an HTTP proxy), authenticates with password or public key, and
 * downloads the remote files that match the wildcard into the local
 * directory (recursively when configured).
 *
 * @param previousResult the result of the previous job entry execution; updated in place
 * @param nr the job entry number
 * @return the same Result instance; setResult(true) only when the transfer ran without file errors
 */
public Result execute( Result previousResult, int nr ) {
  Result result = previousResult;
  result.setResult( false );
  if ( log.isRowLevel() ) {
    logRowlevel( BaseMessages.getString( PKG, "JobSSH2GET.Log.GettingFieldsValue" ) );
  }
  // Get real variable value
  String realServerName = environmentSubstitute( serverName );
  int realServerPort = Const.toInt( environmentSubstitute( serverPort ), 22 );
  String realUserName = environmentSubstitute( userName );
  String realServerPassword = Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( password ) );
  // Proxy Host
  String realProxyHost = environmentSubstitute( httpProxyHost );
  // NOTE(review): the proxy port falls back to 22, which looks copied from the
  // SSH default above; confirm this is the intended default for an HTTP proxy.
  int realProxyPort = Const.toInt( environmentSubstitute( httpproxyport ), 22 );
  String realproxyUserName = environmentSubstitute( httpproxyusername );
  String realProxyPassword =
    Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( httpProxyPassword ) );
  // Key file
  String realKeyFilename = environmentSubstitute( keyFilename );
  String relKeyFilepass = environmentSubstitute( keyFilePass );
  // target files
  String realLocalDirectory = environmentSubstitute( localDirectory );
  String realwildcard = environmentSubstitute( wildcard );
  // Remote source
  String realftpDirectory = environmentSubstitute( ftpDirectory );
  // Destination folder (Move to)
  String realDestinationFolder = environmentSubstitute( destinationfolder );
  try {
    // Remote source
    realftpDirectory = FTPUtils.normalizePath( realftpDirectory );
    // Destination folder (Move to)
    realDestinationFolder = FTPUtils.normalizePath( realDestinationFolder );
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.CanNotNormalizePath", e.getMessage() ) );
    result.setNrErrors( 1 );
    return result;
  }
  // Check for mandatory fields
  if ( log.isRowLevel() ) {
    logRowlevel( BaseMessages.getString( PKG, "JobSSH2GET.Log.CheckingMandatoryFields" ) );
  }
  boolean mandatoryok = true;
  if ( Const.isEmpty( realServerName ) ) {
    mandatoryok = false;
    logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.ServernameMissing" ) );
  }
  if ( usehttpproxy ) {
    if ( Const.isEmpty( realProxyHost ) ) {
      mandatoryok = false;
      logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.HttpProxyhostMissing" ) );
    }
  }
  if ( publicpublickey ) {
    if ( Const.isEmpty( realKeyFilename ) ) {
      mandatoryok = false;
      logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.KeyFileMissing" ) );
    } else {
      // Let's check if key file exists...
      if ( !new File( realKeyFilename ).exists() ) {
        mandatoryok = false;
        logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.KeyFileNotExist" ) );
      }
    }
  }
  if ( Const.isEmpty( realLocalDirectory ) ) {
    mandatoryok = false;
    logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.LocalFolderMissing" ) );
  } else {
    // Check if target folder exists...
    if ( !new File( realLocalDirectory ).exists() ) {
      if ( createtargetfolder ) {
        // Create Target folder
        if ( !CreateFolder( realLocalDirectory ) ) {
          mandatoryok = false;
        }
      } else {
        mandatoryok = false;
        logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.LocalFolderNotExists", realLocalDirectory ) );
      }
    } else {
      if ( !new File( realLocalDirectory ).isDirectory() ) {
        mandatoryok = false;
        logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.LocalFolderNotFolder", realLocalDirectory ) );
      }
    }
  }
  // A destination folder is only mandatory when files are moved after the transfer
  if ( afterFtpPut.equals( "move_file" ) ) {
    if ( Const.isEmpty( realDestinationFolder ) ) {
      mandatoryok = false;
      logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.DestinatFolderMissing" ) );
    }
  }
  if ( mandatoryok ) {
    Connection conn = null;
    SFTPv3Client client = null;
    boolean good = true;
    try {
      // Create a connection instance
      conn =
        getConnection(
          realServerName, realServerPort, realProxyHost, realProxyPort, realproxyUserName, realProxyPassword );
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.ConnectionInstanceCreated" ) );
      }
      // Connect, with or without a timeout, verifying the host key against the
      // database-backed cache when that option is enabled
      if ( timeout > 0 ) {
        // Use timeout
        // Cache Host Key
        if ( cachehostkey ) {
          conn.connect( new SimpleVerifier( database ), 0, timeout * 1000 );
        } else {
          conn.connect( null, 0, timeout * 1000 );
        }
      } else {
        // Cache Host Key
        if ( cachehostkey ) {
          conn.connect( new SimpleVerifier( database ) );
        } else {
          conn.connect();
        }
      }
      // Authenticate
      boolean isAuthenticated = false;
      if ( publicpublickey ) {
        isAuthenticated =
          conn.authenticateWithPublicKey( realUserName, new File( realKeyFilename ), relKeyFilepass );
      } else {
        isAuthenticated = conn.authenticateWithPassword( realUserName, realServerPassword );
      }
      // LET'S CHECK AUTHENTICATION ...
      if ( isAuthenticated == false ) {
        logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.AuthenticationFailed" ) );
      } else {
        if ( log.isBasic() ) {
          logBasic( BaseMessages.getString( PKG, "JobSSH2GET.Log.Connected", serverName, userName ) );
        }
        client = new SFTPv3Client( conn );
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.ProtocolVersion", ""
            + client.getProtocolVersion() ) );
        }
        // Check if ftp (source) directory exists
        if ( !Const.isEmpty( realftpDirectory ) ) {
          if ( !sshDirectoryExists( client, realftpDirectory ) ) {
            good = false;
            logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.RemoteDirectoryNotExist", realftpDirectory ) );
          } else if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.RemoteDirectoryExist", realftpDirectory ) );
          }
        }
        if ( realDestinationFolder != null ) {
          // Check now destination folder
          if ( !sshDirectoryExists( client, realDestinationFolder ) ) {
            if ( createdestinationfolder ) {
              if ( !CreateRemoteFolder( client, realDestinationFolder ) ) {
                good = false;
              }
            } else {
              good = false;
              logError( BaseMessages.getString(
                PKG, "JobSSH2GET.Log.DestinatFolderNotExist", realDestinationFolder ) );
            }
          }
        }
        if ( good ) {
          // Compile the wildcard once; null means "accept everything"
          Pattern pattern = null;
          if ( !Const.isEmpty( realwildcard ) ) {
            pattern = Pattern.compile( realwildcard );
          }
          if ( includeSubFolders ) {
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.RecursiveModeOn" ) );
            }
            copyRecursive( realftpDirectory, realLocalDirectory, client, pattern, parentJob );
          } else {
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.RecursiveModeOff" ) );
            }
            GetFiles( realftpDirectory, realLocalDirectory, client, pattern, parentJob );
          }
          /******************************** RESULT ********************/
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.Result.JobEntryEnd1" ) );
            logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.Result.TotalFiles", "" + nbfilestoget ) );
            logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.Result.TotalFilesPut", "" + nbgot ) );
            logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.Result.TotalFilesError", "" + nbrerror ) );
            logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.Result.JobEntryEnd2" ) );
          }
          // The entry only succeeds when every matched file transferred cleanly
          if ( nbrerror == 0 ) {
            result.setResult( true );
            /******************************** RESULT ********************/
          }
        }
      }
    } catch ( Exception e ) {
      result.setNrErrors( nbrerror );
      logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.Error.ErrorFTP", e.getMessage() ) );
    } finally {
      // Always release the SFTP session and the SSH connection
      if ( conn != null ) {
        conn.close();
      }
      if ( client != null ) {
        client.close();
      }
    }
  }
  return result;
}
/**
 * Builds a (not yet connected) SSH-2 Connection to the given server,
 * optionally routed through an HTTP proxy.
 *
 * <p>Fix: the original always installed an unauthenticated HTTPProxyData first
 * and then overwrote it when basic authentication was enabled; the proxy data
 * is now constructed exactly once.</p>
 *
 * @param servername host to connect to
 * @param serverport SSH port on the host
 * @param proxyhost HTTP proxy host (only used when usehttpproxy is set)
 * @param proxyport HTTP proxy port
 * @param proxyusername proxy user for basic authentication
 * @param proxypassword proxy password for basic authentication
 * @return the configured Connection instance (caller connects and closes it)
 */
private Connection getConnection( String servername, int serverport, String proxyhost, int proxyport,
  String proxyusername, String proxypassword ) {
  /* Create a connection instance */
  Connection conn = new Connection( servername, serverport );
  /* We want to connect through a HTTP proxy */
  if ( usehttpproxy ) {
    if ( useBasicAuthentication ) {
      // the proxy requires basic authentication: pass the credentials along
      conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) );
    } else {
      conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) );
    }
  }
  return conn;
}
/**
 * Checks whether a remote path exists and is a regular file.
 *
 * @param sftpClient the SFTP client to query with
 * @param filename the remote path to test
 * @return true if the path can be stat'ed and is a regular file; false otherwise
 */
public boolean sshFileExists( SFTPv3Client sftpClient, String filename ) {
  try {
    SFTPv3FileAttributes attributes = sftpClient.stat( filename );
    return attributes != null && attributes.isRegularFile();
  } catch ( Exception ignored ) {
    // a failed stat (e.g. no such file) is reported as "does not exist"
    return false;
  }
}
/**
 * Checks whether a local VFS path exists and is a plain file.
 *
 * @param filename the local path to test
 * @return true if the path exists and its type is FILE; false otherwise
 */
public boolean FileExists( String filename ) {
  try {
    FileObject file = KettleVFS.getFileObject( filename, this );
    return file.exists() && file.getType() == FileType.FILE;
  } catch ( Exception ignored ) {
    // any VFS failure is reported as "does not exist"
    return false;
  }
}
/**
 * Checks if a remote path is a directory.
 *
 * @param sftpClient the SFTP client to query with
 * @param filename the remote path to test
 * @return true, if filename can be stat'ed and is a directory; false on any error
 */
public boolean isDirectory( SFTPv3Client sftpClient, String filename ) {
  try {
    return sftpClient.stat( filename ).isDirectory();
  } catch ( Exception e ) {
    // Ignore errors
  }
  return false;
}
/**
 * Checks whether a remote directory exists.
 *
 * @param sftpClient the SFTP client to query with
 * @param directory the remote path to test
 * @return true if the path can be stat'ed and is a directory; false otherwise
 */
public boolean sshDirectoryExists( SFTPv3Client sftpClient, String directory ) {
  try {
    SFTPv3FileAttributes attributes = sftpClient.stat( directory );
    return attributes != null && attributes.isDirectory();
  } catch ( Exception ignored ) {
    // a failed stat (e.g. no such directory) is reported as "does not exist"
    return false;
  }
}
/**
 * Returns the file size of a remote file.
 *
 * @param sftpClient the SFTP client to query with
 * @param filename the remote file
 * @return the size of the file in bytes
 * @throws Exception when the file cannot be stat'ed (e.g. it does not exist)
 */
public long getFileSize( SFTPv3Client sftpClient, String filename ) throws Exception {
  return sftpClient.stat( filename ).size.longValue();
}
/**
 * Tests a file name against the configured wildcard.
 *
 * @param selectedfile the file name to test
 * @param pattern the compiled wildcard, or null when no wildcard is configured
 * @return true when no pattern is set, or when the whole name matches it
 */
private boolean getFileWildcard( String selectedfile, Pattern pattern ) {
  // No pattern configured means every file matches.
  if ( pattern == null ) {
    return true;
  }
  return pattern.matcher( selectedfile ).matches();
}
/**
 * Applies the configured post-transfer action to a remote file: deletes it
 * ("delete_file") or moves it into the destination folder ("move_file").
 *
 * @param sftpClient the SFTP client to operate with
 * @param filename the remote file the action applies to
 * @param destinationFolder the remote folder to move the file into (move mode)
 * @return true when the delete/move succeeded; false on failure or when
 *         afterFtpPut is neither of the two handled values
 */
private boolean deleteOrMoveFiles( SFTPv3Client sftpClient, String filename, String destinationFolder ) {
  boolean retval = false;
  // Delete the file if this is needed!
  if ( afterFtpPut.equals( "delete_file" ) ) {
    try {
      sftpClient.rm( filename );
      retval = true;
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.DeletedFile", filename ) );
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.Error.CanNotDeleteRemoteFile", filename ), e );
    }
  } else if ( afterFtpPut.equals( "move_file" ) ) {
    String DestinationFullFilename = destinationFolder + Const.FILE_SEPARATOR + filename;
    try {
      sftpClient.mv( filename, DestinationFullFilename );
      retval = true;
      if ( log.isDetailed() ) {
        // NOTE(review): this logs the "DeletedFile" message for a move -- looks
        // like a copy/paste of the delete branch; a "moved file" message key
        // seems intended (verify it exists in the message bundle before changing).
        logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.DeletedFile", filename ) );
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.Error.MovedFile", filename, destinationFolder ), e );
    }
  }
  return retval;
}
/**
 * Copies the plain files of a single remote directory (non-recursive) to the
 * local target directory. The "." and ".." entries and sub-directories are
 * skipped; files must match the wildcard pattern when one is set.
 *
 * @param sourceLocation
 *          the source directory on the remote host ("." when empty)
 * @param targetLocation
 *          the target directory on the local host
 * @param sftpClient
 *          is an instance of SFTPv3Client that makes SFTP client connection over SSH-2
 * @param pattern
 *          compiled wildcard, or null to accept every file
 * @param parentJob
 *          the running job; iteration stops when it is stopped
 * @throws Exception when listing the remote directory fails
 */
@SuppressWarnings( "unchecked" )
private void GetFiles( String sourceLocation, String targetLocation, SFTPv3Client sftpClient, Pattern pattern,
  Job parentJob ) throws Exception {
  String sourceFolder = ".";
  if ( !Const.isEmpty( sourceLocation ) ) {
    sourceFolder = sourceLocation + FTPUtils.FILE_SEPARATOR;
  } else {
    sourceFolder += FTPUtils.FILE_SEPARATOR;
  }
  Vector<SFTPv3DirectoryEntry> filelist = sftpClient.ls( sourceFolder );
  if ( filelist != null ) {
    Iterator<SFTPv3DirectoryEntry> iterator = filelist.iterator();
    // Stop early when the parent job was asked to stop
    while ( iterator.hasNext() && !parentJob.isStopped() ) {
      SFTPv3DirectoryEntry dirEntry = iterator.next();
      if ( dirEntry == null ) {
        continue;
      }
      // Skip the pseudo entries and anything that is a directory
      if ( dirEntry.filename.equals( "." )
        || dirEntry.filename.equals( ".." ) || isDirectory( sftpClient, sourceFolder + dirEntry.filename ) ) {
        continue;
      }
      if ( getFileWildcard( dirEntry.filename, pattern ) ) {
        // Copy file from remote host
        copyFile(
          sourceFolder + dirEntry.filename, targetLocation + FTPUtils.FILE_SEPARATOR + dirEntry.filename,
          sftpClient );
      }
    }
  }
}
/**
 * Copies a directory tree from the remote host to the local one recursively.
 * Directories are walked depth-first; plain files are copied when they match
 * the wildcard pattern (when one is set).
 *
 * @param sourceLocation
 *          the source directory (or file) on the remote host; "./" when null
 * @param targetLocation
 *          the target directory on the local host
 * @param sftpClient
 *          is an instance of SFTPv3Client that makes SFTP client connection over SSH-2
 * @param pattern
 *          compiled wildcard, or null to accept every file
 * @param parentJob
 *          the running job
 * @throws Exception when listing a remote directory fails
 */
private void copyRecursive( String sourceLocation, String targetLocation, SFTPv3Client sftpClient,
  Pattern pattern, Job parentJob ) throws Exception {
  String sourceFolder = "." + FTPUtils.FILE_SEPARATOR;
  if ( sourceLocation != null ) {
    sourceFolder = sourceLocation;
  }
  if ( this.isDirectory( sftpClient, sourceFolder ) ) {
    Vector<?> filelist = sftpClient.ls( sourceFolder );
    Iterator<?> iterator = filelist.iterator();
    while ( iterator.hasNext() ) {
      SFTPv3DirectoryEntry dirEntry = (SFTPv3DirectoryEntry) iterator.next();
      if ( dirEntry == null ) {
        continue;
      }
      if ( dirEntry.filename.equals( "." ) || dirEntry.filename.equals( ".." ) ) {
        continue;
      }
      // Recurse into each entry; plain files fall through to the isFile branch below
      copyRecursive( sourceFolder + FTPUtils.FILE_SEPARATOR + dirEntry.filename, targetLocation
        + Const.FILE_SEPARATOR + dirEntry.filename, sftpClient, pattern, parentJob );
    }
  } else if ( isFile( sftpClient, sourceFolder ) ) {
    // NOTE(review): the wildcard is tested against the full remote path here,
    // while the non-recursive GetFiles tests only the bare file name -- confirm
    // this difference is intended.
    if ( getFileWildcard( sourceFolder, pattern ) ) {
      copyFile( sourceFolder, targetLocation, sftpClient );
    }
  }
}
/**
 * Checks if a remote path is a regular file.
 *
 * @param sftpClient the SFTP client to query with
 * @param filename the remote path to test
 * @return true, if filename can be stat'ed and is a regular file; false on any error
 */
public boolean isFile( SFTPv3Client sftpClient, String filename ) {
  try {
    return sftpClient.stat( filename ).isRegularFile();
  } catch ( Exception e ) {
    // Ignore errors
  }
  return false;
}
/**
 * Copies one remote file to a local target path and afterwards applies the
 * configured post-transfer action (delete/move) on the remote side.
 * Updates the transfer counters: nbfilestoget, nbgot and nbrerror.
 *
 * @param sourceLocation the remote file to download
 * @param targetLocation the local path to write the file to
 * @param sftpClient is an instance of SFTPv3Client holding the SFTP session over SSH-2
 */
private void copyFile( String sourceLocation, String targetLocation, SFTPv3Client sftpClient ) {
  SFTPv3FileHandle sftpFileHandle = null;
  FileOutputStream fos = null;
  File transferFile = null;
  long remoteFileSize = -1;
  boolean filecopied = true;
  try {
    transferFile = new File( targetLocation );
    // Skip the download when "only get new files" is set and the target already exists
    if ( ( onlyGettingNewFiles == false )
      || ( onlyGettingNewFiles == true ) && !FileExists( transferFile.getAbsolutePath() ) ) {
      // Make sure the local parent folders exist
      new File( transferFile.getParent() ).mkdirs();
      remoteFileSize = this.getFileSize( sftpClient, sourceLocation );
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.ReceivingFile", sourceLocation, transferFile
          .getAbsolutePath(), "" + remoteFileSize ) );
      }
      sftpFileHandle = sftpClient.openFileRO( sourceLocation );
      fos = null;
      long offset = 0;
      fos = new FileOutputStream( transferFile );
      // Pull the remote file down in 2KB chunks until EOF (len <= 0)
      byte[] buffer = new byte[2048];
      while ( true ) {
        int len = sftpClient.read( sftpFileHandle, offset, buffer, 0, buffer.length );
        if ( len <= 0 ) {
          break;
        }
        fos.write( buffer, 0, len );
        offset += len;
      }
      fos.flush();
      fos.close();
      fos = null;
      nbfilestoget++;
      // Verify that the local copy has the same size as the remote original
      if ( remoteFileSize > 0 && remoteFileSize != transferFile.length() ) {
        filecopied = false;
        nbrerror++;
        logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.Error.RemoteFileLocalDifferent", ""
          + remoteFileSize, transferFile.length() + "", "" + offset ) );
      } else {
        nbgot++;
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobSSH2GET.Log.RemoteFileLocalCopied", sourceLocation, transferFile + "" ) );
        }
      }
    }
    // Let's now delete or move file if needed...
    if ( filecopied && !afterFtpPut.equals( "do_nothing" ) ) {
      deleteOrMoveFiles( sftpClient, sourceLocation, environmentSubstitute( destinationfolder ) );
    }
  } catch ( Exception e ) {
    nbrerror++;
    logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.Error.WritingFile", transferFile.getAbsolutePath(), e
      .getMessage() ) );
  } finally {
    // Always release the remote handle and the local stream
    try {
      if ( sftpFileHandle != null ) {
        sftpClient.closeFile( sftpFileHandle );
        sftpFileHandle = null;
      }
      if ( fos != null ) {
        try {
          fos.close();
          fos = null;
        } catch ( Exception ex ) {
          // Ignore errors
        }
      }
    } catch ( Exception e ) {
      // Ignore errors
    }
  }
}
/**
 * Creates the local target folder (via VFS) when it is missing and the
 * "create target folder" option is enabled.
 *
 * <p>Fix: when KettleVFS.getFileObject itself threw, the catch block called
 * folder.toString() on a still-null reference, masking the real error with a
 * NullPointerException; the raw path is now used as a fallback.</p>
 *
 * @param filefolder the local folder to create
 * @return true when the folder exists afterwards; false when it is missing and
 *         creation is disabled, or when creation failed
 */
private boolean CreateFolder( String filefolder ) {
  FileObject folder = null;
  try {
    folder = KettleVFS.getFileObject( filefolder, this );
    if ( !folder.exists() ) {
      if ( createtargetfolder ) {
        folder.createFolder();
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.FolderCreated", folder.toString() ) );
        }
      } else {
        return false;
      }
    }
    return true;
  } catch ( Exception e ) {
    // folder is still null when getFileObject failed; fall back to the raw path
    logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.CanNotCreateFolder",
      folder == null ? filefolder : folder.toString() ), e );
  } finally {
    if ( folder != null ) {
      try {
        folder.close();
      } catch ( Exception ex ) { /* Ignore */
      }
    }
  }
  return false;
}
/**
 * Create remote folder
 *
 * @param sftpClient the SFTP client holding the session over SSH-2
 * @param foldername the remote folder to create
 * @return true, if foldername is created; false when it already existed or creation failed
 */
private boolean CreateRemoteFolder( SFTPv3Client sftpClient, String foldername ) {
  boolean retval = false;
  if ( !sshDirectoryExists( sftpClient, foldername ) ) {
    try {
      // 0700: read/write/execute for the owner only
      sftpClient.mkdir( foldername, 0700 );
      retval = true;
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobSSH2GET.Log.RemoteFolderCreated", foldername ) );
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "JobSSH2GET.Log.Error.CreatingRemoteFolder", foldername ), e );
    }
  }
  return retval;
}
/**
 * @return true: this job entry evaluates to a boolean result that following
 *         job entries can use for conditional hops
 */
public boolean evaluates() {
  return true;
}
/**
 * Declares the external resources this entry depends on (the SSH server),
 * so impact analysis can report them.
 *
 * @param jobMeta the job metadata used for variable substitution
 * @return the parent dependencies plus a SERVER entry for the resolved server name
 */
public List<ResourceReference> getResourceDependencies( JobMeta jobMeta ) {
  List<ResourceReference> references = super.getResourceDependencies( jobMeta );
  if ( !Const.isEmpty( serverName ) ) {
    String realServerName = jobMeta.environmentSubstitute( serverName );
    ResourceReference reference = new ResourceReference( this );
    reference.getEntries().add( new ResourceEntry( realServerName, ResourceType.SERVER ) );
    references.add( reference );
  }
  return references;
}
/**
 * Validates the entry configuration for the UI "check" action: server name,
 * local directory (must exist), user name, password (non-null) and server
 * port (must parse as an integer).
 */
@Override
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space,
  Repository repository, IMetaStore metaStore ) {
  andValidator().validate( this, "serverName", remarks, putValidators( notBlankValidator() ) );
  andValidator().validate(
    this, "localDirectory", remarks, putValidators( notBlankValidator(), fileExistsValidator() ) );
  andValidator().validate( this, "userName", remarks, putValidators( notBlankValidator() ) );
  andValidator().validate( this, "password", remarks, putValidators( notNullValidator() ) );
  andValidator().validate( this, "serverPort", remarks, putValidators( integerValidator() ) );
}
}
| 1.140625 | 1 |
retro/throttling/src/main/java/edu/brown/cs/systems/retro/throttling/ratelimiters/WrappedGoogleRateLimiter.java | forivall-mirrors/tracing-framework | 82 | 17 | package edu.brown.cs.systems.retro.throttling.ratelimiters;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* Small extension to google rate limiter using a fair lock It ensures that when
* the rate is updated, at most one request observes the old rate. With google's
* rate limiter, requests observe the rate from when they were enqueued, so
* there is lag.
*
* @author a-jomace
*
*/
/**
 * Small extension to the Google rate limiter using a fair lock. It ensures that
 * when the rate is updated, at most one request observes the old rate. With
 * Google's rate limiter, requests observe the rate from when they were
 * enqueued, so there is lag.
 *
 * <p>Fix: both fields are now final (they are assigned once and shared across
 * threads).</p>
 *
 * @author a-jomace
 */
public class WrappedGoogleRateLimiter {

    // Fair lock - provides FIFO ordering of waiting acquirers
    private final Lock lock = new ReentrantLock(true);

    // The wrapped Guava limiter that actually meters the permits
    private final com.google.common.util.concurrent.RateLimiter r;

    /** Creates a limiter with an effectively unlimited rate. */
    public WrappedGoogleRateLimiter() {
        this(Double.MAX_VALUE);
    }

    /**
     * @param permitsPerSecond the sustained rate this limiter allows
     */
    public WrappedGoogleRateLimiter(double permitsPerSecond) {
        this.r = com.google.common.util.concurrent.RateLimiter.create(permitsPerSecond);
    }

    /**
     * Updates the rate. Because acquirers serialize on the fair lock, at most
     * one in-flight request can still observe the old rate.
     */
    public void setRate(double permitsPerSecond) {
        this.r.setRate(permitsPerSecond);
    }

    /** Tries to acquire a single permit without blocking. */
    public boolean tryAcquire() {
        return tryAcquire(1);
    }

    /**
     * Tries to acquire {@code permits} permits without blocking; fails
     * immediately when another caller currently holds the lock.
     */
    public boolean tryAcquire(int permits) {
        if (!lock.tryLock())
            return false;
        try {
            return this.r.tryAcquire(permits);
        } finally {
            lock.unlock();
        }
    }

    /** Blocks until one permit is available. */
    public double acquire() {
        return acquire(1);
    }

    /**
     * Blocks until {@code permits} permits are available.
     *
     * @return the time spent waiting to honor the rate, in seconds
     */
    public double acquire(int permits) {
        lock.lock();
        try {
            return this.r.acquire(permits);
        } finally {
            lock.unlock();
        }
    }
}
| 1.695313 | 2 |
snow-common/src/main/java/com/stylesmile/common/util/ReturnCode.java | stylesmile/snow | 4 | 25 | package com.stylesmile.common.util;
/**
* 数据返回码
* 0 : 成功
* @author : chenye
*/
/**
 * API response status codes paired with a short human readable description.
 * A code of 200 (or 0) means success; the other values describe failures.
 *
 * @author : chenye
 */
public enum ReturnCode {
    /** 401 - caller is not authenticated. */
    NOT_LOGIN("401", "未登录"),
    /** 200 - operation succeeded. */
    SUCCESS("200", "成功"),
    /** 500 - internal failure. */
    FAIL("500", "内部失败"),
    /** 403 - access forbidden. */
    ACCESS_ERROR("403", "禁止访问"),
    /** 404 - resource not found. */
    NOT_FOUND("404", "页面未发现");

    private String code;
    private String desc;

    ReturnCode(String codeValue, String descValue) {
        this.code = codeValue;
        this.desc = descValue;
    }

    /** @return the status code as a string */
    public String getCode() {
        return code;
    }

    /** @param code the status code to store */
    public void setCode(String code) {
        this.code = code;
    }

    /** @return the human readable description */
    public String getDesc() {
        return desc;
    }

    /** @param desc the description to store */
    public void setDesc(String desc) {
        this.desc = desc;
    }
}
| 0.933594 | 1 |
com/planet_ink/coffee_mud/Abilities/Druid/Chant_PoisonousVine.java | random-mud-pie/CoffeeMud | 1 | 33 | package com.planet_ink.coffee_mud.Abilities.Druid;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2019 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * A druid chant that summons a poisonous vine to fight at the caster's side.
 * Extends the plain vine summon, replacing the creature with one that carries
 * a level-scaled poison attack.
 */
public class Chant_PoisonousVine extends Chant_SummonVine
{
    // Unique ability identifier used by the ability registry / class loader.
    @Override
    public String ID()
    {
        return "Chant_PoisonousVine";
    }

    // Display name, localized once at class-load time.
    private final static String localizedName = CMLib.lang().L("Poisonous Vine");

    @Override
    public String name()
    {
        return localizedName;
    }

    // Affect string shown while the chant is active.
    private final static String localizedStaticDisplay = CMLib.lang().L("(Poisonous Vine)");

    @Override
    public String displayText()
    {
        return localizedStaticDisplay;
    }

    // Classified as a plant-control chant.
    @Override
    public int classificationCode()
    {
        return Ability.ACODE_CHANT|Ability.DOMAIN_PLANTCONTROL;
    }

    @Override
    public int abstractQuality()
    {
        return Ability.QUALITY_BENEFICIAL_SELF;
    }

    @Override
    public int enchantQuality()
    {
        return Ability.QUALITY_INDIFFERENT;
    }

    @Override
    protected int canAffectCode()
    {
        return CAN_MOBS;
    }

    @Override
    protected int canTargetCode()
    {
        return 0;
    }

    /**
     * Builds the summoned poisonous vine: a generic MOB scaled to the caster's
     * adjusted level, given a rescue skill and a poison whose potency grows
     * with the caster's qualifying class level, then set on the caster's
     * current victim and made to follow the caster.
     *
     * @param caster the chanting MOB
     * @param material material code passed by the superclass (unused here)
     * @return the newly created vine MOB
     */
    @Override
    public MOB determineMonster(final MOB caster, final int material)
    {
        final MOB victim=caster.getVictim();
        final MOB newMOB=CMClass.getMOB("GenMOB");
        // Scale the vine to the caster's effective level (minimum 1)
        int level=adjustedLevel(caster,0);
        if(level<1)
            level=1;
        newMOB.basePhyStats().setLevel(level);
        newMOB.basePhyStats().setAbility(13);
        newMOB.baseCharStats().setMyRace(CMClass.getRace("Vine"));
        final String name="a poisonous vine";
        newMOB.setName(name);
        newMOB.setDisplayText(L("@x1 looks enraged!",name));
        newMOB.setDescription("");
        CMLib.factions().setAlignment(newMOB,Faction.Align.NEUTRAL);
        // The vine will defend (rescue) its master in combat
        Ability A=CMClass.getAbility("Fighter_Rescue");
        A.setProficiency(100);
        newMOB.addAbility(A);
        A=null;
        // Pick a poison by how far past the qualifying level the caster is:
        // every 5 class levels upgrades the venom
        final int classlevel=CMLib.ableMapper().qualifyingClassLevel(caster,this)-CMLib.ableMapper().qualifyingLevel(caster,this);
        switch(classlevel/5)
        {
        case 0:
            A = CMClass.getAbility("Poison_Sting");
            break;
        case 1:
            A = CMClass.getAbility("Poison_Bloodboil");
            break;
        case 2:
            A = CMClass.getAbility("Poison_Venom");
            break;
        default: A=CMClass.getAbility("Poison_Decreptifier"); break;
        }
        if(A!=null)
        {
            A.setProficiency(100);
            newMOB.addAbility(A);
        }
        newMOB.addBehavior(CMClass.getBehavior("CombatAbilities"));
        newMOB.setVictim(victim);
        newMOB.basePhyStats().setSensesMask(newMOB.basePhyStats().sensesMask()|PhyStats.CAN_SEE_DARK);
        newMOB.setLocation(caster.location());
        newMOB.basePhyStats().setRejuv(PhyStats.NO_REJUV);
        // Combat stats scale with the vine's level
        newMOB.basePhyStats().setDamage(6+(5*(level/5)));
        newMOB.basePhyStats().setAttackAdjustment(10);
        newMOB.basePhyStats().setArmor(100-(30+(level/2)));
        newMOB.baseCharStats().setStat(CharStats.STAT_GENDER,'N');
        // Summoned creature grants no experience when killed
        newMOB.addNonUninvokableEffect(CMClass.getAbility("Prop_ModExperience"));
        newMOB.setMiscText(newMOB.text());
        // Recompute stats after all base values are in place, then animate the MOB
        newMOB.recoverCharStats();
        newMOB.recoverPhyStats();
        newMOB.recoverMaxState();
        newMOB.resetToMaxState();
        newMOB.bringToLife(caster.location(),true);
        CMLib.beanCounter().clearZeroMoney(newMOB,null);
        newMOB.setMoneyVariation(0);
        newMOB.setStartRoom(null); // keep before postFollow for Conquest
        CMLib.commands().postFollow(newMOB,caster,true);
        if(newMOB.amFollowing()!=caster)
            caster.tell(L("@x1 seems unwilling to follow you.",newMOB.name()));
        else
        {
            if(newMOB.getVictim()!=victim)
                newMOB.setVictim(victim);
            newMOB.location().showOthers(newMOB,victim,CMMsg.MSG_OK_ACTION,L("<S-NAME> start(s) attacking <T-NAMESELF>!"));
        }
        return(newMOB);
    }
}
| 1.179688 | 1 |
wisdomSite/src/main/java/com/jlkj/common/exception/file/InvalidExtensionException.java | guodingfang/RuoYi-Vue | 1 | 41 | package com.jlkj.common.exception.file;
import java.util.Arrays;
import org.apache.commons.fileupload.FileUploadException;
/**
* 文件上传 误异常类
*
* @author jlkj
*/
/**
 * Thrown when an uploaded file carries an extension that is not in the allowed
 * list. Category-specific subclasses (image/flash/media) let callers react to
 * the kind of upload that failed validation.
 *
 * <p>Fix: the three detail fields are assigned only in the constructor and are
 * now declared final.</p>
 *
 * @author jlkj
 */
public class InvalidExtensionException extends FileUploadException
{
    private static final long serialVersionUID = 1L;

    /** Extensions that would have been accepted. */
    private final String[] allowedExtension;

    /** The offending extension of the uploaded file. */
    private final String extension;

    /** The name of the uploaded file. */
    private final String filename;

    public InvalidExtensionException(String[] allowedExtension, String extension, String filename)
    {
        super("filename : [" + filename + "], extension : [" + extension + "], allowed extension : [" + Arrays.toString(allowedExtension) + "]");
        this.allowedExtension = allowedExtension;
        this.extension = extension;
        this.filename = filename;
    }

    public String[] getAllowedExtension()
    {
        return allowedExtension;
    }

    public String getExtension()
    {
        return extension;
    }

    public String getFilename()
    {
        return filename;
    }

    /** Invalid extension on an image upload. */
    public static class InvalidImageExtensionException extends InvalidExtensionException
    {
        private static final long serialVersionUID = 1L;

        public InvalidImageExtensionException(String[] allowedExtension, String extension, String filename)
        {
            super(allowedExtension, extension, filename);
        }
    }

    /** Invalid extension on a flash upload. */
    public static class InvalidFlashExtensionException extends InvalidExtensionException
    {
        private static final long serialVersionUID = 1L;

        public InvalidFlashExtensionException(String[] allowedExtension, String extension, String filename)
        {
            super(allowedExtension, extension, filename);
        }
    }

    /** Invalid extension on a media upload. */
    public static class InvalidMediaExtensionException extends InvalidExtensionException
    {
        private static final long serialVersionUID = 1L;

        public InvalidMediaExtensionException(String[] allowedExtension, String extension, String filename)
        {
            super(allowedExtension, extension, filename);
        }
    }
}
| 1.273438 | 1 |
sabot/kernel/src/main/java/com/dremio/exec/expr/fn/impl/DecimalFunctions.java | techfoxy/dremio-oss | 0 | 49 | /*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.exec.expr.fn.impl;
import java.math.BigDecimal;
import javax.inject.Inject;
import org.apache.arrow.vector.holders.BigIntHolder;
import org.apache.arrow.vector.holders.BitHolder;
import org.apache.arrow.vector.holders.DecimalHolder;
import org.apache.arrow.vector.holders.Float4Holder;
import org.apache.arrow.vector.holders.Float8Holder;
import org.apache.arrow.vector.holders.IntHolder;
import org.apache.arrow.vector.holders.NullableBigIntHolder;
import org.apache.arrow.vector.holders.NullableDecimalHolder;
import org.apache.arrow.vector.holders.NullableFloat8Holder;
import org.apache.arrow.vector.holders.NullableIntHolder;
import org.apache.arrow.vector.holders.NullableVarCharHolder;
import org.apache.arrow.vector.holders.VarCharHolder;
import com.dremio.exec.expr.AggrFunction;
import com.dremio.exec.expr.SimpleFunction;
import com.dremio.exec.expr.annotations.FunctionTemplate;
import com.dremio.exec.expr.annotations.FunctionTemplate.FunctionScope;
import com.dremio.exec.expr.annotations.FunctionTemplate.NullHandling;
import com.dremio.exec.expr.annotations.Output;
import com.dremio.exec.expr.annotations.Param;
import com.dremio.exec.expr.annotations.Workspace;
import com.dremio.exec.expr.fn.FunctionErrorContext;
import com.dremio.exec.expr.fn.FunctionGenerationHelper;
import com.dremio.exec.expr.fn.OutputDerivation;
import io.netty.buffer.ArrowBuf;
public class DecimalFunctions {
public static final String DECIMAL_CAST_NULL_ON_OVERFLOW = "castDECIMALNullOnOverflow";
/**
 * Generated-function template: casts a DECIMAL value to VARCHAR, truncating
 * the textual form to the target type's length.
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {"castVARCHAR"}, scope = FunctionScope.SIMPLE, nulls= NullHandling.NULL_IF_NULL)
public static class CastDecimalVarChar implements SimpleFunction {
    // Input decimal (buffer + byte offset + scale).
    @Param
    DecimalHolder in;
    // Maximum length of the resulting VARCHAR.
    @Param
    BigIntHolder len;
    @Output
    VarCharHolder out;
    @Inject
    ArrowBuf buffer;

    @Override
    public void setup() {
    }

    @Override
    public void eval() {
        // Converts the holder's byte offset into an element index --
        // presumably the form getBigDecimalFromArrowBuf expects; TODO confirm.
        in.start = (in.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
        java.math.BigDecimal bd = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(in.buffer, in.start, in.scale);
        String istr = bd.toString();
        out.start = 0;
        out.end = Math.min((int)len.value, istr.length()); // truncate if target type has length smaller than that of input's string
        buffer = buffer.reallocIfNeeded(out.end);
        out.buffer = buffer;
        // NOTE(review): getBytes() uses the platform default charset; UTF-8 is
        // presumably intended -- confirm against the other cast functions.
        out.buffer.setBytes(0, istr.substring(0,out.end).getBytes());
    }
}
/**
 * Casts a 16-byte decimal to FLOAT8 (double). Precision beyond what a double
 * can represent is lost via {@code BigDecimal.doubleValue()}.
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {"castFLOAT8"}, scope = FunctionScope.SIMPLE, nulls= NullHandling.NULL_IF_NULL)
public static class CastDecimalFloat8 implements SimpleFunction {
@Param
DecimalHolder in;
@Output
Float8Holder out;
@Override
public void setup() {
}
@Override
public void eval() {
// Convert the byte offset in the holder to a 16-byte element index.
in.start = (in.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal bd = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(in.buffer, in.start, in.scale);
out.value = bd.doubleValue();
}
}
/**
 * Ad-hoc manual check left in for convenience: prints {@code 99.0000},
 * demonstrating that BigDecimal.toString() preserves trailing zeros (scale).
 * NOTE(review): consider removing this debug entry point from a library class.
 */
public static void main(String[] args) {
  final java.math.BigDecimal sample = new java.math.BigDecimal("99.0000");
  System.out.println(sample);
}
/**
 * Casts a VARCHAR to a decimal with the given (constant) precision and scale,
 * rounding HALF_UP to the target scale.
 * NOTE(review): {@code new BigDecimal(s)} throws NumberFormatException for
 * unparseable input; only the Arrow write below is routed through errorContext.
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {"castDECIMAL"}, derivation = OutputDerivation.DecimalCast.class, nulls= NullHandling.NULL_IF_NULL)
public static class CastVarCharDecimal implements SimpleFunction {
@Param
VarCharHolder in;
@Param(constant = true)
BigIntHolder precision;   // target precision, fixed at plan time
@Param(constant = true)
BigIntHolder scale;       // target scale, fixed at plan time
@Output
DecimalHolder out;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext errorContext;
@Override
public void setup() {
// One 16-byte decimal slot is enough for the single output value.
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
String s = com.dremio.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(in.start, in.end, in.buffer);
java.math.BigDecimal bd = new java.math.BigDecimal(s).setScale((int) scale.value, java.math.RoundingMode.HALF_UP);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(bd, buffer, 0);
} catch (RuntimeException e) {
throw errorContext.error(e)
.build();
}
out.buffer = buffer;
out.precision = (int) precision.value;
out.scale = (int) scale.value;
}
}
/**
 * Casts a VARCHAR to a decimal, producing NULL (instead of an error) when the
 * rounded value has more integer digits than {@code precision - scale} allows.
 * Null handling is INTERNAL because the function itself decides the null bit.
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {DECIMAL_CAST_NULL_ON_OVERFLOW}, derivation = OutputDerivation.DecimalCast.class, nulls= NullHandling.INTERNAL)
public static class CastVarcharDecimalNullOnOverflow implements SimpleFunction {
@Param
NullableVarCharHolder in;
@Param(constant = true)
BigIntHolder precision;
@Param(constant = true)
BigIntHolder scale;
@Output
NullableDecimalHolder out;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext errorContext;
@Workspace
IntHolder expectedSignificantDigits;  // max digits left of the decimal point
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
expectedSignificantDigits.value = (int)(precision.value - scale.value);
}
@Override
public void eval() {
out.isSet = in.isSet;
if (in.isSet == 1) {
String s = com.dremio.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(in.start, in.end, in.buffer);
java.math.BigDecimal originalValue = new java.math.BigDecimal(s);
java.math.BigDecimal convertedValue = originalValue.setScale((int) scale.value, java.math.RoundingMode
.HALF_UP);
// Digits to the left of the decimal point after rounding to the target scale.
int significantDigitsConverted = convertedValue.precision() - convertedValue.scale();
if (significantDigitsConverted > expectedSignificantDigits.value) {
out.isSet = 0;  // overflow -> NULL rather than error
} else {
out.isSet = 1;
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(convertedValue, buffer, 0);
} catch (RuntimeException e) {
throw errorContext.error(e)
.build();
}
out.buffer = buffer;
out.precision = (int) precision.value;
out.scale = (int) scale.value;
}
}
}
}
/**
 * Rescales a decimal to a new (constant) precision/scale, producing NULL when
 * the rescaled value exceeds the target precision (see checkOverflow).
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {DECIMAL_CAST_NULL_ON_OVERFLOW}, derivation = OutputDerivation.DecimalCast
.class, nulls= NullHandling.INTERNAL)
public static class CastDecimalDecimalNullOnOverflow implements SimpleFunction {
@Param
NullableDecimalHolder in;
@Param(constant = true)
BigIntHolder precision;
@Param(constant = true)
BigIntHolder scale;
@Output
NullableDecimalHolder out;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext errorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
out.isSet = in.isSet;
if (in.isSet == 1) {
// Byte offset -> 16-byte element index (holder left untouched here).
int index = (in.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal input = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(in.buffer, index, in.scale);
java.math.BigDecimal result = input.setScale((int) scale.value, java.math.RoundingMode.HALF_UP);
boolean overflow = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result,
(int) precision.value);
if (overflow) {
out.isSet = 0;  // does not fit the target precision -> NULL
} else {
out.isSet = 1;
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
} catch (RuntimeException e) {
throw errorContext.error(e)
.build();
}
out.buffer = buffer;
out.precision = (int) precision.value;
out.scale = (int) scale.value;
}
}
}
}
/**
 * Rescales a decimal to a new (constant) precision/scale. Values whose rounded
 * form exceeds 38 digits are replaced with zero by checkOverflow (not NULL).
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {"castDECIMAL"}, derivation = OutputDerivation.DecimalCast.class, nulls= NullHandling.NULL_IF_NULL)
public static class CastDecimalDecimal implements SimpleFunction {
@Param
DecimalHolder in;
@Param(constant = true)
BigIntHolder precision;
@Param(constant = true)
BigIntHolder scale;
@Output
DecimalHolder out;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext errorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
// Byte offset -> 16-byte element index.
int index = (in.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal input = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(in.buffer, index, in.scale);
java.math.BigDecimal result = com.dremio.exec.expr.fn.impl.DecimalFunctions.roundWithPositiveScale(input,
(int) scale.value, java.math.RoundingMode.HALF_UP);
result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
} catch (RuntimeException e) {
throw errorContext.error(e)
.build();
}
out.buffer = buffer;
out.precision = (int) precision.value;
out.scale = (int) scale.value;
}
}
/**
 * Casts an INT to a decimal with the given (constant) precision/scale.
 * Integers never need rounding; HALF_UP here only widens the scale.
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {"castDECIMAL"}, derivation = OutputDerivation.DecimalCast.class, nulls= NullHandling.NULL_IF_NULL)
public static class CastIntDecimal implements SimpleFunction {
@Param
IntHolder in;
@Param(constant = true)
BigIntHolder precision;
@Param(constant = true)
BigIntHolder scale;
@Output
DecimalHolder out;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext errorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
java.math.BigDecimal bd = java.math.BigDecimal.valueOf(in.value).setScale((int) scale.value, java.math.RoundingMode.HALF_UP);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(bd, buffer, 0);
} catch (RuntimeException e) {
throw errorContext.error(e)
.build();
}
out.buffer = buffer;
out.precision = (int) precision.value;
out.scale = (int) scale.value;
}
}
/**
 * Casts a BIGINT to a decimal with the given (constant) precision/scale.
 * Mirrors CastIntDecimal for 64-bit input.
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {"castDECIMAL"}, derivation = OutputDerivation.DecimalCast.class, nulls= NullHandling.NULL_IF_NULL)
public static class CastBigIntDecimal implements SimpleFunction {
@Param
BigIntHolder in;
@Param(constant = true)
BigIntHolder precision;
@Param(constant = true)
BigIntHolder scale;
@Output
DecimalHolder out;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext errorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
java.math.BigDecimal bd = java.math.BigDecimal.valueOf(in.value).setScale((int) scale.value, java.math.RoundingMode.HALF_UP);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(bd, buffer, 0);
} catch (RuntimeException e) {
throw errorContext.error(e)
.build();
}
out.buffer = buffer;
out.precision = (int) precision.value;
out.scale = (int) scale.value;
}
}
/**
 * Casts a FLOAT4 to a decimal with the given (constant) precision/scale,
 * rounding HALF_UP. Output is always marked set (isSet = 1).
 * NOTE(review): unlike the sibling casts this template declares no
 * {@code nulls=} attribute and uses a nullable output holder — confirm the
 * default NullHandling is intended here.
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {"castDECIMAL"}, derivation = OutputDerivation.DecimalCast.class)
public static class CastFloat4Decimal implements SimpleFunction {
@Param
Float4Holder in;
@Param(constant = true)
BigIntHolder precision;
@Param(constant = true)
BigIntHolder scale;
@Output
NullableDecimalHolder out;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext errorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
out.isSet = 1;
java.math.BigDecimal bd = java.math.BigDecimal.valueOf(in.value).setScale((int) scale.value, java.math.RoundingMode.HALF_UP);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(bd, buffer, 0);
} catch (RuntimeException e) {
throw errorContext.error(e)
.build();
}
out.buffer = buffer;
out.precision = (int) precision.value;
out.scale = (int) scale.value;
}
}
/**
 * Casts a FLOAT8 to a decimal with the given (constant) precision/scale,
 * rounding HALF_UP at the target scale.
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {"castDECIMAL"}, derivation = OutputDerivation.DecimalCast.class, nulls= NullHandling.NULL_IF_NULL)
public static class CastFloat8Decimal implements SimpleFunction {
@Param
Float8Holder in;
@Param(constant = true)
BigIntHolder precision;
@Param(constant = true)
BigIntHolder scale;
@Output
DecimalHolder out;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext errorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
java.math.BigDecimal bd = java.math.BigDecimal.valueOf(in.value).setScale((int) scale.value, java.math.RoundingMode.HALF_UP);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(bd, buffer, 0);
} catch (RuntimeException e) {
throw errorContext.error(e)
.build();
}
out.buffer = buffer;
out.precision = (int) precision.value;
out.scale = (int) scale.value;
}
}
/**
 * SUM aggregate over nullable decimals. Accumulates into a double
 * (Float8), so results are approximate for values beyond double precision.
 * Returns NULL when every input value was null.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "sum", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
public static class NullableDecimalSum implements AggrFunction {
@Param NullableDecimalHolder in;
@Workspace NullableFloat8Holder sum;            // running total as a double
@Workspace NullableBigIntHolder nonNullCount;   // distinguishes all-null groups
@Output NullableFloat8Holder out;
public void setup() {
sum = new NullableFloat8Holder();
sum.isSet = 1;
sum.value = 0;
nonNullCount = new NullableBigIntHolder();
nonNullCount.isSet = 1;
nonNullCount.value = 0;
}
public void add() {
if (in.isSet != 0) {
// Byte offset -> 16-byte element index before reading the decimal.
in.start = (in.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal bd = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(in.buffer, in.start, in.scale);
sum.value += bd.doubleValue();
nonNullCount.value++;
}
}
public void output() {
if (nonNullCount.value > 0) {
out.isSet = 1;
out.value = sum.value;
} else {
// All values were null. Result should be null too
out.isSet = 0;
}
}
public void reset() {
sum.value = 0;
nonNullCount.value = 0;
}
}
/**
 * $sum0 aggregate over nullable decimals: like SUM but yields 0 (never NULL)
 * when all inputs are null. Accumulates into a double, so it is approximate.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "$sum0", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
public static class NullableDecimalSumZero implements AggrFunction {
@Param NullableDecimalHolder in;
@Workspace NullableFloat8Holder sum;
@Output NullableFloat8Holder out;
public void setup() {
sum = new NullableFloat8Holder();
sum.isSet = 1;
sum.value = 0;
}
public void add() {
if (in.isSet == 1) {
// Byte offset -> 16-byte element index before reading the decimal.
in.start = (in.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal bd = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(in.buffer, in.start, in.scale);
sum.value += bd.doubleValue();
}
}
public void output() {
out.isSet = 1;      // always set: all-null groups output the initial 0
out.value = sum.value;
}
public void reset() {
sum.value = 0;
}
}
/**
 * MIN aggregate over nullable decimals, tracked as a double (approximate for
 * values beyond double precision). Returns NULL when all inputs were null.
 *
 * FIX: reset() previously reinitialized minVal to 0 instead of
 * Double.MAX_VALUE (as setup() does), which would make any group processed
 * after a reset report a minimum of at most 0 even for all-positive inputs.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "min", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
public static class NullableDecimalMin implements AggrFunction {
  @Param NullableDecimalHolder in;
  @Workspace NullableFloat8Holder minVal;        // running minimum as a double
  @Workspace NullableBigIntHolder nonNullCount;  // distinguishes all-null groups
  @Output NullableFloat8Holder out;
  public void setup() {
    minVal = new NullableFloat8Holder();
    minVal.isSet = 1;
    minVal.value = Double.MAX_VALUE;  // identity element for min
    nonNullCount = new NullableBigIntHolder();
    nonNullCount.isSet = 1;
    nonNullCount.value = 0;
  }
  public void add() {
    if (in.isSet != 0) {
      nonNullCount.value = 1;  // only "seen at least one" matters for output()
      // Byte offset -> 16-byte element index before reading the decimal.
      in.start = (in.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
      java.math.BigDecimal bd = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(in.buffer, in.start, in.scale);
      double val = bd.doubleValue();
      if (val < minVal.value) {
        minVal.value = val;
      }
    }
  }
  public void output() {
    if (nonNullCount.value > 0) {
      out.isSet = 1;
      out.value = minVal.value;
    } else {
      // All values were null. Result should be null too
      out.isSet = 0;
    }
  }
  public void reset() {
    // Must match setup(): restart from the identity element, not 0.
    minVal.value = Double.MAX_VALUE;
    nonNullCount.value = 0;
  }
}
/**
 * MAX aggregate over nullable decimals, tracked as a double (approximate for
 * values beyond double precision). Returns NULL when all inputs were null.
 *
 * FIX: reset() previously reinitialized maxVal to 0 instead of
 * -Double.MAX_VALUE (as setup() does), which would make any group processed
 * after a reset report a maximum of at least 0 even for all-negative inputs.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "max", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
public static class NullableDecimalMax implements AggrFunction {
  @Param NullableDecimalHolder in;
  @Workspace NullableFloat8Holder maxVal;        // running maximum as a double
  @Workspace NullableBigIntHolder nonNullCount;  // distinguishes all-null groups
  @Output NullableFloat8Holder out;
  public void setup() {
    maxVal = new NullableFloat8Holder();
    maxVal.isSet = 1;
    maxVal.value = -Double.MAX_VALUE;  // identity element for max
    nonNullCount = new NullableBigIntHolder();
    nonNullCount.isSet = 1;
    nonNullCount.value = 0;
  }
  public void add() {
    if (in.isSet != 0) {
      nonNullCount.value = 1;  // only "seen at least one" matters for output()
      // Byte offset -> 16-byte element index before reading the decimal.
      in.start = (in.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
      java.math.BigDecimal bd = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(in.buffer, in.start, in.scale);
      double val = bd.doubleValue();
      if (val > maxVal.value) {
        maxVal.value = val;
      }
    }
  }
  public void output() {
    if (nonNullCount.value > 0) {
      out.isSet = 1;
      out.value = maxVal.value;
    } else {
      // All values were null. Result should be null too
      out.isSet = 0;
    }
  }
  public void reset() {
    // Must match setup(): restart from the identity element, not 0.
    maxVal.value = -Double.MAX_VALUE;
    nonNullCount.value = 0;
  }
}
/**
 * Exact SUM aggregate (v2): accumulates directly on the 16-byte little-endian
 * decimal representation, avoiding the double-precision loss of the v1 sum.
 * Returns NULL when all inputs were null.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "sum_v2", derivation = OutputDerivation.DecimalAggSum.class,
scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
public static class NullableDecimalSumV2 implements AggrFunction {
@Param NullableDecimalHolder in;
@Workspace NullableDecimalHolder sum;           // 16-byte exact accumulator
@Workspace NullableBigIntHolder nonNullCount;   // distinguishes all-null groups
@Output NullableDecimalHolder out;
@Inject ArrowBuf buffer;
public void setup() {
sum = new NullableDecimalHolder();
sum.isSet = 1;
buffer = buffer.reallocIfNeeded(16);
sum.buffer = buffer;
// Zero out the accumulator slot.
java.math.BigDecimal zero = new java.math.BigDecimal(java.math.BigInteger.ZERO, 0);
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(zero, sum.buffer, 0);
sum.start = 0;
nonNullCount = new NullableBigIntHolder();
nonNullCount.isSet = 1;
nonNullCount.value = 0;
}
public void add() {
if (in.isSet == 1) {
// In-place: sum <- sum + in, operating on raw little-endian bytes.
com.dremio.exec.util.DecimalUtils.addSignedDecimalInLittleEndianBytes(sum.buffer, sum.start, in.buffer, in
.start, sum.buffer, sum.start);
nonNullCount.value++;
}
}
public void output() {
if (nonNullCount.value > 0) {
out.isSet = 1;
out.buffer = sum.buffer;
out.start = sum.start;
} else {
// All values were null. Result should be null too
out.isSet = 0;
}
}
public void reset() {
nonNullCount.value = 0;
java.math.BigDecimal zero = new java.math.BigDecimal(java.math.BigInteger.ZERO, 0);
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(zero, sum.buffer, 0);
}
}
/**
 * Exact $sum0 aggregate (v2): like sum_v2 but returns 0 instead of NULL when
 * all aggregation values are null, so no non-null counter is needed.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "$sum0_v2", derivation = OutputDerivation.DecimalAggSum.class,
scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
public static class NullableDecimalSumZeroV2 implements AggrFunction {
@Param NullableDecimalHolder in;
@Workspace NullableDecimalHolder sum;   // 16-byte exact accumulator
@Output NullableDecimalHolder out;
@Inject ArrowBuf buffer;
public void setup() {
sum = new NullableDecimalHolder();
sum.isSet = 1;
buffer = buffer.reallocIfNeeded(16);
sum.buffer = buffer;
sum.start = 0;
// Zero out the accumulator slot.
java.math.BigDecimal zero = new java.math.BigDecimal(java.math.BigInteger.ZERO, 0);
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(zero, sum.buffer, 0);
}
public void add() {
if (in.isSet == 1) {
// In-place: sum <- sum + in, operating on raw little-endian bytes.
com.dremio.exec.util.DecimalUtils.addSignedDecimalInLittleEndianBytes(sum.buffer, sum.start, in.buffer, in
.start, sum.buffer, sum.start);
}
}
public void output() {
out.isSet = 1;   // always set: all-null groups output the initial 0
out.buffer = sum.buffer;
out.start = sum.start;
}
public void reset() {
java.math.BigDecimal zero = new java.math.BigDecimal(java.math.BigInteger.ZERO, 0);
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(zero, sum.buffer, 0);
}
}
/**
 * Exact MIN aggregate (v2): compares raw 16-byte little-endian decimals, so no
 * precision is lost. Seeded with MAX_DECIMAL (the identity element for min).
 * Returns NULL when all inputs were null.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "min_v2", derivation = OutputDerivation.DecimalAggMinMax.class,
scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
public static class NullableDecimalMinV2 implements AggrFunction {
@Param NullableDecimalHolder in;
@Workspace NullableDecimalHolder minVal;        // 16-byte running minimum
@Workspace NullableBigIntHolder nonNullCount;   // distinguishes all-null groups
@Output NullableDecimalHolder out;
@Inject ArrowBuf buffer;
public void setup() {
minVal = new NullableDecimalHolder();
minVal.isSet = 1;
minVal.start = 0;
buffer = buffer.reallocIfNeeded(16);
minVal.buffer = buffer;
nonNullCount = new NullableBigIntHolder();
nonNullCount.isSet = 1;
// Seed with the largest representable decimal so any input is smaller.
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(com.dremio.exec.util.DecimalUtils.MAX_DECIMAL, minVal.buffer, 0);
nonNullCount.value = 0;
}
public void add() {
if (in.isSet != 0) {
nonNullCount.value = 1;
int compare = com.dremio.exec.util.DecimalUtils
.compareSignedDecimalInLittleEndianBytes(in.buffer, in.start, minVal.buffer, 0);
if (compare < 0) {
// New minimum: copy the 16 raw bytes into the workspace slot.
in.buffer.getBytes(in.start, minVal.buffer, 0 , org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH);
}
}
}
public void output() {
if (nonNullCount.value > 0) {
out.isSet = 1;
out.buffer = minVal.buffer;
out.start = 0;
} else {
// All values were null. Result should be null too
out.isSet = 0;
}
}
public void reset() {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(com.dremio.exec.util.DecimalUtils.MAX_DECIMAL, minVal.buffer, 0);
nonNullCount.value = 0;
}
}
/**
 * Exact MAX aggregate (v2): compares raw 16-byte little-endian decimals, so no
 * precision is lost. Seeded with MIN_DECIMAL (the identity element for max).
 * Returns NULL when all inputs were null.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "max_v2", derivation = OutputDerivation.DecimalAggMinMax.class,
scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
public static class NullableDecimalMaxV2 implements AggrFunction {
@Param NullableDecimalHolder in;
@Workspace NullableDecimalHolder maxVal;        // 16-byte running maximum
@Workspace NullableBigIntHolder nonNullCount;   // distinguishes all-null groups
@Output NullableDecimalHolder out;
@Inject ArrowBuf buffer;
public void setup() {
maxVal = new NullableDecimalHolder();
maxVal.isSet = 1;
maxVal.start = 0;
buffer = buffer.reallocIfNeeded(16);
maxVal.buffer = buffer;
// Seed with the smallest representable decimal so any input is larger.
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(com.dremio.exec.util.DecimalUtils.MIN_DECIMAL, maxVal.buffer, 0);
nonNullCount = new NullableBigIntHolder();
nonNullCount.isSet = 1;
nonNullCount.value = 0;
}
public void add() {
if (in.isSet != 0) {
nonNullCount.value = 1;
int compare = com.dremio.exec.util.DecimalUtils
.compareSignedDecimalInLittleEndianBytes(in.buffer, in.start, maxVal.buffer, 0);
if (compare > 0) {
// New maximum: copy the 16 raw bytes into the workspace slot.
in.buffer.getBytes(in.start, maxVal.buffer, 0 , org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH);
}
}
}
public void output() {
if (nonNullCount.value > 0) {
out.isSet = 1;
out.buffer = maxVal.buffer;
out.start = 0;
} else {
// All values were null. Result should be null too
out.isSet = 0;
}
}
public void reset() {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(com.dremio.exec.util.DecimalUtils.MIN_DECIMAL, maxVal.buffer, 0);
nonNullCount.value = 0;
}
}
/**
 * Decimal comparator where null appears last i.e. nulls are considered
 * larger than all values. Returns -1/0/1 like {@code Comparable.compareTo}.
 */
@FunctionTemplate(name = FunctionGenerationHelper.COMPARE_TO_NULLS_HIGH,
scope = FunctionTemplate.FunctionScope.SIMPLE,
nulls = NullHandling.INTERNAL)
public static class CompareDecimalVsDecimalNullsHigh implements SimpleFunction {
@Param NullableDecimalHolder left;
@Param NullableDecimalHolder right;
@Output
NullableIntHolder out;
public void setup() {}
public void eval() {
out.isSet = 1;
outside:
{
// Null handling first: null > any value, null == null.
if ( left.isSet == 0 ) {
if ( right.isSet == 0 ) {
out.value = 0;
break outside;
} else {
out.value = 1;
break outside;
}
} else if ( right.isSet == 0 ) {
out.value = -1;
break outside;
}
// Both set: compare the raw 16-byte little-endian decimals.
out.value = com.dremio.exec.util.DecimalUtils.compareSignedDecimalInLittleEndianBytes
(left.buffer, left.start, right.buffer, right.start);
} // outside
}
}
/**
 * Decimal comparator where null appears first i.e. nulls are considered
 * smaller than all values. Returns -1/0/1 like {@code Comparable.compareTo}.
 */
@FunctionTemplate(name = FunctionGenerationHelper.COMPARE_TO_NULLS_LOW,
scope = FunctionTemplate.FunctionScope.SIMPLE,
nulls = NullHandling.INTERNAL)
public static class CompareDecimalVsDecimalNullsLow implements SimpleFunction {
@Param NullableDecimalHolder left;
@Param NullableDecimalHolder right;
@Output
NullableIntHolder out;
public void setup() {}
public void eval() {
out.isSet = 1;
outside:
{
// Null handling first: null < any value, null == null.
if ( left.isSet == 0 ) {
if ( right.isSet == 0 ) {
out.value = 0;
break outside;
} else {
out.value = -1;
break outside;
}
} else if ( right.isSet == 0 ) {
out.value = 1;
break outside;
}
// Both set: compare the raw 16-byte little-endian decimals.
out.value = com.dremio.exec.util.DecimalUtils.compareSignedDecimalInLittleEndianBytes
(left.buffer, left.start, right.buffer, right.start);
} // outside
}
}
/**
 * Clamps a value that does not fit the 38-digit (16-byte) decimal range.
 * Values with more than 38 significant digits are replaced with zero;
 * everything else is returned unchanged.
 */
public static BigDecimal checkOverflow(BigDecimal in) {
  if (in.precision() > 38) {
    return new BigDecimal(0);
  }
  return in;
}
/**
 * Reports whether {@code in} needs more significant digits than the given
 * target precision permits.
 */
public static boolean checkOverflow(BigDecimal in, int precision) {
  final int significantDigits = in.precision();
  return significantDigits > precision;
}
/**
 * Adds (or, when {@code isSubtract} is true, subtracts) two decimals exactly,
 * then rounds HALF_UP to {@code outScale}. Values exceeding 38 significant
 * digits are clamped to zero by {@link #checkOverflow(BigDecimal)}.
 *
 * Modernized: replaces the deprecated {@code BigDecimal.ROUND_*} int constants
 * with {@code java.math.RoundingMode} (already used elsewhere in this file);
 * behavior is identical.
 *
 * @param isSubtract   when true, computes {@code left - right}
 * @param outPrecision target precision (currently unused here; overflow is
 *                     checked against the global 38-digit maximum)
 * @param outScale     scale of the result
 */
public static BigDecimal addOrSubtract(boolean isSubtract, BigDecimal left, BigDecimal right,
                                       int outPrecision, int outScale) {
  if (isSubtract) {
    right = right.negate();
  }
  // Align both operands to the larger scale so the addition is exact;
  // widening the scale never requires rounding, so UNNECESSARY is safe.
  int higherScale = Math.max(left.scale(), right.scale()); // >= outScale
  BigDecimal leftScaled = left.setScale(higherScale, java.math.RoundingMode.UNNECESSARY);
  BigDecimal rightScaled = right.setScale(higherScale, java.math.RoundingMode.UNNECESSARY);
  BigDecimal result = leftScaled.add(rightScaled);
  if (higherScale > outScale) {
    result = result.setScale(outScale, java.math.RoundingMode.HALF_UP);
  }
  return checkOverflow(result);
}
/**
 * Decimal addition. The output precision/scale are derived at runtime via
 * Gandiva's DecimalTypeUtil from both input types; the arithmetic itself is
 * exact BigDecimal addition followed by rounding to the derived scale.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "add", scope = FunctionScope.SIMPLE, derivation = OutputDerivation.DecimalAdd.class, nulls = NullHandling.NULL_IF_NULL)
public static class AddTwoDecimals implements SimpleFunction {
@Param
DecimalHolder in1;
@Param
DecimalHolder in2;
@Output
DecimalHolder out;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext errorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
// Byte offsets -> 16-byte element indices before reading each operand.
int index = (in1.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(in1.buffer, index, in1.scale);
index = (in2.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(in2.buffer, index, in2.scale);
// Result type (precision/scale) follows Gandiva's ADD type-derivation rules.
org.apache.arrow.vector.types.pojo.ArrowType.Decimal resultTypeForOperation = org.apache.arrow.gandiva.evaluator.DecimalTypeUtil.getResultTypeForOperation(org.apache.arrow.gandiva.evaluator.DecimalTypeUtil.OperationType.ADD,
new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(in1.precision, in1.scale),
new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(in2.precision, in2.scale));
out.precision = resultTypeForOperation.getPrecision();
out.scale = resultTypeForOperation.getScale();
java.math.BigDecimal result = com.dremio.exec.expr.fn.impl.DecimalFunctions.addOrSubtract(false, left, right, out.precision, out.scale);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
} catch (RuntimeException e) {
throw errorContext.error(e)
.build();
}
out.buffer = buffer;
}
}
/**
 * Decimal subtraction. Mirrors AddTwoDecimals but uses Gandiva's SUBTRACT
 * type derivation and calls addOrSubtract with isSubtract = true.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "subtract", scope = FunctionScope.SIMPLE, derivation = OutputDerivation.DecimalSubtract.class, nulls = NullHandling.NULL_IF_NULL)
public static class SubtractDecimals implements SimpleFunction {
@Param
DecimalHolder leftHolder;
@Param
DecimalHolder rightHolder;
@Output
DecimalHolder resultHolder;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext functionErrorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
// Byte offsets -> 16-byte element indices before reading each operand.
int index = (leftHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(leftHolder.buffer, index, leftHolder.scale);
index = (rightHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(rightHolder.buffer, index, rightHolder.scale);
// Result type (precision/scale) follows Gandiva's SUBTRACT derivation rules.
org.apache.arrow.vector.types.pojo.ArrowType.Decimal resultTypeForOperation = org.apache.arrow.gandiva.evaluator.DecimalTypeUtil.getResultTypeForOperation(org.apache.arrow.gandiva.evaluator.DecimalTypeUtil.OperationType.SUBTRACT,
new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(leftHolder.precision, leftHolder.scale),
new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(rightHolder.precision, rightHolder.scale));
resultHolder.precision = resultTypeForOperation.getPrecision();
resultHolder.scale = resultTypeForOperation.getScale();
java.math.BigDecimal result = com.dremio.exec.expr.fn.impl.DecimalFunctions.addOrSubtract(true, left, right, resultHolder.precision, resultHolder.scale);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
} catch (RuntimeException e) {
throw functionErrorContext.error(e)
.build();
}
resultHolder.buffer = buffer;
}
}
/**
 * Decimal multiplication: exact BigDecimal multiply, then HALF_UP rounding to
 * the Gandiva-derived result scale; >38-digit results are clamped to zero.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "multiply", derivation = OutputDerivation.DecimalMultiply.class, scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL)
public static class MultiplyDecimals implements SimpleFunction {
@Param
DecimalHolder leftHolder;
@Param
DecimalHolder rightHolder;
@Output
DecimalHolder resultHolder;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext functionErrorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
// Byte offsets -> 16-byte element indices before reading each operand.
int index = (leftHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(leftHolder.buffer, index, leftHolder.scale);
index = (rightHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(rightHolder.buffer, index, rightHolder.scale);
// Result type (precision/scale) follows Gandiva's MULTIPLY derivation rules.
org.apache.arrow.vector.types.pojo.ArrowType.Decimal resultTypeForOperation = org.apache.arrow.gandiva.evaluator.DecimalTypeUtil.getResultTypeForOperation(org.apache.arrow.gandiva.evaluator.DecimalTypeUtil.OperationType.MULTIPLY,
new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(leftHolder.precision, leftHolder.scale),
new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(rightHolder.precision, rightHolder.scale));
resultHolder.precision = resultTypeForOperation.getPrecision();
resultHolder.scale = resultTypeForOperation.getScale();
java.math.BigDecimal result = left.multiply(right).setScale(resultHolder.scale, java.math.BigDecimal.ROUND_HALF_UP);
result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
} catch (RuntimeException e) {
throw functionErrorContext.error(e)
.build();
}
resultHolder.buffer = buffer;
}
}
/**
 * Decimal division implemented on unscaled BigIntegers with manual
 * round-half-away-from-zero: the dividend is first rescaled so that the
 * quotient of the unscaled values lands exactly at the derived result scale.
 */
@SuppressWarnings("unused")
@FunctionTemplate(name = "divide", derivation = OutputDerivation.DecimalDivide.class, scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL)
public static class DivideDecimals implements SimpleFunction {
@Param
DecimalHolder leftHolder;
@Param
DecimalHolder rightHolder;
@Output
DecimalHolder resultHolder;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext functionErrorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
// Byte offsets -> 16-byte element indices before reading each operand.
int index = (leftHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(leftHolder.buffer, index, leftHolder.scale);
index = (rightHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(rightHolder.buffer, index, rightHolder.scale);
// Result type (precision/scale) follows Gandiva's DIVIDE derivation rules.
org.apache.arrow.vector.types.pojo.ArrowType.Decimal resultTypeForOperation = org.apache.arrow.gandiva.evaluator.DecimalTypeUtil.getResultTypeForOperation(org.apache.arrow.gandiva.evaluator.DecimalTypeUtil.OperationType.DIVIDE,
new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(leftHolder.precision, leftHolder.scale),
new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(rightHolder.precision, rightHolder.scale));
resultHolder.precision = resultTypeForOperation.getPrecision();
resultHolder.scale = resultTypeForOperation.getScale();
// Widen the dividend so quotientScale = leftScale - rightScale == resultScale.
if (resultHolder.scale > leftHolder.scale - rightHolder.scale) {
left = left.setScale(resultHolder.scale + rightHolder.scale, java.math.BigDecimal.ROUND_UNNECESSARY);
}
java.math.BigInteger leftUnscaled = left.unscaledValue();
java.math.BigInteger rightUnscaled = right.unscaledValue();
java.math.BigInteger[] quotientAndRemainder = leftUnscaled.divideAndRemainder(rightUnscaled);
java.math.BigInteger resultUnscaled = quotientAndRemainder[0];
// Round half away from zero: if 2*|remainder| >= |divisor|, bump the
// quotient by one in the direction of the true result (sign from the
// signum XOR trick: +1 when operand signs match, -1 when they differ).
if (quotientAndRemainder[1].abs().multiply(java.math.BigInteger.valueOf(2)).compareTo
(rightUnscaled.abs()) >= 0) {
resultUnscaled = resultUnscaled.add(java.math.BigInteger.valueOf((leftUnscaled.signum() ^
rightUnscaled.signum()) + 1));
}
java.math.BigDecimal result = new java.math.BigDecimal(resultUnscaled, resultHolder.scale);
result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
} catch (RuntimeException e) {
throw functionErrorContext.error(e)
.build();
}
resultHolder.buffer = buffer;
}
}
/**
 * Decimal modulo ("mod"): aligns both operands to the larger scale, then takes
 * the remainder of the unscaled BigIntegers. As with BigInteger.remainder, the
 * result carries the dividend's sign.
 */
@SuppressWarnings("unused")
@FunctionTemplate(names = {"modulo", "mod"}, derivation = OutputDerivation.DecimalMod.class, scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL)
public static class ModuloFunction implements SimpleFunction {
@Param
DecimalHolder leftHolder;
@Param
DecimalHolder rightHolder;
@Output
DecimalHolder resultHolder;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext functionErrorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
// Byte offsets -> 16-byte element indices before reading each operand.
int index = (leftHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(leftHolder.buffer, index, leftHolder.scale);
index = (rightHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(rightHolder.buffer, index, rightHolder.scale);
// Result type (precision/scale) follows Gandiva's MOD derivation rules.
org.apache.arrow.vector.types.pojo.ArrowType.Decimal resultTypeForOperation = org.apache.arrow.gandiva.evaluator.DecimalTypeUtil.getResultTypeForOperation(org.apache.arrow.gandiva.evaluator.DecimalTypeUtil.OperationType.MOD,
new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(leftHolder.precision, leftHolder.scale),
new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(rightHolder.precision, rightHolder.scale));
resultHolder.precision = resultTypeForOperation.getPrecision();
resultHolder.scale = resultTypeForOperation.getScale();
// Align operands to a common scale so the unscaled remainder is meaningful.
if (leftHolder.scale < rightHolder.scale) {
left = left.setScale(rightHolder.scale, java.math.BigDecimal.ROUND_UNNECESSARY);
} else {
right = right.setScale(leftHolder.scale, java.math.BigDecimal.ROUND_UNNECESSARY);
}
java.math.BigInteger leftUnscaled = left.unscaledValue();
java.math.BigInteger rightUnscaled = right.unscaledValue();
java.math.BigInteger remainder = leftUnscaled.remainder(rightUnscaled);
java.math.BigDecimal result = new java.math.BigDecimal(remainder, resultHolder.scale);
result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
} catch (RuntimeException e) {
throw functionErrorContext.error(e)
.build();
}
resultHolder.buffer = buffer;
}
}
@SuppressWarnings("unused")
@FunctionTemplate(names = {"equal", "==", "="},
scope = FunctionScope.SIMPLE,
nulls = NullHandling.NULL_IF_NULL)
public static class EqualsDecimals implements SimpleFunction {
@Param
DecimalHolder leftHolder;
@Param
DecimalHolder rightHolder;
@Output
BitHolder resultHolder;
@Override
public void setup() {
}
@Override
public void eval() {
int index = (leftHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(leftHolder.buffer, index, leftHolder.scale);
index = (rightHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(rightHolder.buffer, index, rightHolder.scale);
resultHolder.value = (left.compareTo(right) == 0) ? 1 : 0;
}
}
@SuppressWarnings("unused")
@FunctionTemplate(
names = {"not_equal", "<>", "!="},
scope = FunctionScope.SIMPLE,
nulls = NullHandling.NULL_IF_NULL)
public static class NotEqualsDecimals implements SimpleFunction {
@Param
DecimalHolder leftHolder;
@Param
DecimalHolder rightHolder;
@Output
BitHolder resultHolder;
@Override
public void setup() {
}
@Override
public void eval() {
int index = (leftHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(leftHolder.buffer, index, leftHolder.scale);
index = (rightHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(rightHolder.buffer, index, rightHolder.scale);
resultHolder.value = (left.compareTo(right) != 0) ? 1 : 0;
}
}
@SuppressWarnings("unused")
@FunctionTemplate(
names = {"less_than", "<"},
scope = FunctionScope.SIMPLE,
nulls = NullHandling.NULL_IF_NULL)
public static class LessThanDecimals implements SimpleFunction {
@Param
DecimalHolder leftHolder;
@Param
DecimalHolder rightHolder;
@Output
BitHolder resultHolder;
@Override
public void setup() {
}
@Override
public void eval() {
int index = (leftHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(leftHolder.buffer, index, leftHolder.scale);
index = (rightHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(rightHolder.buffer, index, rightHolder.scale);
resultHolder.value = (left.compareTo(right) < 0) ? 1 : 0;
}
}
@SuppressWarnings("unused")
@FunctionTemplate(
names = {"less_than_or_equal_to", "<="},
scope = FunctionScope.SIMPLE,
nulls = NullHandling.NULL_IF_NULL)
public static class LessThanEqDecimals implements SimpleFunction {
@Param
DecimalHolder leftHolder;
@Param
DecimalHolder rightHolder;
@Output
BitHolder resultHolder;
@Override
public void setup() {
}
@Override
public void eval() {
int index = (leftHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(leftHolder.buffer, index, leftHolder.scale);
index = (rightHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(rightHolder.buffer, index, rightHolder.scale);
resultHolder.value = (left.compareTo(right) <= 0) ? 1 : 0;
}
}
@SuppressWarnings("unused")
@FunctionTemplate(
names = {"greater_than", ">"},
scope = FunctionScope.SIMPLE,
nulls = NullHandling.NULL_IF_NULL)
public static class GreaterThanDecimals implements SimpleFunction {
@Param
DecimalHolder leftHolder;
@Param
DecimalHolder rightHolder;
@Output
BitHolder resultHolder;
@Override
public void setup() {
}
@Override
public void eval() {
int index = (leftHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(leftHolder.buffer, index, leftHolder.scale);
index = (rightHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(rightHolder.buffer, index, rightHolder.scale);
resultHolder.value = (left.compareTo(right) > 0) ? 1 : 0;
}
}
@SuppressWarnings("unused")
@FunctionTemplate(
names = {"greater_than_or_equal_to", ">="},
scope = FunctionScope.SIMPLE,
nulls = NullHandling.NULL_IF_NULL)
public static class GreaterThanEqDecimals implements SimpleFunction {
@Param
DecimalHolder leftHolder;
@Param
DecimalHolder rightHolder;
@Output
BitHolder resultHolder;
@Override
public void setup() {
}
@Override
public void eval() {
int index = (leftHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal left = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(leftHolder.buffer, index, leftHolder.scale);
index = (rightHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal right = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(rightHolder.buffer, index, rightHolder.scale);
resultHolder.value = (left.compareTo(right) >= 0) ? 1 : 0;
}
}
@SuppressWarnings("unused")
@FunctionTemplate(name = "abs", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL, derivation = OutputDerivation.DecimalMax.class)
public static class AbsDecimal implements SimpleFunction {
@Param
DecimalHolder inputHolder;
@Output
DecimalHolder resultHolder;
@Inject
ArrowBuf buffer;
@Inject
FunctionErrorContext functionErrorContext;
@Override
public void setup() {
buffer = buffer.reallocIfNeeded(16);
}
@Override
public void eval() {
int index = (inputHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
java.math.BigDecimal input = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(inputHolder.buffer, index, inputHolder.scale);
java.math.BigDecimal result = input.abs();
try {
org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
} catch (RuntimeException e) {
throw functionErrorContext.error(e)
.build();
}
resultHolder.buffer = buffer;
resultHolder.precision = inputHolder.precision;
resultHolder.scale = inputHolder.scale;
}
}
public static java.math.BigDecimal round(BigDecimal input, int scale, java.math.RoundingMode roundingMode) {
if (scale < 0) {
return com.dremio.exec.expr.fn.impl.DecimalFunctions.roundWithNegativeScale(input, scale, roundingMode);
}
return com.dremio.exec.expr.fn.impl.DecimalFunctions.roundWithPositiveScale(input, scale, roundingMode);
}
  // scale is negative
  private static BigDecimal roundWithNegativeScale(BigDecimal input, int scale, java.math.RoundingMode roundingMode) {
    // First drop the fractional part using the caller's rounding mode.
    java.math.BigDecimal inputNoFractional = input.setScale(0, roundingMode);
    // Then zero the low (-scale) integer digits by subtracting the remainder
    // modulo 10^(-scale). The BigDecimal is deliberately built with the
    // negative scale: callers (e.g. RoundDecimalWithScale.eval) write only the
    // unscaled value into the Arrow buffer and take the output scale from the
    // type derivation, so the unscaled digits are what matters here.
    java.math.BigDecimal result = new java.math.BigDecimal(inputNoFractional.unscaledValue()
        .subtract(inputNoFractional.unscaledValue().remainder(java.math.BigInteger.TEN.pow(-scale))), scale);
    result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
    return result;
  }
public static BigDecimal roundWithPositiveScale(BigDecimal input, int scale, java.math
.RoundingMode roundingMode) {
java.math.BigDecimal result = input.setScale(scale, roundingMode);
result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
return result;
}
  /**
   * round(decimal, scale): HALF_UP rounding of a 16-byte decimal to an
   * explicit target scale. Output precision/scale follow the
   * DecimalSetScale derivation named in the annotation.
   */
  @SuppressWarnings("unused")
  @FunctionTemplate(name = "round", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL, derivation = OutputDerivation.DecimalSetScale.class)
  public static class RoundDecimalWithScale implements SimpleFunction {
    @Param
    DecimalHolder inputHolder;
    // Target scale; must be a compile-time constant in the query.
    @Param(constant = true)
    IntHolder scale;
    @Output
    DecimalHolder resultHolder;
    // Scratch buffer that will own the result decimal.
    @Inject
    ArrowBuf buffer;
    @Inject
    FunctionErrorContext functionErrorContext;
    @Override
    public void setup() {
      buffer = buffer.reallocIfNeeded(16);
    }
    @Override
    public void eval() {
      // 'start' is a byte offset; convert to an element index before decoding.
      int index = (inputHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
      java.math.BigDecimal input = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(inputHolder.buffer, index, inputHolder.scale);
      java.math.BigDecimal result = com.dremio.exec.expr.fn.impl.DecimalFunctions.round(input, scale.value, java.math.RoundingMode.HALF_UP);
      result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
      try {
        org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
      } catch (RuntimeException e) {
        throw functionErrorContext.error(e)
          .build();
      }
      resultHolder.buffer = buffer;
      // Recompute precision/scale with the same derivation rule declared on
      // the @FunctionTemplate, so holder metadata matches the derived type.
      com.dremio.common.expression.CompleteType outputType =
        com.dremio.exec.expr.fn.OutputDerivation.DECIMAL_SET_SCALE.getOutputType(
          com.dremio.common.expression.CompleteType.DECIMAL,
          java.util.Arrays.asList(
            new com.dremio.common.expression.ValueExpressions.DecimalExpression(input, inputHolder.precision, inputHolder.scale),
            new com.dremio.common.expression.ValueExpressions.IntExpression(scale.value)));
      resultHolder.scale = outputType.getScale();
      resultHolder.precision = outputType.getPrecision();
    }
  }
  /**
   * round(decimal): HALF_UP rounding of a 16-byte decimal to scale 0.
   * Output precision/scale follow the DecimalZeroScale derivation.
   */
  @SuppressWarnings("unused")
  @FunctionTemplate(name = "round", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL, derivation = OutputDerivation.DecimalZeroScale.class)
  public static class RoundDecimal implements SimpleFunction {
    @Param
    DecimalHolder inputHolder;
    @Output
    DecimalHolder resultHolder;
    // Scratch buffer that will own the result decimal.
    @Inject
    ArrowBuf buffer;
    @Inject
    FunctionErrorContext functionErrorContext;
    @Override
    public void setup() {
      buffer = buffer.reallocIfNeeded(16);
    }
    @Override
    public void eval() {
      // 'start' is a byte offset; convert to an element index before decoding.
      int index = (inputHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
      java.math.BigDecimal input = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(inputHolder.buffer, index, inputHolder.scale);
      java.math.BigDecimal result = com.dremio.exec.expr.fn.impl.DecimalFunctions.round(input, 0, java.math.RoundingMode.HALF_UP);
      result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
      try {
        org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
      } catch (RuntimeException e) {
        throw functionErrorContext.error(e)
          .build();
      }
      resultHolder.buffer = buffer;
      // Recompute precision/scale with the same derivation rule declared on
      // the @FunctionTemplate, so holder metadata matches the derived type.
      com.dremio.common.expression.CompleteType outputType =
        com.dremio.exec.expr.fn.OutputDerivation.DECIMAL_ZERO_SCALE.getOutputType(
          com.dremio.common.expression.CompleteType.DECIMAL,
          java.util.Arrays.asList(
            new com.dremio.common.expression.ValueExpressions.DecimalExpression(input, inputHolder.precision, inputHolder.scale)));
      resultHolder.scale = outputType.getScale();
      resultHolder.precision = outputType.getPrecision();
    }
  }
  /**
   * truncate(decimal, scale): rounds toward zero (RoundingMode.DOWN) to an
   * explicit target scale. Output precision/scale follow DecimalSetScale.
   */
  @SuppressWarnings("unused")
  @FunctionTemplate(names = {"truncate", "trunc"}, scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL, derivation = OutputDerivation.DecimalSetScale.class)
  public static class TruncateDecimalWithScale implements SimpleFunction {
    @Param
    DecimalHolder inputHolder;
    // Target scale; must be a compile-time constant in the query.
    @Param(constant = true)
    IntHolder scale;
    @Output
    DecimalHolder resultHolder;
    // Scratch buffer that will own the result decimal.
    @Inject
    ArrowBuf buffer;
    @Inject
    FunctionErrorContext functionErrorContext;
    @Override
    public void setup() {
      buffer = buffer.reallocIfNeeded(16);
    }
    @Override
    public void eval() {
      // 'start' is a byte offset; convert to an element index before decoding.
      int index = (inputHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
      java.math.BigDecimal input = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(inputHolder.buffer, index, inputHolder.scale);
      // DOWN = truncation toward zero.
      java.math.BigDecimal result = com.dremio.exec.expr.fn.impl.DecimalFunctions.round(input, scale.value, java.math.RoundingMode.DOWN);
      result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
      try {
        org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
      } catch (RuntimeException e) {
        throw functionErrorContext.error(e)
          .build();
      }
      resultHolder.buffer = buffer;
      // Recompute precision/scale with the same derivation rule declared on
      // the @FunctionTemplate, so holder metadata matches the derived type.
      com.dremio.common.expression.CompleteType outputType =
        com.dremio.exec.expr.fn.OutputDerivation.DECIMAL_SET_SCALE.getOutputType(
          com.dremio.common.expression.CompleteType.DECIMAL,
          java.util.Arrays.asList(
            new com.dremio.common.expression.ValueExpressions.DecimalExpression(input, inputHolder.precision, inputHolder.scale),
            new com.dremio.common.expression.ValueExpressions.IntExpression(scale.value)));
      resultHolder.scale = outputType.getScale();
      resultHolder.precision = outputType.getPrecision();
    }
  }
  /**
   * truncate(decimal): rounds toward zero (RoundingMode.DOWN) to scale 0.
   * Output precision/scale follow the DecimalZeroScale derivation.
   */
  @SuppressWarnings("unused")
  @FunctionTemplate(names = {"truncate", "trunc"}, scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL, derivation = OutputDerivation.DecimalZeroScale.class)
  public static class TruncateDecimal implements SimpleFunction {
    @Param
    DecimalHolder inputHolder;
    @Output
    DecimalHolder resultHolder;
    // Scratch buffer that will own the result decimal.
    @Inject
    ArrowBuf buffer;
    @Inject
    FunctionErrorContext functionErrorContext;
    @Override
    public void setup() {
      buffer = buffer.reallocIfNeeded(16);
    }
    @Override
    public void eval() {
      // 'start' is a byte offset; convert to an element index before decoding.
      int index = (inputHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
      java.math.BigDecimal input = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(inputHolder.buffer, index, inputHolder.scale);
      // DOWN = truncation toward zero.
      java.math.BigDecimal result = com.dremio.exec.expr.fn.impl.DecimalFunctions.round(input, 0, java.math.RoundingMode.DOWN);
      result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
      try {
        org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
      } catch (RuntimeException e) {
        throw functionErrorContext.error(e)
          .build();
      }
      resultHolder.buffer = buffer;
      // Recompute precision/scale with the same derivation rule declared on
      // the @FunctionTemplate, so holder metadata matches the derived type.
      com.dremio.common.expression.CompleteType outputType =
        com.dremio.exec.expr.fn.OutputDerivation.DECIMAL_ZERO_SCALE.getOutputType(
          com.dremio.common.expression.CompleteType.DECIMAL,
          java.util.Arrays.asList(
            new com.dremio.common.expression.ValueExpressions.DecimalExpression(input, inputHolder.precision, inputHolder.scale)));
      resultHolder.scale = outputType.getScale();
      resultHolder.precision = outputType.getPrecision();
    }
  }
  /**
   * ceil(decimal): rounds toward positive infinity (RoundingMode.CEILING)
   * to scale 0. Output precision/scale follow DecimalZeroScale.
   */
  @SuppressWarnings("unused")
  @FunctionTemplate(name = "ceil", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL, derivation = OutputDerivation.DecimalZeroScale.class)
  public static class CeilDecimal implements SimpleFunction {
    @Param
    DecimalHolder inputHolder;
    @Output
    DecimalHolder resultHolder;
    // Scratch buffer that will own the result decimal.
    @Inject
    ArrowBuf buffer;
    @Inject
    FunctionErrorContext functionErrorContext;
    @Override
    public void setup() {
      buffer = buffer.reallocIfNeeded(16);
    }
    @Override
    public void eval() {
      // 'start' is a byte offset; convert to an element index before decoding.
      int index = (inputHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
      java.math.BigDecimal input = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(inputHolder.buffer, index, inputHolder.scale);
      java.math.BigDecimal result = com.dremio.exec.expr.fn.impl.DecimalFunctions.round(input, 0, java.math.RoundingMode.CEILING);
      result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
      try {
        org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
      } catch (RuntimeException e) {
        throw functionErrorContext.error(e)
          .build();
      }
      resultHolder.buffer = buffer;
      // Recompute precision/scale with the same derivation rule declared on
      // the @FunctionTemplate, so holder metadata matches the derived type.
      com.dremio.common.expression.CompleteType outputType =
        com.dremio.exec.expr.fn.OutputDerivation.DECIMAL_ZERO_SCALE.getOutputType(
          com.dremio.common.expression.CompleteType.DECIMAL,
          java.util.Arrays.asList(
            new com.dremio.common.expression.ValueExpressions.DecimalExpression(input, inputHolder.precision, inputHolder.scale)));
      resultHolder.scale = outputType.getScale();
      resultHolder.precision = outputType.getPrecision();
    }
  }
  /**
   * floor(decimal): rounds toward negative infinity (RoundingMode.FLOOR)
   * to scale 0. Output precision/scale follow DecimalZeroScale.
   */
  @SuppressWarnings("unused")
  @FunctionTemplate(name = "floor", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL, derivation = OutputDerivation.DecimalZeroScale.class)
  public static class FloorDecimal implements SimpleFunction {
    @Param
    DecimalHolder inputHolder;
    @Output
    DecimalHolder resultHolder;
    // Scratch buffer that will own the result decimal.
    @Inject
    ArrowBuf buffer;
    @Inject
    FunctionErrorContext functionErrorContext;
    @Override
    public void setup() {
      buffer = buffer.reallocIfNeeded(16);
    }
    @Override
    public void eval() {
      // 'start' is a byte offset; convert to an element index before decoding.
      int index = (inputHolder.start / (org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH));
      java.math.BigDecimal input = org.apache.arrow.vector.util.DecimalUtility.getBigDecimalFromArrowBuf(inputHolder.buffer, index, inputHolder.scale);
      java.math.BigDecimal result = com.dremio.exec.expr.fn.impl.DecimalFunctions.round(input, 0, java.math.RoundingMode.FLOOR);
      result = com.dremio.exec.expr.fn.impl.DecimalFunctions.checkOverflow(result);
      try {
        org.apache.arrow.vector.util.DecimalUtility.writeBigDecimalToArrowBuf(result, buffer, 0);
      } catch (RuntimeException e) {
        throw functionErrorContext.error(e)
          .build();
      }
      resultHolder.buffer = buffer;
      // Recompute precision/scale with the same derivation rule declared on
      // the @FunctionTemplate, so holder metadata matches the derived type.
      com.dremio.common.expression.CompleteType outputType =
        com.dremio.exec.expr.fn.OutputDerivation.DECIMAL_ZERO_SCALE.getOutputType(
          com.dremio.common.expression.CompleteType.DECIMAL,
          java.util.Arrays.asList(
            new com.dremio.common.expression.ValueExpressions.DecimalExpression(input, inputHolder.precision, inputHolder.scale)));
      resultHolder.scale = outputType.getScale();
      resultHolder.precision = outputType.getPrecision();
    }
  }
}
| 1.195313 | 1 |
src/main/java/edu/javavt17Second/config/WebConfig.java | intusmortius/javavt17Second | 0 | 57 | package edu.javavt17Second.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.ViewResolver;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import org.springframework.web.servlet.view.InternalResourceViewResolver;
import org.springframework.web.servlet.view.ResourceBundleViewResolver;
@Configuration
@ComponentScan({"edu.javavt17Second"})
@EnableWebMvc
public class WebConfig extends WebMvcConfigurerAdapter {
@Bean
public ViewResolver getViewResolver(){
InternalResourceViewResolver resolver = new InternalResourceViewResolver();
resolver.setPrefix("/pages/");
resolver.setSuffix(".jsp");
resolver.setOrder(2);
return resolver;
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("/resources/**").addResourceLocations("/resources/");
if (!registry.hasMappingForPattern("/webjars/**")) {
registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
}
}
} | 1.054688 | 1 |
src/main/java/com/facebook/LinkBench/LinkBenchRequest.java | Jeffery-Song/linkbench | 0 | 65 | /*
* Copyright 2012, Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.LinkBench;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Properties;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import com.facebook.LinkBench.RealDistribution.DistributionType;
import com.facebook.LinkBench.distributions.AccessDistributions;
import com.facebook.LinkBench.distributions.AccessDistributions.AccessDistribution;
import com.facebook.LinkBench.distributions.ID2Chooser;
import com.facebook.LinkBench.distributions.ID2ChooserBase;
import com.facebook.LinkBench.distributions.AliID2Chooser;
import com.facebook.LinkBench.distributions.LogNormalDistribution;
import com.facebook.LinkBench.distributions.ProbabilityDistribution;
import com.facebook.LinkBench.generators.DataGenerator;
import com.facebook.LinkBench.stats.LatencyStats;
import com.facebook.LinkBench.stats.SampledStats;
import com.facebook.LinkBench.util.ClassLoadUtil;
import com.facebook.LinkBench.measurements.Measurements;
public class LinkBenchRequest implements Runnable {
  // NOTE(review): never written in this chunk — presumably set when the
  // benchmark starts; confirm where it is assigned.
  private static long start_time;
  // Only used for the ali workload: shared counter of the next node id to
  // allocate, initialized once to maxid1 in the constructor.
  private static AtomicLong _nodeid;
  private final Logger logger = Logger.getLogger(ConfigUtil.LINKBENCH_LOGGER);
  // Configuration this requester was built from.
  Properties props;
  LinkStore linkStore;
  NodeStore nodeStore;
  RequestProgress progressTracker;
  // Whether to stop after a fixed duration instead of a request count.
  boolean use_duration;
  int duration;
  long numRequests;
  /** Requests per second: <= 0 for unlimited rate */
  private long requestrate;
  /** Maximum number of failed requests: < 0 for unlimited */
  private long maxFailedRequests;
  /**
   * Time to run benchmark for before collecting stats. Allows
   * caches, etc to warm up.
   */
  private long warmupTime;
  /** Maximum time to run benchmark for, not including warmup time */
  long maxTime;
  int nrequesters;
  int requesterID;
  // id1 range [startid1, maxid1) this benchmark operates over.
  long maxid1;
  long startid1;
  Level debuglevel;
  // Reporting intervals, both in milliseconds.
  long displayFreq_ms;
  long progressFreq_ms;
  String dbid;
  // True when startid1 + 1 == maxid1 (single-row association test).
  boolean singleAssoc = false;
  // Control data generation settings
  private LogNormalDistribution linkDataSize;
  private DataGenerator linkAddDataGen;
  private DataGenerator linkUpDataGen;
  private LogNormalDistribution nodeDataSize;
  private DataGenerator nodeAddDataGen;
  private DataGenerator nodeUpDataGen;
  // Cumulative percentages: each field is the upper bound of its operation's
  // slice of [0, 100); see initRequestProbabilities().
  double pc_addlink;
  double pc_deletelink;
  double pc_updatelink;
  double pc_countlink;
  double pc_getlink;
  double pc_getlinklist;
  double pc_addnode;
  double pc_deletenode;
  double pc_updatenode;
  double pc_getnode;
  double pc_ali_login;
  double pc_ali_reg;
  double pc_ali_pay;
  double pc_ali_get_fan;
  double pc_ali_get_follow;
  double pc_ali_recom;
  double pc_ali_follow;
  double pc_ali_unfollow;
  // Chance of doing historical range query
  double p_historical_getlinklist;
  // True when running the ali workload variant (config key "is_ali").
  boolean is_ali;
private static class HistoryKey {
public final long id1;
public final long link_type;
public HistoryKey(long id1, long link_type) {
super();
this.id1 = id1;
this.link_type = link_type;
}
public HistoryKey(Link l) {
this(l.id1, l.link_type);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (id1 ^ (id1 >>> 32));
result = prime * result + (int) (link_type ^ (link_type >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof HistoryKey))
return false;
HistoryKey other = (HistoryKey) obj;
return id1 == other.id1 && link_type == other.link_type;
}
}
  // Cache of last link in lists where full list wasn't retrieved
  ArrayList<Link> listTailHistory;
  // Index into listTailHistory keyed by (id1, link_type), to avoid duplicates
  HashMap<HistoryKey, Integer> listTailHistoryIndex;
  // Limit of cache size (hardcoded to 2048 in the constructor)
  private int listTailHistoryLimit;
  // Probability distribution for number of ids in a multiget
  ProbabilityDistribution multigetDist;
  // Statistics
  SampledStats stats;
  LatencyStats latencyStats;
  // Other informational counters
  long numfound = 0;
  long numnotfound = 0;
  long numHistoryQueries = 0;
  Measurements _measurements;
  /**
   * Random number generator used for generating workload. If
   * initialized with same seed, should generate same sequence of requests
   * so that tests and benchmarks are repeatable.
   */
  Random rng;
  // Last node id accessed
  long lastNodeId;
  long requestsDone = 0;
  long errors = 0;
  boolean aborted;
  // Access distributions
  private AccessDistribution writeDist; // link writes
  private AccessDistribution writeDistUncorr; // to blend with link writes
  private double writeDistUncorrBlend; // fraction of writes using writeDistUncorr
  private AccessDistribution readDist; // link reads
  private AccessDistribution readDistUncorr; // to blend with link reads
  private double readDistUncorrBlend; // fraction of reads using readDistUncorr
  private AccessDistribution nodeReadDist; // node reads
  private AccessDistribution nodeUpdateDist; // node writes
  private AccessDistribution nodeDeleteDist; // node deletes
  private ID2ChooserBase id2chooser;
  /**
   * Builds one requester thread's workload state from configuration:
   * reads run limits and the id1 range, wires up probability/access
   * distributions and data generators, and initializes shared counters.
   *
   * @param linkStore   store for link operations (required)
   * @param nodeStore   store for node operations; may be null if the config
   *                    assigns zero probability to node operations
   * @param props       benchmark configuration
   * @param requesterID index of this requester, in [0, nrequesters)
   * @throws LinkBenchConfigError if the id range or probabilities are invalid
   */
  public LinkBenchRequest(LinkStore linkStore,
                          NodeStore nodeStore,
                          Properties props,
                          LatencyStats latencyStats,
                          PrintStream csvStreamOut,
                          RequestProgress progressTracker,
                          Random rng,
                          int requesterID,
                          int nrequesters) {
    assert(linkStore != null);
    if (requesterID < 0 || requesterID >= nrequesters) {
      throw new IllegalArgumentException("Bad requester id "
          + requesterID + "/" + nrequesters);
    }
    this.linkStore = linkStore;
    this.nodeStore = nodeStore;
    this.props = props;
    this.latencyStats = latencyStats;
    this.progressTracker = progressTracker;
    this.rng = rng;
    this.nrequesters = nrequesters;
    this.requesterID = requesterID;
    is_ali = ConfigUtil.getBool(props, "is_ali");
    debuglevel = ConfigUtil.getDebugLevel(props);
    dbid = ConfigUtil.getPropertyRequired(props, Config.DBID);
    numRequests = ConfigUtil.getLong(props, Config.NUM_REQUESTS);
    use_duration = ConfigUtil.getBool(props, Config.USE_DURATION, true);
    duration = ConfigUtil.getInt(props, Config.DURATION, 30);
    requestrate = ConfigUtil.getLong(props, Config.REQUEST_RATE, 0L);
    maxFailedRequests = ConfigUtil.getLong(props, Config.MAX_FAILED_REQUESTS, 0L);
    warmupTime = Math.max(0, ConfigUtil.getLong(props, Config.WARMUP_TIME, 0L));
    maxTime = ConfigUtil.getLong(props, Config.MAX_TIME);
    maxid1 = ConfigUtil.getLong(props, Config.MAX_ID);
    startid1 = ConfigUtil.getLong(props, Config.MIN_ID);
    // math functions may cause problems for id1 < 1
    if (startid1 <= 0) {
      throw new LinkBenchConfigError("startid1 must be >= 1");
    }
    if (maxid1 <= startid1) {
      throw new LinkBenchConfigError("maxid1 must be > startid1");
    }
    // is this a single assoc test?
    if (startid1 + 1 == maxid1) {
      singleAssoc = true;
      logger.info("Testing single row assoc read.");
    }
    initRequestProbabilities(props);
    initLinkDataGeneration(props);
    initLinkRequestDistributions(props, requesterID, nrequesters);
    // Node-op machinery is only initialized when some node operation has a
    // non-zero probability (cumulative getnode bound exceeds getlinklist).
    if (pc_getnode > pc_getlinklist) {
      // Load stuff for node workload if needed
      if (nodeStore == null) {
        throw new IllegalArgumentException("nodeStore not provided but non-zero " +
            "probability of node operation");
      }
      initNodeDataGeneration(props);
      initNodeRequestDistributions(props);
    }
    displayFreq_ms = ConfigUtil.getLong(props, Config.DISPLAY_FREQ, 60L) * 1000;
    progressFreq_ms = ConfigUtil.getLong(props, Config.PROGRESS_FREQ, 6L) * 1000;
    int maxsamples = ConfigUtil.getInt(props, Config.MAX_STAT_SAMPLES);
    stats = new SampledStats(requesterID, maxsamples, csvStreamOut);
    listTailHistoryLimit = 2048; // Hardcoded limit for now
    listTailHistory = new ArrayList<Link>(listTailHistoryLimit);
    listTailHistoryIndex = new HashMap<HistoryKey, Integer>();
    p_historical_getlinklist = ConfigUtil.getDouble(props,
        Config.PR_GETLINKLIST_HISTORY, 0.0) / 100;
    lastNodeId = startid1;
    _measurements=Measurements.getMeasurements();
    // Initialize the shared ali node-id counter exactly once across threads.
    synchronized(this.getClass()) {
      if (_nodeid == null) {
        _nodeid = new AtomicLong(maxid1);
      }
    }
  }
private void initRequestProbabilities(Properties props) {
pc_addlink = ConfigUtil.getDouble(props, Config.PR_ADD_LINK);
pc_deletelink = pc_addlink + ConfigUtil.getDouble(props, Config.PR_DELETE_LINK);
pc_updatelink = pc_deletelink + ConfigUtil.getDouble(props, Config.PR_UPDATE_LINK);
pc_countlink = pc_updatelink + ConfigUtil.getDouble(props, Config.PR_COUNT_LINKS);
pc_getlink = pc_countlink + ConfigUtil.getDouble(props, Config.PR_GET_LINK);
pc_getlinklist = pc_getlink + ConfigUtil.getDouble(props, Config.PR_GET_LINK_LIST);
pc_addnode = pc_getlinklist + ConfigUtil.getDouble(props, Config.PR_ADD_NODE, 0.0);
pc_updatenode = pc_addnode + ConfigUtil.getDouble(props, Config.PR_UPDATE_NODE, 0.0);
pc_deletenode = pc_updatenode + ConfigUtil.getDouble(props, Config.PR_DELETE_NODE, 0.0);
pc_getnode = pc_deletenode + ConfigUtil.getDouble(props, Config.PR_GET_NODE, 0.0);
pc_ali_login = pc_getnode + ConfigUtil.getDouble(props, Config.PR_ALI_LOGIN, 0.0);
pc_ali_reg = pc_ali_login + ConfigUtil.getDouble(props, Config.PR_ALI_REG, 0.0);
pc_ali_pay = pc_ali_reg + ConfigUtil.getDouble(props, Config.PR_ALI_PAY, 0.0);
pc_ali_get_fan = pc_ali_pay + ConfigUtil.getDouble(props, Config.PR_ALI_GET_FAN, 0.0);
pc_ali_get_follow = pc_ali_get_fan + ConfigUtil.getDouble(props, Config.PR_ALI_GET_FOLLOW, 0.0);
pc_ali_recom = pc_ali_get_follow + ConfigUtil.getDouble(props, Config.PR_ALI_RECOM, 0.0);
pc_ali_follow = pc_ali_recom + ConfigUtil.getDouble(props, Config.PR_ALI_FOLLOW, 0.0);
pc_ali_unfollow = pc_ali_follow + ConfigUtil.getDouble(props, Config.PR_ALI_UNFOLLOW, 0.0);
if (Math.abs(pc_ali_follow - 100.0) > 1e-5) {//compare real numbers
throw new LinkBenchConfigError("Percentages of request types do not " +
"add to 100, only " + pc_ali_follow + "!");
}
}
  /**
   * Loads the access distributions that choose which id1 each link read or
   * write targets, optional uncorrelated distributions to blend in, the id2
   * chooser (ali or standard variant), and the optional multiget size
   * distribution.
   *
   * @throws LinkBenchConfigError if a configured distribution class cannot
   *         be loaded
   */
  private void initLinkRequestDistributions(Properties props, int requesterID,
                                            int nrequesters) {
    writeDist = AccessDistributions.loadAccessDistribution(props,
        startid1, maxid1, DistributionType.LINK_WRITES);
    readDist = AccessDistributions.loadAccessDistribution(props,
        startid1, maxid1, DistributionType.LINK_READS);
    // Load uncorrelated distributions for blending if needed
    writeDistUncorr = null;
    if (props.containsKey(Config.WRITE_UNCORR_BLEND)) {
      // Ratio of queries to use uncorrelated. Convert from percentage
      writeDistUncorrBlend = ConfigUtil.getDouble(props,
          Config.WRITE_UNCORR_BLEND) / 100.0;
      if (writeDistUncorrBlend > 0.0) {
        writeDistUncorr = AccessDistributions.loadAccessDistribution(props,
            startid1, maxid1, DistributionType.LINK_WRITES_UNCORR);
      }
    }
    readDistUncorr = null;
    if (props.containsKey(Config.READ_UNCORR_BLEND)) {
      // Ratio of queries to use uncorrelated. Convert from percentage
      readDistUncorrBlend = ConfigUtil.getDouble(props,
          Config.READ_UNCORR_BLEND) / 100.0;
      if (readDistUncorrBlend > 0.0) {
        readDistUncorr = AccessDistributions.loadAccessDistribution(props,
            startid1, maxid1, DistributionType.LINK_READS_UNCORR);
      }
    }
    // The ali workload uses its own id2 selection strategy.
    if (is_ali) {
      id2chooser = new AliID2Chooser(props, startid1, maxid1, nrequesters, requesterID);
    } else {
      id2chooser = new ID2Chooser(props, startid1, maxid1,
          nrequesters, requesterID);
    }
    // Distribution of #id2s per multiget
    String multigetDistClass = props.getProperty(Config.LINK_MULTIGET_DIST);
    if (multigetDistClass != null && multigetDistClass.trim().length() != 0) {
      int multigetMin = ConfigUtil.getInt(props, Config.LINK_MULTIGET_DIST_MIN);
      int multigetMax = ConfigUtil.getInt(props, Config.LINK_MULTIGET_DIST_MAX);
      try {
        // Distribution class is configurable; instantiate reflectively.
        multigetDist = ClassLoadUtil.newInstance(multigetDistClass,
            ProbabilityDistribution.class);
        multigetDist.init(multigetMin, multigetMax, props,
            Config.LINK_MULTIGET_DIST_PREFIX);
      } catch (ClassNotFoundException e) {
        logger.error(e);
        throw new LinkBenchConfigError("Class" + multigetDistClass +
            " could not be loaded as ProbabilityDistribution");
      }
    } else {
      // No multiget distribution configured: multigets disabled.
      multigetDist = null;
    }
  }
/**
 * Configure the link payload size distribution and the data generators
 * used to fill link payloads on add and on update.
 *
 * @param props benchmark configuration properties
 * @throws LinkBenchConfigError if a data generator class cannot be loaded
 */
private void initLinkDataGeneration(Properties props) {
  try {
    // Log-normal payload sizes, capped at the store's maximum.
    double medLinkDataSize =
        ConfigUtil.getDouble(props, Config.LINK_DATASIZE);
    linkDataSize = new LogNormalDistribution();
    linkDataSize.init(0, LinkStore.MAX_LINK_DATA, medLinkDataSize,
        Config.LINK_DATASIZE_SIGMA);

    String addGenClass =
        ConfigUtil.getPropertyRequired(props, Config.LINK_ADD_DATAGEN);
    linkAddDataGen =
        ClassLoadUtil.newInstance(addGenClass, DataGenerator.class);
    linkAddDataGen.init(props, Config.LINK_ADD_DATAGEN_PREFIX);

    String upGenClass =
        ConfigUtil.getPropertyRequired(props, Config.LINK_UP_DATAGEN);
    linkUpDataGen =
        ClassLoadUtil.newInstance(upGenClass, DataGenerator.class);
    linkUpDataGen.init(props, Config.LINK_UP_DATAGEN_PREFIX);
  } catch (ClassNotFoundException ex) {
    logger.error(ex);
    throw new LinkBenchConfigError("Error loading data generator class: "
        + ex.getMessage());
  }
}
/**
 * Load the access distributions used to pick node ids for read, update
 * and delete operations.  A missing distribution is a configuration
 * error whenever the corresponding operation has non-zero probability.
 *
 * @param props benchmark configuration properties
 * @throws LinkBenchConfigError if a required distribution is not configured
 */
private void initNodeRequestDistributions(Properties props) {
  // The three loads were previously three verbatim-duplicated try/catch
  // blocks; factored into loadNodeDist (log messages preserved exactly).
  nodeReadDist = loadNodeDist(props, DistributionType.NODE_READS,
      "Node access distribution not configured: ",
      "Node read distribution not " +
      "configured but node read operations have non-zero probability");
  nodeUpdateDist = loadNodeDist(props, DistributionType.NODE_UPDATES,
      "Node access distribution not configured: ",
      "Node write distribution not " +
      "configured but node write operations have non-zero probability");
  nodeDeleteDist = loadNodeDist(props, DistributionType.NODE_DELETES,
      "Node delete distribution not configured: ",
      "Node delete distribution not " +
      "configured but node write operations have non-zero probability");
}

/**
 * Load one node access distribution, translating a missing configuration
 * into a LinkBenchConfigError with the supplied message.
 *
 * @param props      benchmark configuration properties
 * @param type       which distribution to load
 * @param infoPrefix prefix for the informational log line
 * @param errMsg     message for the rethrown configuration error
 * @return the loaded distribution
 */
private AccessDistribution loadNodeDist(Properties props,
    DistributionType type, String infoPrefix, String errMsg) {
  try {
    return AccessDistributions.loadAccessDistribution(props,
        startid1, maxid1, type);
  } catch (LinkBenchConfigError e) {
    // Not defined
    logger.info(infoPrefix + e.getMessage());
    throw new LinkBenchConfigError(errMsg);
  }
}
/**
 * Configure the node payload size distribution and the data generators
 * used to fill node payloads on add and on update.
 *
 * @param props benchmark configuration properties
 * @throws LinkBenchConfigError if a data generator class cannot be loaded
 */
private void initNodeDataGeneration(Properties props) {
  try {
    // Log-normal payload sizes, capped at the store's maximum.
    double medNodeDataSize =
        ConfigUtil.getDouble(props, Config.NODE_DATASIZE);
    nodeDataSize = new LogNormalDistribution();
    nodeDataSize.init(0, NodeStore.MAX_NODE_DATA, medNodeDataSize,
        Config.NODE_DATASIZE_SIGMA);

    String addGenClass = ConfigUtil.getPropertyRequired(props,
        Config.NODE_ADD_DATAGEN);
    nodeAddDataGen =
        ClassLoadUtil.newInstance(addGenClass, DataGenerator.class);
    nodeAddDataGen.init(props, Config.NODE_ADD_DATAGEN_PREFIX);

    String upGenClass = ConfigUtil.getPropertyRequired(props,
        Config.NODE_UP_DATAGEN);
    nodeUpDataGen =
        ClassLoadUtil.newInstance(upGenClass, DataGenerator.class);
    nodeUpDataGen.init(props, Config.NODE_UP_DATAGEN_PREFIX);
  } catch (ClassNotFoundException ex) {
    logger.error(ex);
    throw new LinkBenchConfigError("Error loading data generator class: "
        + ex.getMessage());
  }
}
/** @return the number of requests this thread has completed (post-warmup). */
public long getRequestsDone() {
  return requestsDone;
}
/** @return true if this requester gave up before completing its workload. */
public boolean didAbort() {
  return aborted;
}
/**
 * Choose an id1 for the next request according to the access distribution
 * configured for the given operation type.
 *
 * For link reads/writes, when an uncorrelated distribution is configured,
 * a random draw against the configured blend ratio decides whether the
 * correlated or the uncorrelated distribution is used for this request.
 *
 * @param type        which operation the id is being chosen for
 * @param previousId1 the previously used id1 (some distributions use it)
 * @return an id in [startid1, maxid1), shuffled into the real id space
 *         if the distribution defines a shuffler
 */
private long chooseRequestID(DistributionType type, long previousId1) {
  AccessDistribution dist;
  switch (type) {
  case LINK_READS:
    // Blend between distributions if needed
    if (readDistUncorr == null || rng.nextDouble() >= readDistUncorrBlend) {
      dist = readDist;
    } else {
      dist = readDistUncorr;
    }
    break;
  case LINK_WRITES:
    // Blend between distributions if needed
    if (writeDistUncorr == null || rng.nextDouble() >= writeDistUncorrBlend) {
      dist = writeDist;
    } else {
      dist = writeDistUncorr;
    }
    break;
  case LINK_WRITES_UNCORR:
    dist = writeDistUncorr;
    break;
  case NODE_READS:
    dist = nodeReadDist;
    break;
  case NODE_UPDATES:
    dist = nodeUpdateDist;
    break;
  case NODE_DELETES:
    dist = nodeDeleteDist;
    break;
  default:
    throw new RuntimeException("Unknown value for type: " + type);
  }
  long newid1 = dist.nextID(rng, previousId1);
  // Distribution responsible for generating number in range
  assert((newid1 >= startid1) && (newid1 < maxid1));
  if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
    logger.trace("id1 generated = " + newid1 +
      " for access distribution: " + dist.getClass().getName() + ": " +
      dist.toString());
  }
  if (dist.getShuffler() != null) {
    // Shuffle to go from position in space ranked from most to least accessed,
    // to the real id space
    newid1 = startid1 + dist.getShuffler().permute(newid1 - startid1);
  }
  return newid1;
}
/**
 * Randomly choose a single request type (by the cumulative probability
 * thresholds pc_addlink .. pc_ali_unfollow) and execute it, recording
 * latency and return-code statistics.
 *
 * @param recordStats If true, record latency and other stats.
 * @return true if successful, false on error
 */
private boolean oneRequest(boolean recordStats) {
  double r = rng.nextDouble() * 100.0;
  long starttime = 0;
  long endtime = 0;
  LinkBenchOp type = LinkBenchOp.UNKNOWN; // initialize to invalid value
  Link link = new Link();
  try {
    if (r <= pc_addlink) {
      // generate add request
      type = LinkBenchOp.ADD_LINK;
      link.id1 = chooseRequestID(DistributionType.LINK_WRITES, link.id1);
      link.link_type = id2chooser.chooseRandomLinkType(rng);
      link.id2 = id2chooser.chooseForOp(rng, link.id1, link.link_type,
          ID2Chooser.P_ADD_EXIST);
      link.visibility = LinkStore.VISIBILITY_DEFAULT;
      link.version = 0;
      link.time = System.currentTimeMillis();
      link.data = linkAddDataGen.fill(rng,
          new byte[(int)linkDataSize.choose(rng)]);
      starttime = System.nanoTime();
      // no inverses for now
      boolean alreadyExists = linkStore.addLink(dbid, link, true);
      boolean added = !alreadyExists;
      endtime = System.nanoTime();
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("addLink id1=" + link.id1 + " link_type="
            + link.link_type + " id2=" + link.id2 + " added=" + added);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_deletelink) {
      type = LinkBenchOp.DELETE_LINK;
      long id1 = chooseRequestID(DistributionType.LINK_WRITES, link.id1);
      long link_type = id2chooser.chooseRandomLinkType(rng);
      long id2 = id2chooser.chooseForOp(rng, id1, link_type,
          ID2Chooser.P_DELETE_EXIST);
      starttime = System.nanoTime();
      linkStore.deleteLink(dbid, id1, link_type, id2, true, // no inverse
          false);
      endtime = System.nanoTime();
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("deleteLink id1=" + id1 + " link_type=" + link_type
            + " id2=" + id2);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_updatelink) {
      type = LinkBenchOp.UPDATE_LINK;
      link.id1 = chooseRequestID(DistributionType.LINK_WRITES, link.id1);
      link.link_type = id2chooser.chooseRandomLinkType(rng);
      // Update one of the existing links
      link.id2 = id2chooser.chooseForOp(rng, link.id1, link.link_type,
          ID2Chooser.P_UPDATE_EXIST);
      link.visibility = LinkStore.VISIBILITY_DEFAULT;
      link.version = 0;
      link.time = System.currentTimeMillis();
      link.data = linkUpDataGen.fill(rng,
          new byte[(int)linkDataSize.choose(rng)]);
      starttime = System.nanoTime();
      // updates are implemented as an upsert via addLink; no inverses for now
      boolean found = linkStore.addLink(dbid, link, true);
      endtime = System.nanoTime();
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("updateLink id1=" + link.id1 + " link_type="
            + link.link_type + " id2=" + link.id2 + " found=" + found);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_countlink) {
      type = LinkBenchOp.COUNT_LINK;
      long id1 = chooseRequestID(DistributionType.LINK_READS, link.id1);
      long link_type = id2chooser.chooseRandomLinkType(rng);
      starttime = System.nanoTime();
      long count = linkStore.countLinks(dbid, id1, link_type);
      endtime = System.nanoTime();
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("countLink id1=" + id1 + " link_type=" + link_type
            + " count=" + count);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_getlink) {
      type = LinkBenchOp.MULTIGET_LINK;
      long id1 = chooseRequestID(DistributionType.LINK_READS, link.id1);
      long link_type = id2chooser.chooseRandomLinkType(rng);
      int nid2s = 1;
      if (multigetDist != null) {
        nid2s = (int)multigetDist.choose(rng);
      }
      long id2s[] = id2chooser.chooseMultipleForOp(rng, id1, link_type, nid2s,
          ID2Chooser.P_GET_EXIST);
      starttime = System.nanoTime();
      int found = getLink(id1, link_type, id2s);
      assert(found >= 0 && found <= nid2s);
      endtime = System.nanoTime();
      if (found > 0) {
        numfound += found;
      } else {
        numnotfound += nid2s - found;
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_getlinklist) {
      type = LinkBenchOp.GET_LINKS_LIST;
      Link links[];
      if (rng.nextDouble() < p_historical_getlinklist &&
          !this.listTailHistory.isEmpty()) {
        // Historical tail query.  NOTE(review): starttime/endtime are not
        // set on this path, so the recorded latency for tail queries is 0.
        links = getLinkListTail();
      } else {
        long id1 = chooseRequestID(DistributionType.LINK_READS, link.id1);
        long link_type = id2chooser.chooseRandomLinkType(rng);
        starttime = System.nanoTime();
        links = getLinkList(id1, link_type);
        endtime = System.nanoTime();
      }
      recordOpStats(type, starttime, endtime, links == null ? 1 : 0);
      if (recordStats && links != null) {
        _measurements.measure("LinkRangeSize", links.length);
      }
    } else if (r <= pc_addnode) {
      type = LinkBenchOp.ADD_NODE;
      Node newNode = createAddNode();
      starttime = System.nanoTime();
      lastNodeId = nodeStore.addNode(dbid, newNode);
      endtime = System.nanoTime();
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("addNode " + newNode);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_updatenode) {
      type = LinkBenchOp.UPDATE_NODE;
      // Choose an id that has previously been created (but might have
      // been since deleted)
      long upId = chooseRequestID(DistributionType.NODE_UPDATES,
          lastNodeId);
      // Generate new data randomly
      Node newNode = createUpdateNode(upId);
      starttime = System.nanoTime();
      boolean changed = nodeStore.updateNode(dbid, newNode);
      endtime = System.nanoTime();
      lastNodeId = upId;
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("updateNode " + newNode + " changed=" + changed);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_deletenode) {
      type = LinkBenchOp.DELETE_NODE;
      long idToDelete = chooseRequestID(DistributionType.NODE_DELETES,
          lastNodeId);
      starttime = System.nanoTime();
      boolean deleted = nodeStore.deleteNode(dbid, LinkStore.DEFAULT_NODE_TYPE,
          idToDelete);
      endtime = System.nanoTime();
      lastNodeId = idToDelete;
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("deleteNode " + idToDelete + " deleted=" + deleted);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_getnode) {
      type = LinkBenchOp.GET_NODE;
      starttime = System.nanoTime();
      long idToFetch = chooseRequestID(DistributionType.NODE_READS,
          lastNodeId);
      Node fetched = nodeStore.getNode(dbid, LinkStore.DEFAULT_NODE_TYPE, idToFetch);
      endtime = System.nanoTime();
      lastNodeId = idToFetch;
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        if (fetched == null) {
          logger.trace("getNode " + idToFetch + " not found");
        } else {
          logger.trace("getNode " + fetched);
        }
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_ali_login) {
      type = LinkBenchOp.ALI_LOGIN;
      // Choose an id that has previously been created (but might have
      // been since deleted)
      long upId = chooseRequestID(DistributionType.NODE_UPDATES,
          lastNodeId);
      starttime = System.nanoTime();
      long updated_nodes = nodeStore.aliLogin(upId);
      endtime = System.nanoTime();
      lastNodeId = upId;
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("login " + upId + " updated_nodes=" + updated_nodes);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_ali_reg) {
      type = LinkBenchOp.ALI_REG;
      Node newNode = createAddNode();
      newNode.id = _nodeid.getAndIncrement();
      // Register with a referrer only when the chooser says this node has one
      if (id2chooser.calcLinkCount(newNode.id, LinkBase.REFERRER_TYPE) == 1) {
        starttime = System.nanoTime();
        long referrer_id = id2chooser.chooseForOp(rng, newNode.id, LinkBase.REFERRER_TYPE, 1);
        nodeStore.aliRegRef(newNode, referrer_id);
        endtime = System.nanoTime();
      } else {
        starttime = System.nanoTime();
        nodeStore.aliReg(newNode);
        endtime = System.nanoTime();
      }
      lastNodeId = newNode.id;
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("register " + newNode);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_ali_pay) {
      type = LinkBenchOp.ALI_PAY;
      link.id1 = chooseRequestID(DistributionType.LINK_WRITES, link.id1);
      link.id2 = id2chooser.chooseForOp(rng, link.id1, LinkBase.TRANSFER_FAKE_LINK_TYPE,
          ID2Chooser.P_UPDATE_EXIST);
      starttime = System.nanoTime();
      // no inverses for now
      boolean alreadyExists = nodeStore.aliPay(link.id1, link.id2);
      boolean added = !alreadyExists;
      endtime = System.nanoTime();
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("ali pay id1=" + link.id1 + " id2=" + link.id2 + " added=" + added);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_ali_get_fan) {
      // taken from get link list
      type = LinkBenchOp.ALI_GET_FAN;
      Node nodes[];
      long id1 = chooseRequestID(DistributionType.LINK_READS, link.id1);
      // link_type value is unused here, but the call advances the shared
      // RNG stream, so it is kept for reproducibility
      long link_type = id2chooser.chooseRandomLinkType(rng);
      starttime = System.nanoTime();
      nodes = linkStore.aliGetFan(id1);
      endtime = System.nanoTime();
      recordOpStats(type, starttime, endtime, nodes == null ? 1 : 0);
    } else if (r <= pc_ali_get_follow) {
      // taken from get link list
      type = LinkBenchOp.ALI_GET_FOLLOW;
      Node nodes[];
      long id1 = chooseRequestID(DistributionType.LINK_READS, link.id1);
      // link_type unused; call kept for RNG-stream parity (see above)
      long link_type = id2chooser.chooseRandomLinkType(rng);
      starttime = System.nanoTime();
      nodes = linkStore.aliGetFollow(id1);
      endtime = System.nanoTime();
      recordOpStats(type, starttime, endtime, nodes == null ? 1 : 0);
    } else if (r <= pc_ali_recom) {
      // taken from get link list
      type = LinkBenchOp.ALI_RECOM;
      Node nodes[];
      long id1 = chooseRequestID(DistributionType.LINK_READS, link.id1);
      starttime = System.nanoTime();
      nodes = linkStore.aliRecom(id1);
      endtime = System.nanoTime();
      recordOpStats(type, starttime, endtime, nodes == null ? 1 : 0);
    } else if (r <= pc_ali_follow) {
      // taken from add link
      type = LinkBenchOp.ALI_FOLLOW;
      link.id1 = chooseRequestID(DistributionType.LINK_WRITES, link.id1);
      link.link_type = LinkBase.LINKBENCH_DEFAULT_TYPE;
      link.id2 = id2chooser.chooseForOp(rng, link.id1, link.link_type,
          ID2Chooser.P_ADD_EXIST);
      link.time = System.currentTimeMillis();
      starttime = System.nanoTime();
      // no inverses for now; result intentionally ignored
      linkStore.aliFollow(link);
      endtime = System.nanoTime();
      recordOpStats(type, starttime, endtime, 0);
    } else if (r <= pc_ali_unfollow) {
      // taken from delete link
      type = LinkBenchOp.ALI_UNFOLLOW;
      long id1 = chooseRequestID(DistributionType.LINK_WRITES, link.id1);
      long id2 = id2chooser.chooseForOp(rng, id1, LinkBase.LINKBENCH_DEFAULT_TYPE,
          ID2Chooser.P_DELETE_EXIST);
      starttime = System.nanoTime();
      linkStore.deleteLink(dbid, id1, LinkBase.LINKBENCH_DEFAULT_TYPE, id2, true, // no inverse
          false);
      endtime = System.nanoTime();
      if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
        logger.trace("ali unfollow id1=" + id1 + " link_type=" + LinkBase.LINKBENCH_DEFAULT_TYPE
            + " id2=" + id2);
      }
      recordOpStats(type, starttime, endtime, 0);
    } else {
      logger.error("No-op in requester: last probability < 1.0");
      return false;
    }
    // convert to microseconds
    long timetaken = (endtime - starttime)/1000;
    if (recordStats) {
      // record statistics
      stats.addStats(type, timetaken, false);
      latencyStats.recordLatency(requesterID, type, timetaken);
    }
    return true;
  } catch (ConflictException e) {
    long endtime2 = System.nanoTime();
    long timetaken2 = (endtime2 - starttime)/1000;
    _measurements.measure(type.displayName(), timetaken2);
    // BUGFIX: previously measured OVERALL_M with the stale/unset 'endtime',
    // which could record a garbage (even negative) latency sample
    _measurements.measure("OVERALL_M", timetaken2);
    _measurements.reportReturnCode(type.displayName(), 1);
    if (recordStats) {
      stats.addStats(type, timetaken2, true);
    }
    linkStore.clearErrors(requesterID);
    return false;
  } catch (MissingException e) {
    long endtime2 = System.nanoTime();
    long timetaken2 = (endtime2 - starttime)/1000;
    _measurements.measure(type.displayName(), timetaken2);
    // BUGFIX: same stale-'endtime' issue as the ConflictException handler
    _measurements.measure("OVERALL_M", timetaken2);
    _measurements.reportReturnCode(type.displayName(), 2);
    if (recordStats) {
      stats.addStats(type, timetaken2, true);
    }
    linkStore.clearErrors(requesterID);
    return false;
  } catch (Throwable e) { // Catch exception if any
    long endtime2 = System.nanoTime();
    long timetaken2 = (endtime2 - starttime)/1000;
    _measurements.measure(type.displayName(), timetaken2);
    _measurements.measure("OVERALL_M", timetaken2);
    _measurements.reportReturnCode(type.displayName(), 3);
    logger.error(type.displayName() + " error " +
        e.getMessage(), e);
    if (recordStats) {
      stats.addStats(type, timetaken2, true);
    }
    linkStore.clearErrors(requesterID);
    // NOTE(review): any unexpected error aborts the entire process
    System.exit(1);
    return false;
  }
}

/**
 * Record per-op latency, overall latency and a return code for one
 * completed request.  Times are System.nanoTime() values; the recorded
 * latency is in microseconds.  (This sequence was previously duplicated
 * verbatim in every request branch.)
 */
private void recordOpStats(LinkBenchOp type, long starttime, long endtime,
    int returnCode) {
  long micros = (endtime - starttime) / 1000;
  _measurements.measure(type.displayName(), micros);
  _measurements.measure("OVERALL_M", micros);
  _measurements.reportReturnCode(type.displayName(), returnCode);
}
/**
 * Build a fresh node (id left unassigned as -1) with a randomly sized,
 * randomly filled payload, ready for insertion into the database.
 *
 * @return the new node (version 1, timestamped with current time in seconds)
 */
private Node createAddNode() {
  int payloadLen = (int) nodeDataSize.choose(rng);
  byte[] payload = nodeAddDataGen.fill(rng, new byte[payloadLen]);
  int nowSecs = (int) (System.currentTimeMillis() / 1000);
  return new Node(-1, LinkStore.DEFAULT_NODE_TYPE, 1, nowSecs, payload);
}
/**
 * Build a replacement node for updating an existing database row.
 *
 * @param id id of the node being updated
 * @return the node (version 2, fresh random payload, current timestamp)
 */
private Node createUpdateNode(long id) {
  int payloadLen = (int) nodeDataSize.choose(rng);
  byte[] payload = nodeUpDataGen.fill(rng, new byte[payloadLen]);
  int nowSecs = (int) (System.currentTimeMillis() / 1000);
  return new Node(id, LinkStore.DEFAULT_NODE_TYPE, 2, nowSecs, payload);
}
/**
 * Main loop for the requester thread: initialize the stores, optionally
 * run a single-association smoke test, then issue requests (after warmup)
 * until the request count, duration, or time limit is reached.
 */
@Override
public void run() {
  logger.info("Requester thread #" + requesterID + " started: will do "
      + numRequests + " ops after " + warmupTime + " second warmup");
  logger.debug("Requester thread #" + requesterID + " first random number "
      + rng.nextLong());
  try {
    this.linkStore.initialize(props, Phase.REQUEST, requesterID);
    if (this.nodeStore != null && this.nodeStore != this.linkStore) {
      this.nodeStore.initialize(props, Phase.REQUEST, requesterID);
    }
  } catch (Exception e) {
    logger.error("Error while initializing store", e);
    throw new RuntimeException(e);
  }
  long warmupStartTime = System.currentTimeMillis();
  boolean warmupDone = warmupTime <= 0;
  long benchmarkStartTime;
  if (!warmupDone) {
    benchmarkStartTime = warmupStartTime + warmupTime * 1000;
  } else {
    benchmarkStartTime = warmupStartTime;
  }
  long endTime = benchmarkStartTime + maxTime * 1000;
  long lastUpdate = warmupStartTime;
  long curTime = warmupStartTime;
  long i;
  if (singleAssoc) {
    // Smoke-test mode: add one link, then read it back over and over
    LinkBenchOp type = LinkBenchOp.UNKNOWN;
    try {
      Link link = new Link();
      // add a single assoc to the database
      link.id1 = 45;
      // BUGFIX: was "link.id1 = 46", which overwrote id1 and left id2 at
      // its default, so the multiget below could never find the link
      link.id2 = 46;
      type = LinkBenchOp.ADD_LINK;
      // no inverses for now
      linkStore.addLink(dbid, link, true);
      // read this assoc from the database over and over again
      type = LinkBenchOp.MULTIGET_LINK;
      for (i = 0; i < numRequests; i++) {
        int found = getLink(link.id1, link.link_type,
            new long[]{link.id2});
        if (found == 1) {
          requestsDone++;
        } else {
          logger.warn("ThreadID = " + requesterID +
              " not found link for id1=45");
        }
      }
    } catch (Throwable e) {
      logger.error(type.displayName() + "error " +
          e.getMessage(), e);
      aborted = true;
    }
    closeStores();
    return;
  }
  long warmupRequests = 0;
  long requestsSinceLastUpdate = 0;
  long lastStatDisplay_ms = curTime;
  long reqTime_ns = System.nanoTime();
  double requestrate_ns = ((double)requestrate)/1e9;
  while (true) {
    // Exit check: either a wall-clock duration or a fixed request count
    if (use_duration) {
      long sec = (System.currentTimeMillis() - start_time) / 1000L;
      if (sec >= duration) break;
    } else if (requestsDone >= numRequests) {
      // BUGFIX: condition was inverted ("requestsDone < numRequests"),
      // which made the loop exit immediately before doing any work
      break;
    }
    if (requestrate > 0) {
      // Throttle to the configured arrival rate (exponential intervals)
      reqTime_ns = Timer.waitExpInterval(rng, reqTime_ns, requestrate_ns);
    }
    boolean success = oneRequest(warmupDone);
    if (!success) {
      errors++;
      if (maxFailedRequests >= 0 && errors > maxFailedRequests) {
        logger.error(String.format("Requester #%d aborting: %d failed requests" +
            " (out of %d total) ", requesterID, errors, requestsDone));
        aborted = true;
        break;
      }
    }
    curTime = System.currentTimeMillis();
    // Track requests done
    if (warmupDone) {
      requestsDone++;
      requestsSinceLastUpdate++;
      if (requestsSinceLastUpdate >= RequestProgress.THREAD_REPORT_INTERVAL) {
        progressTracker.addRetries(linkStore.getRetries());
        progressTracker.update(requestsSinceLastUpdate);
        requestsSinceLastUpdate = 0;
      }
    } else {
      warmupRequests++;
    }
    // Per-thread periodic progress updates
    if (curTime > lastUpdate + progressFreq_ms) {
      if (warmupDone) {
        logger.info(String.format("Requester #%d %d/%d requests done",
            requesterID, requestsDone, numRequests));
      } else {
        logger.info(String.format("Requester #%d warming up. " +
            " %d warmup requests done. %d/%d seconds of warmup done",
            requesterID, warmupRequests, (curTime - warmupStartTime) / 1000,
            warmupTime));
      }
      lastUpdate = curTime;
    }
    // Per-thread periodic stat dumps after warmup done
    if (warmupDone && (lastStatDisplay_ms + displayFreq_ms) <= curTime) {
      displayStats(lastStatDisplay_ms, curTime);
      stats.resetSamples();
      lastStatDisplay_ms = curTime;
    }
    // Check if warmup completed
    if (!warmupDone && curTime >= benchmarkStartTime) {
      warmupDone = true;
      lastUpdate = curTime;
      lastStatDisplay_ms = curTime;
      requestsSinceLastUpdate = 0;
      logger.info(String.format("Requester #%d warmup finished " +
          " after %d warmup requests. 0/%d requests done",
          requesterID, warmupRequests, numRequests));
    }
    // Enforce time limit
    if (curTime > endTime) {
      logger.info(String.format("Requester #%d: time limit of %ds elapsed" +
          ", shutting down.", requesterID, maxTime));
      break;
    }
  }
  // Do final update of statistics
  progressTracker.update(requestsSinceLastUpdate);
  // Report final stats
  logger.info("ThreadID = " + requesterID +
      " total requests = " + requestsDone +
      " requests/second = " + ((1000 * requestsDone)/
      Math.max(1, (curTime - benchmarkStartTime))) +
      " found = " + numfound +
      " not found = " + numnotfound +
      " history queries = " + numHistoryQueries + "/" +
      stats.getCount(LinkBenchOp.GET_LINKS_LIST));
  closeStores();
}
/**
 * Close datastores before finishing.  The node store is only closed
 * separately when it is a distinct instance from the link store.
 */
private void closeStores() {
  linkStore.close();
  boolean separateNodeStore = (nodeStore != null) && (nodeStore != linkStore);
  if (separateNodeStore) {
    nodeStore.close();
  }
}
/**
 * Dump sampled statistics for the standard operation types covering the
 * window [lastStatDisplay_ms, now_ms].
 */
private void displayStats(long lastStatDisplay_ms, long now_ms) {
  stats.displayStats(lastStatDisplay_ms, now_ms,
      Arrays.asList(
        LinkBenchOp.MULTIGET_LINK, LinkBenchOp.GET_LINKS_LIST,
        LinkBenchOp.COUNT_LINK,
        LinkBenchOp.UPDATE_LINK, LinkBenchOp.ADD_LINK,
        LinkBenchOp.RANGE_SIZE, LinkBenchOp.ADD_NODE,
        LinkBenchOp.UPDATE_NODE, LinkBenchOp.DELETE_NODE,
        LinkBenchOp.GET_NODE));
}
/**
 * Multiget a batch of links for (id1, link_type).
 *
 * @return how many of the requested id2s were found
 */
int getLink(long id1, long link_type, long id2s[]) throws Exception {
  Link[] result = linkStore.multigetLinks(dbid, id1, link_type, id2s);
  return (result == null) ? 0 : result.length;
}
/**
 * Fetch the link list for (id1, link_type).  If the result hit the
 * store's range limit there may be older history, so cache the oldest
 * link returned for later historical "tail" queries.
 *
 * @return the links, or null if none found
 */
Link[] getLinkList(long id1, long link_type) throws Exception {
  Link links[] = linkStore.getLinkList(dbid, id1, link_type);
  if (links == null) {
    return null;
  }
  // links is known non-null below; the previous redundant null checks
  // in the trace/limit logic are removed
  if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
    logger.trace("getLinkList(id1=" + id1 + ", link_type=" + link_type
        + ") => count=" + links.length);
  }
  // If there were more links than limit, record for historical queries
  if (links.length >= linkStore.getRangeLimit()) {
    Link lastLink = links[links.length - 1];
    if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
      logger.trace("Maybe more history for (" + id1 + "," +
          link_type + " older than " + lastLink.time);
    }
    addTailCacheEntry(lastLink);
  }
  return links;
}
/**
 * Issue a historical "tail" range query: pick a random cached
 * oldest-seen link and page further back in time from it.  Updates the
 * tail cache depending on whether more history may remain.
 *
 * @return the links found, or null if none
 */
Link[] getLinkListTail() throws Exception {
  assert(!listTailHistoryIndex.isEmpty());
  assert(!listTailHistory.isEmpty());
  int choice = rng.nextInt(listTailHistory.size());
  Link prevLast = listTailHistory.get(choice);
  // Get links past the oldest last retrieved
  Link links[] = linkStore.getLinkList(dbid, prevLast.id1,
      prevLast.link_type, 0, prevLast.time, 1, linkStore.getRangeLimit());
  if (links == null) {
    return null;
  }
  // links is known non-null below; the previous redundant null ternaries
  // are removed, and the two trace blocks (same guard) are merged
  if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
    logger.trace("getLinkListTail(id1=" + prevLast.id1 + ", link_type="
        + prevLast.link_type + ", max_time=" + prevLast.time
        + " => count=" + links.length);
    logger.trace("Historical range query for (" + prevLast.id1 + "," +
        prevLast.link_type + " older than " + prevLast.time +
        ": " + links.length + " results");
  }
  if (links.length == linkStore.getRangeLimit()) {
    // There might be yet more history
    Link last = links[links.length - 1];
    if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
      logger.trace("might be yet more history for (" + last.id1 + "," +
          last.link_type + " older than " + last.time);
    }
    // Update in place.  NOTE(review): listTailHistoryIndex is not updated
    // here, so the index entry for the replaced link can go stale --
    // confirm against the invariants maintained by addTailCacheEntry /
    // removeTailCacheEntry.
    listTailHistory.set(choice, last.clone());
  } else {
    // No more history after this, remove from cache
    removeTailCacheEntry(choice, null);
  }
  numHistoryQueries++;
  return links;
}
/**
 * Add a new link to the history cache, unless already present
 * @param lastLink the last (i.e. lowest timestamp) link retrieved
 */
private void addTailCacheEntry(Link lastLink) {
  HistoryKey key = new HistoryKey(lastLink);
  if (listTailHistoryIndex.containsKey(key)) {
    return; // already cached
  }
  if (listTailHistory.size() >= listTailHistoryLimit) {
    // Cache full: evict a random victim and reuse its slot
    int victim = rng.nextInt(listTailHistory.size());
    removeTailCacheEntry(victim, lastLink.clone());
  } else {
    // Room left: append and index the new entry
    listTailHistory.add(lastLink.clone());
    listTailHistoryIndex.put(key, listTailHistory.size() - 1);
  }
}
/**
 * Remove or replace entry in listTailHistory and update index
 * @param pos index of entry in listTailHistory
 * @param repl replace with this if not null
 */
private void removeTailCacheEntry(int pos, Link repl) {
  Link entry = listTailHistory.get(pos);
  if (pos == listTailHistory.size() - 1) {
    // removing from last position, don't need to fill gap
    listTailHistoryIndex.remove(new HistoryKey(entry));
    int lastIx = listTailHistory.size() - 1;
    if (repl == null) {
      // plain removal: list shrinks by one
      listTailHistory.remove(lastIx);
    } else {
      // replacement: reuse the slot and index the replacement's key
      listTailHistory.set(lastIx, repl);
      listTailHistoryIndex.put(new HistoryKey(repl), lastIx);
    }
  } else {
    // NOTE(review): on this path the index entry for the removed 'entry'
    // is never deleted, and when repl == null the element moved up from
    // the tail keeps its old index mapping until the put below -- confirm
    // the listTailHistoryIndex invariants against addTailCacheEntry.
    if (repl == null) {
      // Replace with last entry in cache to fill gap
      repl = listTailHistory.get(listTailHistory.size() - 1);
      listTailHistory.remove(listTailHistory.size() - 1);
    }
    listTailHistory.set(pos, repl);
    listTailHistoryIndex.put(new HistoryKey(repl), pos);
  }
}
/**
 * Shared tracker of request progress across all requester threads.
 * Threads batch their counts (see THREAD_REPORT_INTERVAL) and the tracker
 * prints a console progress line each time 'interval' requests complete.
 */
public static class RequestProgress {
  // How many ops before a thread should register its progress
  static final int THREAD_REPORT_INTERVAL = 250;
  /** How many ops before a progress update should be printed to console */
  private final long interval;
  private final Logger progressLogger;
  private long totalRequests;
  private final AtomicLong requestsDone;  // requests completed so far
  private final AtomicInteger retries;    // store-level retries observed
  private long benchmarkStartTime;        // ms; set by startTimer()
  private long last_update_time;          // ms timestamp of last printed update
  private long last_update_done;          // requestsDone at last printed update
  private long warmupTime_s;
  private long timeLimit_s;
  private boolean use_duration = false;   // duration-based instead of count-based
  private int duration = 0;               // benchmark duration in seconds

  public RequestProgress(Logger progressLogger, long totalRequests,
      long timeLimit_s, long warmupTime_s, long interval) {
    this.interval = interval;
    this.progressLogger = progressLogger;
    this.totalRequests = totalRequests;
    this.requestsDone = new AtomicLong();
    this.retries = new AtomicInteger();
    this.timeLimit_s = timeLimit_s;
    this.warmupTime_s = warmupTime_s;
    this.last_update_done = 0;
  }

  /** Switch to duration-based progress reporting over d seconds. */
  public void setUseDuration(int d) {
    use_duration = true;
    duration = d;
  }

  /** Start the benchmark clock, offset past the configured warmup. */
  public void startTimer() {
    benchmarkStartTime = System.currentTimeMillis() + warmupTime_s * 1000;
    last_update_time = benchmarkStartTime;
    LinkBenchRequest.start_time = benchmarkStartTime;
  }

  public long getBenchmarkStartTime() {
    return benchmarkStartTime;
  }

  public void addRetries(int retry) {
    retries.addAndGet(retry);
  }

  /**
   * Register requestIncr newly completed requests.  Prints a progress
   * line when an interval boundary is crossed or all requests finished.
   */
  public void update(long requestIncr) {
    long curr = requestsDone.addAndGet(requestIncr);
    long prev = curr - requestIncr;
    if ((curr / interval) > (prev / interval) || curr == totalRequests) {
      printProgress(curr);
    }
  }

  /** Unconditionally print a progress line. */
  public void force_print() {
    printProgress(requestsDone.get());
  }

  // Shared by update() and force_print(); this body was previously
  // duplicated verbatim in both methods.
  private void printProgress(long curr) {
    float progressPercent = ((float) curr) / totalRequests * 100;
    long now = System.currentTimeMillis();
    long elapsed = now - benchmarkStartTime;
    float elapsed_s = ((float) elapsed) / 1000;
    float limitPercent = (elapsed_s / ((float) timeLimit_s)) * 100;
    float rate = curr / ((float)elapsed_s);
    if (use_duration) {
      float slot_sec = (now - last_update_time)/(float)1000;
      float latest_tp = (curr - last_update_done)/(float)slot_sec;
      System.err.println(String.format(
          "%.1f/%d duration, %d done, total tp: %.1f ops/sec; tp of last %.1fs: %.1f ops/sec, aborts is %d",
          elapsed_s, duration, curr, rate, slot_sec, latest_tp, retries.get()));
    } else {
      System.err.println(String.format(
          "%d/%d requests finished: %.1f%% complete at %.1f ops/sec; tp of last 1s: %.1f ops/sec" +
          " %.1f/%d secs elapsed: %.1f%% of time limit used",
          curr, totalRequests, progressPercent, rate, (curr - last_update_done)/((now - last_update_time)/1000.0),
          elapsed_s, timeLimit_s, limitPercent));
    }
    last_update_time = now;
    last_update_done = curr;
  }
}
public static RequestProgress createProgress(Logger logger,
Properties props) {
long total_requests = ConfigUtil.getLong(props, Config.NUM_REQUESTS)
* ConfigUtil.getLong(props, Config.NUM_REQUESTERS);
long progressInterval = ConfigUtil.getLong(props, Config.REQ_PROG_INTERVAL,
10000L);
long warmupTime = ConfigUtil.getLong(props, Config.WARMUP_TIME, 0L);
long maxTime = ConfigUtil.getLong(props, Config.MAX_TIME);
boolean use_duration = ConfigUtil.getBool(props, Config.USE_DURATION, true);
int duration = ConfigUtil.getInt(props, Config.DURATION, 0);
RequestProgress rp = new RequestProgress(logger, total_requests,
maxTime, warmupTime, progressInterval);
if (use_duration) {
rp.setUseDuration(duration);
}
return rp;
}
}
| 1.460938 | 1 |
src/gmail/auto/send/data.java | davisnguyen111195/Auto1 | 1 | 73 | package gmail.auto.send;
import java.util.List;
/**
 * Credential record for a mail account: username, password and recovery
 * address, rendered as one comma-separated line by {@link #toString()}.
 */
public class data {
    // Shared in-memory stores populated elsewhere in the application.
    public static List<data> listData;
    public static List<String> listTemplate;
    public static List<String> listSubject;
    public static List<String> listEmail;
    private String user;
    private String pass;
    private String recoveryMail;
    /** No-arg constructor; all fields start as null. */
    public data() {
    }
    /** Creates a fully populated record. */
    public data(String user, String pass, String recoveryMail) {
        this.user = user;
        this.pass = pass;
        this.recoveryMail = recoveryMail;
    }
    public String getUser() {
        return user;
    }
    public String getPass() {
        return pass;
    }
    public String getRecoveryMail() {
        return recoveryMail;
    }
    public void setUser(String user) {
        this.user = user;
    }
    public void setPass(String pass) {
        this.pass = pass;
    }
    public void setRecoveryMail(String recoveryMail) {
        this.recoveryMail = recoveryMail;
    }
    /** CSV form: {@code user,pass,recoveryMail}; null fields print as "null". */
    @Override
    public String toString() {
        return String.format("%s,%s,%s", user, pass, recoveryMail);
    }
}
| 1.28125 | 1 |
src/main/java/com/ruoyi/project/system/kbm/service/ITOrgService.java | lzsyt/ruoyi_kbm | 0 | 81 | package com.ruoyi.project.system.kbm.service;
import com.ruoyi.project.system.kbm.domain.TOrg;
import java.util.List;
/**
* 【请填写功能名称】Service接口
*
* @author ruoyi
* @date 2019-09-19
*/
public interface ITOrgService
{
public List<TOrg> getChild(List<TOrg> orgs);
/**
* 查询【请填写功能名称】
*
* @param id 【请填写功能名称】ID
* @return 【请填写功能名称】
*/
public TOrg selectTOrgById(String id);
/**
* 查询【请填写功能名称】列表
*
* @param tOrg 【请填写功能名称】
* @return 【请填写功能名称】集合
*/
public List<TOrg> selectTOrgList(TOrg tOrg);
/**
* 新增【请填写功能名称】
*
* @param tOrg 【请填写功能名称】
* @return 结果
*/
public int insertTOrg(TOrg tOrg);
/**
* 修改【请填写功能名称】
*
* @param tOrg 【请填写功能名称】
* @return 结果
*/
public int updateTOrg(TOrg tOrg);
/**
* 批量删除【请填写功能名称】
*
* @param ids 需要删除的数据ID
* @return 结果
*/
public int deleteTOrgByIds(String ids);
/**
* 删除【请填写功能名称】信息
*
* @param id 【请填写功能名称】ID
* @return 结果
*/
public int deleteTOrgById(String id);
}
| 1.273438 | 1 |
runescape-client/src/main/java/Varcs.java | Smeety/OpenOSRS | 0 | 89 | import java.io.EOFException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import net.runelite.mapping.Export;
import net.runelite.mapping.Implements;
import net.runelite.mapping.ObfuscatedGetter;
import net.runelite.mapping.ObfuscatedName;
import net.runelite.mapping.ObfuscatedSignature;
/**
 * Deobfuscated client class holding "varc" (client-side variable) values:
 * a map of int/String values keyed by varc id, plus a legacy String array.
 * Values whose VarcInt definition is flagged persistent are saved to a
 * per-user preferences file by write() and restored by read(); all other
 * values are transient and discarded by clearTransient().
 */
@ObfuscatedName("cy")
@Implements("Varcs")
public class Varcs {
	@ObfuscatedName("b")
	@ObfuscatedGetter(
		intValue = -1058708955
	)
	@Export("canvasWidth")
	public static int canvasWidth;
	@ObfuscatedName("q")
	@Export("intsPersistence")
	// Per-varc flag: true if that varc id should be saved to the prefs file.
	boolean[] intsPersistence;
	@ObfuscatedName("j")
	@Export("map")
	// Varc id (Integer key) -> Integer or String value.
	Map map;
	@ObfuscatedName("p")
	@Export("strings")
	String[] strings;
	@ObfuscatedName("g")
	@Export("unwrittenChanges")
	// Set when a persistent varc changed since the last successful write().
	boolean unwrittenChanges;
	@ObfuscatedName("n")
	@ObfuscatedGetter(
		longValue = -7654666675170575125L
	)
	// Timestamp (ms) of the last write(); used by tryWrite() to throttle.
	long field1291;
	// Loads the per-varc persistence flags from archive group 19 (VarcInt
	// definitions), sizes the legacy string table from group 15, then
	// restores persisted values via read().
	Varcs() {
		this.unwrittenChanges = false;
		int var1 = Clock.archive2.getGroupFileCount(19);
		this.map = new HashMap();
		this.intsPersistence = new boolean[var1];
		int var2;
		for (var2 = 0; var2 < var1; ++var2) {
			// Fetch (or lazily decode and cache) the VarcInt definition for id var2.
			VarcInt var4 = (VarcInt)VarcInt.VarcInt_cached.get((long)var2);
			VarcInt var3;
			if (var4 != null) {
				var3 = var4;
			} else {
				byte[] var5 = VarcInt.VarcInt_archive.takeFile(19, var2);
				var4 = new VarcInt();
				if (var5 != null) {
					var4.method4573(new Buffer(var5));
				}
				VarcInt.VarcInt_cached.put(var4, (long)var2);
				var3 = var4;
			}
			this.intsPersistence[var2] = var3.persist;
		}
		var2 = 0;
		if (Clock.archive2.method4452(15)) {
			var2 = Clock.archive2.getGroupFileCount(15);
		}
		this.strings = new String[var2];
		this.read();
	}
	// Stores an int varc; marks the store dirty if the id is persistent.
	@ObfuscatedName("m")
	@ObfuscatedSignature(
		descriptor = "(III)V",
		garbageValue = "-1324015283"
	)
	@Export("setInt")
	void setInt(int var1, int var2) {
		this.map.put(var1, var2);
		if (this.intsPersistence[var1]) {
			this.unwrittenChanges = true;
		}
	}
	// Returns the int varc value, or -1 if unset or not an Integer.
	@ObfuscatedName("o")
	@ObfuscatedSignature(
		descriptor = "(IB)I",
		garbageValue = "16"
	)
	@Export("getInt")
	int getInt(int var1) {
		Object var2 = this.map.get(var1);
		return var2 instanceof Integer ? (Integer)var2 : -1;
	}
	// Stores a String varc. Note: unlike setInt, this never sets
	// unwrittenChanges, even for persistent ids.
	@ObfuscatedName("q")
	@ObfuscatedSignature(
		descriptor = "(ILjava/lang/String;I)V",
		garbageValue = "105969632"
	)
	@Export("setString")
	void setString(int var1, String var2) {
		this.map.put(var1, var2);
	}
	// Returns the String varc value, or "" if unset or not a String.
	@ObfuscatedName("j")
	@ObfuscatedSignature(
		descriptor = "(II)Ljava/lang/String;",
		garbageValue = "-1642204852"
	)
	@Export("getString")
	String getString(int var1) {
		Object var2 = this.map.get(var1);
		return var2 instanceof String ? (String)var2 : "";
	}
	// Legacy string-table setter (separate from the map-based varcs).
	@ObfuscatedName("p")
	@ObfuscatedSignature(
		descriptor = "(ILjava/lang/String;B)V",
		garbageValue = "14"
	)
	@Export("setStringOld")
	void setStringOld(int var1, String var2) {
		this.strings[var1] = var2;
	}
	@ObfuscatedName("g")
	@ObfuscatedSignature(
		descriptor = "(II)Ljava/lang/String;",
		garbageValue = "658470799"
	)
	@Export("getStringOld")
	String getStringOld(int var1) {
		return this.strings[var1];
	}
	// Drops every non-persistent map entry and clears the legacy string table.
	@ObfuscatedName("n")
	@ObfuscatedSignature(
		descriptor = "(I)V",
		garbageValue = "-943211657"
	)
	@Export("clearTransient")
	void clearTransient() {
		int var1;
		for (var1 = 0; var1 < this.intsPersistence.length; ++var1) {
			if (!this.intsPersistence[var1]) {
				this.map.remove(var1);
			}
		}
		for (var1 = 0; var1 < this.strings.length; ++var1) {
			this.strings[var1] = null;
		}
	}
	// Opens the preferences file "2" for the current user.
	// NOTE(review): GrandExchangeOfferWorldComparator.field54.name looks like
	// the logged-in account name — obfuscated, confirm before relying on it.
	@ObfuscatedName("u")
	@ObfuscatedSignature(
		descriptor = "(ZI)Lmy;",
		garbageValue = "-297386898"
	)
	@Export("getPreferencesFile")
	AccessFile getPreferencesFile(boolean var1) {
		return GameShell.getPreferencesFile("2", GrandExchangeOfferWorldComparator.field54.name, var1);
	}
	// Serializes the persistent map entries to the preferences file.
	// On-disk format (version 2): byte version, short entry count, then per
	// entry: short varc id, byte type tag, type-encoded value. The class3
	// values appear to act as a type-tag/codec enum (field12 = boxed class,
	// field15 = tag byte, field13 = codec) — obfuscated, confirm in class3.
	// All exceptions are swallowed; the file is closed in any case and the
	// dirty flag/timestamp are reset regardless of success.
	@ObfuscatedName("a")
	@ObfuscatedSignature(
		descriptor = "(I)V",
		garbageValue = "-1923811872"
	)
	@Export("write")
	void write() {
		AccessFile var1 = this.getPreferencesFile(true);
		try {
			// First pass: size the output buffer and count persistent entries.
			int var2 = 3;
			int var3 = 0;
			Iterator var4 = this.map.entrySet().iterator();
			while (var4.hasNext()) {
				Entry var5 = (Entry)var4.next();
				int var6 = (Integer)var5.getKey();
				if (this.intsPersistence[var6]) {
					Object var7 = var5.getValue();
					var2 += 3;
					if (var7 instanceof Integer) {
						var2 += 4;
					} else if (var7 instanceof String) {
						var2 += DynamicObject.stringCp1252NullTerminatedByteSize((String)var7);
					}
					++var3;
				}
			}
			Buffer var33 = new Buffer(var2);
			var33.writeByte(2);
			var33.writeShort(var3);
			// Second pass: encode each persistent entry as (id, tag, value).
			Iterator var34 = this.map.entrySet().iterator();
			label183:
			while (true) {
				Entry var21;
				int var22;
				do {
					if (!var34.hasNext()) {
						var1.write(var33.array, 0, var33.offset);
						break label183;
					}
					var21 = (Entry)var34.next();
					var22 = (Integer)var21.getKey();
				} while(!this.intsPersistence[var22]);
				var33.writeShort(var22);
				Object var8 = var21.getValue();
				// Resolve the type tag for the value's runtime class.
				Class var10 = var8.getClass();
				class3[] var11 = new class3[]{class3.field9, class3.field8, class3.field10};
				class3[] var12 = var11;
				int var13 = 0;
				class3 var9;
				class3 var14;
				while (true) {
					if (var13 >= var12.length) {
						var9 = null;
						break;
					}
					var14 = var12[var13];
					if (var10 == var14.field12) {
						var9 = var14;
						break;
					}
					++var13;
				}
				var33.writeByte(var9.field15);
				// Resolve the codec again and encode the value into the buffer.
				Class var23 = var8.getClass();
				class3[] var15 = new class3[]{class3.field9, class3.field8, class3.field10};
				class3[] var16 = var15;
				int var17 = 0;
				while (true) {
					if (var17 >= var16.length) {
						var14 = null;
						break;
					}
					class3 var18 = var16[var17];
					if (var23 == var18.field12) {
						var14 = var18;
						break;
					}
					++var17;
				}
				if (var14 == null) {
					throw new IllegalArgumentException();
				}
				class0 var35 = var14.field13;
				var35.vmethod59(var8, var33);
			}
		} catch (Exception var31) {
		} finally {
			try {
				var1.close();
			} catch (Exception var30) {
			}
		}
		this.unwrittenChanges = false;
		this.field1291 = MilliClock.currentTimeMillis();
	}
	// Restores persisted varcs from the preferences file. Accepts format
	// versions 0-2: version 2 reads typed (id, tag, value) entries; older
	// versions read an int table and then skip a legacy string table.
	// Any error silently aborts the load (exceptions are swallowed).
	@ObfuscatedName("z")
	@ObfuscatedSignature(
		descriptor = "(B)V",
		garbageValue = "-50"
	)
	@Export("read")
	void read() {
		AccessFile var1 = this.getPreferencesFile(false);
		label230: {
			try {
				// Read the whole file into memory before decoding.
				byte[] var2 = new byte[(int)var1.length()];
				int var4;
				for (int var3 = 0; var3 < var2.length; var3 += var4) {
					var4 = var1.read(var2, var3, var2.length - var3);
					if (var4 == -1) {
						throw new EOFException();
					}
				}
				Buffer var15 = new Buffer(var2);
				if (var15.array.length - var15.offset >= 1) {
					int var16 = var15.readUnsignedByte();
					if (var16 >= 0 && var16 <= 2) {
						int var7;
						int var8;
						int var9;
						int var17;
						if (var16 >= 2) {
							// Version 2: typed entries, only persistent ids are kept.
							var17 = var15.readUnsignedShort();
							var7 = 0;
							while (true) {
								if (var7 >= var17) {
									break label230;
								}
								var8 = var15.readUnsignedShort();
								var9 = var15.readUnsignedByte();
								class3[] var10 = new class3[]{class3.field9, class3.field8, class3.field10};
								class3 var11 = (class3)FaceNormal.findEnumerated(var10, var9);
								Object var12 = var11.method40(var15);
								if (this.intsPersistence[var8]) {
									this.map.put(var8, var12);
								}
								++var7;
							}
						} else {
							// Versions 0/1: plain int table, then a string table
							// that is read and discarded.
							var17 = var15.readUnsignedShort();
							for (var7 = 0; var7 < var17; ++var7) {
								var8 = var15.readUnsignedShort();
								var9 = var15.readInt();
								if (this.intsPersistence[var8]) {
									this.map.put(var8, var9);
								}
							}
							var7 = var15.readUnsignedShort();
							var8 = 0;
							while (true) {
								if (var8 >= var7) {
									break label230;
								}
								var15.readUnsignedShort();
								var15.readStringCp1252NullTerminated();
								++var8;
							}
						}
					}
					return;
				}
			} catch (Exception var26) {
				break label230;
			} finally {
				try {
					var1.close();
				} catch (Exception var25) {
				}
			}
			return;
		}
		this.unwrittenChanges = false;
	}
	// Flushes pending changes at most once per minute.
	@ObfuscatedName("w")
	@ObfuscatedSignature(
		descriptor = "(I)V",
		garbageValue = "999745813"
	)
	@Export("tryWrite")
	void tryWrite() {
		if (this.unwrittenChanges && this.field1291 < MilliClock.currentTimeMillis() - 60000L) {
			this.write();
		}
	}
	@ObfuscatedName("y")
	@ObfuscatedSignature(
		descriptor = "(B)Z",
		garbageValue = "0"
	)
	@Export("hasUnwrittenChanges")
	boolean hasUnwrittenChanges() {
		return this.unwrittenChanges;
	}
}
| 1.390625 | 1 |
src/com/kamotelabs/collections12/App.java | martianworm17/udemy-java-tutorial-collections | 0 | 97 | package com.kamotelabs.collections12;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
/**
 * Tutorial reference: notes on choosing a Java collection implementation.
 * The main method only declares example collections to illustrate the
 * recommendations in the comments; nothing is executed or printed.
 */
public class App {
	public static void main(String[] args) {
		/*
		 * Consider:
		 * 1. what you need the collection to do
		 * 2. are you using the fastest collection for your purposes
		 * (think about insertion/deletion, retrieval and traversal
		 */
		//////////////// LISTS ///////////////////////////////////
		// Store lists of objects
		// Duplicates are allowed
		// Objects remain in order
		// Elements are indexed via an integer
		// cf. shopping list
		// Checking for particular item in list is slow
		// Looking an item up by index is fast
		// Iterating through lists is relatively fast
		// Note: you can sort lists if you want to.
		// If you only add or remove items at end of list, use ArrayList.
		List<String> list1 = new ArrayList<String>();
		// Removing or adding items elsewhere in the list?
		List<String> list2 = new LinkedList<String>();
		////////////////SETS ///////////////////////////////////
		// Only store unique values
		// Great for removing duplicates
		// Not indexed, unlike lists
		// Very fast to check if a particular object exists
		// If you want to use your own objects, you must implement hashCode() and equals().
		// Order is unimportant and OK if it changes?
		// HashSet is not ordered.
		Set<String> set1 = new HashSet<String>();
		// Sorted in natural order? Use TreeSet - must implement Comparable for custom types
		// (1,2,3 ..., a,b,c.... etc)
		Set<String> set2 = new TreeSet<String>();
		// Elements remain in order they were added
		Set<String> set3 = new LinkedHashSet<String>();
		////////////////MAPS ///////////////////////////////////
		// Key value pairs.
		// Like lookup tables
		// Retrieving a value by key is fast
		// Iterating over map values is very slow
		// Maps not really optimised for iteration
		// If you want to use your own objects as keys, you must implement hashCode() and equals().
		// Keys not in any particular order, and order liable to change.
		Map<String, String> map1 = new HashMap<String, String>();
		// Keys sorted in natural order - must implement Comparable for custom types
		Map<String, String> map2 = new TreeMap<String, String>();
		// Keys remain in order added
		Map<String, String> map3 = new LinkedHashMap<String, String>();
		// There are also the SortedSet and SortedMap interfaces.
	}
}
Basic Java/Lista 5/src/tools/Sign.java | JacekLeja/Java-projects | 0 | 105 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package tools;
/**
 * Signum symbol: a one-argument function that evaluates to -1.0 for a
 * negative argument and 1.0 for zero or a positive argument.
 *
 * @author jacek
 */
public class Sign extends Symbol{
    public Sign(){
        set_arity(1);
    }
    /** @return -1 if the argument is negative, otherwise 1 */
    public double compute(){
        return get_argument() < 0 ? -1 : 1;
    }
}
| 0.855469 | 1 |
src/test/java/com/endava/cats/generator/simple/RegexGeneratorTest.java | Endava/cats | 558 | 113 | package com.endava.cats.generator.simple;
import io.quarkus.test.junit.QuarkusTest;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.regex.Pattern;
@QuarkusTest
class RegexGeneratorTest {
    // A negative max length makes generation fall back to the default value.
    @Test
    void shouldReturnDefaultWhenMaxLessThanZero() {
        String result = RegexGenerator.generate(Pattern.compile("test"), "test", 10, -10);
        Assertions.assertThat(result).isEqualTo(RegexGenerator.DEFAULT);
    }
    // With a negative min length, a prefix that already matches the pattern
    // is returned unchanged.
    @Test
    void shouldReturnPrefixWhenPrefixMatchesPatternAndMinLessThanZero() {
        String result = RegexGenerator.generate(Pattern.compile("test"), "test", -10, 10);
        Assertions.assertThat(result).isEqualTo("test");
    }
    // With positive min/max, a prefix of length 4 cannot satisfy the 10..10
    // constraint against this pattern, so the default value is returned.
    @Test
    void shouldReturnDefaultWhenPrefixNotMatchingPattern() {
        String result = RegexGenerator.generate(Pattern.compile("test"), "test", 10, 10);
        Assertions.assertThat(result).isEqualTo(RegexGenerator.DEFAULT);
    }
}
| 1.335938 | 1 |
aliyun-java-sdk-miniapplcdp/src/main/java/com/aliyuncs/miniapplcdp/model/v20200113/ListAppModulesResponse.java | rnarla123/aliyun-openapi-java-sdk | 0 | 121 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aliyuncs.miniapplcdp.model.v20200113;
import java.util.List;
import com.aliyuncs.AcsResponse;
import com.aliyuncs.miniapplcdp.transform.v20200113.ListAppModulesResponseUnmarshaller;
import com.aliyuncs.transform.UnmarshallerContext;
/**
* @author auto create
* @version
*/
/**
 * Generated response wrapper for the ListAppModules API call: a request id
 * plus a Data payload containing the list of module descriptors.
 * Boilerplate getters/setters; deserialization is delegated to
 * ListAppModulesResponseUnmarshaller in getInstance().
 */
public class ListAppModulesResponse extends AcsResponse {
	private String requestId;
	private Data data;
	public String getRequestId() {
		return this.requestId;
	}
	public void setRequestId(String requestId) {
		this.requestId = requestId;
	}
	public Data getData() {
		return this.data;
	}
	public void setData(Data data) {
		this.data = data;
	}
	/** Payload container: the list of modules returned by the API. */
	public static class Data {
		private List<ModuleItem> items;
		public List<ModuleItem> getItems() {
			return this.items;
		}
		public void setItems(List<ModuleItem> items) {
			this.items = items;
		}
		/** One mini-app module entry as returned by the service. */
		public static class ModuleItem {
			private String commitId;
			private String description;
			private Boolean directDependency;
			private String icon;
			private String minimumPlatformVersion;
			private String moduleId;
			private String moduleName;
			private String ownerUserId;
			private String platform;
			private String version;
			public String getCommitId() {
				return this.commitId;
			}
			public void setCommitId(String commitId) {
				this.commitId = commitId;
			}
			public String getDescription() {
				return this.description;
			}
			public void setDescription(String description) {
				this.description = description;
			}
			public Boolean getDirectDependency() {
				return this.directDependency;
			}
			public void setDirectDependency(Boolean directDependency) {
				this.directDependency = directDependency;
			}
			public String getIcon() {
				return this.icon;
			}
			public void setIcon(String icon) {
				this.icon = icon;
			}
			public String getMinimumPlatformVersion() {
				return this.minimumPlatformVersion;
			}
			public void setMinimumPlatformVersion(String minimumPlatformVersion) {
				this.minimumPlatformVersion = minimumPlatformVersion;
			}
			public String getModuleId() {
				return this.moduleId;
			}
			public void setModuleId(String moduleId) {
				this.moduleId = moduleId;
			}
			public String getModuleName() {
				return this.moduleName;
			}
			public void setModuleName(String moduleName) {
				this.moduleName = moduleName;
			}
			public String getOwnerUserId() {
				return this.ownerUserId;
			}
			public void setOwnerUserId(String ownerUserId) {
				this.ownerUserId = ownerUserId;
			}
			public String getPlatform() {
				return this.platform;
			}
			public void setPlatform(String platform) {
				this.platform = platform;
			}
			public String getVersion() {
				return this.version;
			}
			public void setVersion(String version) {
				this.version = version;
			}
		}
	}
	// Deserializes a raw response into this object via the generated unmarshaller.
	@Override
	public ListAppModulesResponse getInstance(UnmarshallerContext context) {
		return ListAppModulesResponseUnmarshaller.unmarshall(this, context);
	}
	@Override
	public boolean checkShowJsonItemName() {
		return false;
	}
}
| 1.171875 | 1 |
src/main/java/FilterTheSpire/utils/SeedTesting.java | brianyee/FilterTheSpire | 0 | 129 | package FilterTheSpire.utils;
import FilterTheSpire.simulators.MonsterRngSimulator;
import com.megacrit.cardcrawl.core.Settings;
// TODO: DEBUG ONLY / REMOVE
public class SeedTesting {
    /**
     * Debug helper: simulates the monster RNG for the current run's seed and
     * prints the result. Marked above as debug-only / to be removed.
     */
    public static void bossTest() {
        MonsterRngSimulator helper = new MonsterRngSimulator(Settings.seed);
        helper.print();
    }
}
| 0.5 | 0 |
src/java/org/apache/commons/el/EmptyOperator.java | zhangwei5095/commons-el | 1 | 137 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.el;
import java.lang.reflect.Array;
import java.util.Collection;
import java.util.Map;
import javax.servlet.jsp.el.ELException;
/**
*
* <p>The implementation of the empty operator
*
* @author <NAME> - Art Technology Group
* @version $Change: 181177 $$DateTime: 2001/06/26 08:45:09 $$Author$
**/
public class EmptyOperator extends UnaryOperator {

  //-------------------------------------
  // Singleton
  //-------------------------------------
  public static final EmptyOperator SINGLETON = new EmptyOperator();

  //-------------------------------------
  /**
   * Constructor
   **/
  public EmptyOperator() {
  }

  //-------------------------------------
  // Expression methods
  //-------------------------------------
  /**
   * Returns the symbol representing the operator
   **/
  public String getOperatorSymbol() {
    return "empty";
  }

  //-------------------------------------
  /**
   * Applies the operator to the given value: true for null, the empty
   * String, a zero-length array, or an empty Map/Collection.
   **/
  public Object apply(Object pValue)
    throws ELException
  {
    return PrimitiveObjects.getBoolean(isEmpty(pValue));
  }

  //-------------------------------------
  /**
   * Emptiness test shared by apply(): mirrors the EL "empty" semantics.
   **/
  private static boolean isEmpty(Object pValue) {
    if (pValue == null) {
      return true;
    }
    if ("".equals(pValue)) {
      return true;
    }
    if (pValue.getClass().isArray()) {
      return Array.getLength(pValue) == 0;
    }
    if (pValue instanceof Map) {
      return ((Map) pValue).isEmpty();
    }
    if (pValue instanceof Collection) {
      return ((Collection) pValue).isEmpty();
    }
    return false;
  }

  //-------------------------------------
}
| 1.539063 | 2 |
app/src/main/java/com/engloryintertech/small/nets/utils/Platform.java | XinRan5312/JpushIMAndUmengPay | 2 | 145 | package com.engloryintertech.small.nets.utils;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
/**
 * Runtime platform detection: picks a callback Executor appropriate for the
 * environment (Android main thread vs. a background thread pool).
 */
public class Platform {
    private static final Platform PLATFORM = findPlatform();

    // Cached so repeated calls reuse one pool. The previous implementation
    // created a brand-new cached thread pool on EVERY defaultCallbackExecutor()
    // (and therefore every execute()) call, leaking pools and their idle threads.
    private Executor defaultExecutor;

    /** @return the singleton platform for this process */
    public static Platform get() {
        return PLATFORM;
    }

    // Probes for the Android runtime by loading android.os.Build; falls back
    // to the generic platform on any other JVM.
    private static Platform findPlatform() {
        try {
            Class.forName("android.os.Build");
            if (Build.VERSION.SDK_INT != 0) {
                return new Android();
            }
        } catch (ClassNotFoundException ignored) {
            // Not running on Android.
        }
        return new Platform();
    }

    /** Executor used to deliver callbacks; lazily created, one per platform. */
    public synchronized Executor defaultCallbackExecutor() {
        if (defaultExecutor == null) {
            defaultExecutor = Executors.newCachedThreadPool();
        }
        return defaultExecutor;
    }

    /** Runs the task on this platform's callback executor. */
    public void execute(Runnable runnable) {
        defaultCallbackExecutor().execute(runnable);
    }

    /** Android variant: callbacks are posted to the main (UI) thread. */
    static class Android extends Platform {
        private final Executor mainThreadExecutor = new MainThreadExecutor();

        @Override
        public Executor defaultCallbackExecutor() {
            return mainThreadExecutor;
        }

        static class MainThreadExecutor implements Executor {
            private final Handler handler = new Handler(Looper.getMainLooper());

            @Override
            public void execute(Runnable r) {
                handler.post(r);
            }
        }
    }
}
| 1.28125 | 1 |
ldw598156412-v2-master/v2/Chapter4/src/main/java/com/springboot/chapter4/service/HelloService.java | CoderDream/Deep-Out-Spring-Boot-2 | 0 | 153 | package com.springboot.chapter4.service;
/** Simple greeting service contract. */
public interface HelloService {
	/**
	 * Greets the given user.
	 *
	 * @param name the name to greet
	 */
	public void sayHello(String name);
}
| 0.683594 | 1 |
azkarra-json-serializers/src/main/java/io/streamthoughts/azkarra/serialization/json/TopologyDescriptorSerializer.java | thebearmayor/azkarra-streams | 0 | 161 | /*
* Copyright 2019 StreamThoughts.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.streamthoughts.azkarra.serialization.json;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import io.streamthoughts.azkarra.api.providers.TopologyDescriptor;
import java.io.IOException;
/**
 * Jackson {@link JsonSerializer} that renders a {@link TopologyDescriptor}
 * as a JSON object with name, version, description, aliases and config fields.
 */
public class TopologyDescriptorSerializer extends JsonSerializer<TopologyDescriptor> {

    /**
     * {@inheritDoc}
     */
    @Override
    public void serialize(final TopologyDescriptor descriptor,
                          final JsonGenerator gen,
                          final SerializerProvider serializers) throws IOException {
        gen.writeStartObject();
        gen.writeStringField("name", descriptor.className());
        gen.writeStringField("version", descriptor.version().toString());
        gen.writeStringField("description", descriptor.description());
        gen.writeObjectField("aliases", descriptor.aliases());
        gen.writeObjectField("config", descriptor.streamsConfigs());
        gen.writeEndObject();
    }
}
| 1.148438 | 1 |
MyParkingApp-Client/app/src/main/java/com/infi/myparkingapp_client/DeviceDetailFragment.java | thisIsAnil/MyParkingApp | 0 | 169 | package com.infi.myparkingapp_client;
/**
* Created by Asus on 17-09-2016.
*/
import android.app.Fragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.net.Uri;
import android.net.wifi.WpsInfo;
import android.net.wifi.p2p.WifiP2pConfig;
import android.net.wifi.p2p.WifiP2pDevice;
import android.net.wifi.p2p.WifiP2pInfo;
import android.net.wifi.p2p.WifiP2pManager.ConnectionInfoListener;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.os.Environment;
import android.support.v7.app.AlertDialog;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
public class DeviceDetailFragment extends Fragment implements ConnectionInfoListener {
protected static final int CHOOSE_FILE_RESULT_CODE = 20;
private View mContentView = null;
private WifiP2pDevice device;
private WifiP2pInfo info;
ProgressDialog progressDialog = null;
    // No extra setup beyond the framework default is needed at this stage.
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
    }
    /**
     * Inflates the device-detail layout and wires the connect/disconnect
     * buttons. Connect starts a WPS push-button (PBC) Wi-Fi P2P connection to
     * the currently shown device, showing a cancellable progress dialog;
     * both actions are delegated to the hosting activity via
     * DeviceListFragment.DeviceActionListener.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        mContentView = inflater.inflate(R.layout.device_detail, null);
        mContentView.findViewById(R.id.btn_connect).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                WifiP2pConfig config = new WifiP2pConfig();
                config.deviceAddress = device.deviceAddress;
                config.wps.setup = WpsInfo.PBC;
                // Replace any previous dialog before showing a new one.
                if (progressDialog != null && progressDialog.isShowing()) {
                    progressDialog.dismiss();
                }
                progressDialog = ProgressDialog.show(getActivity(), "Press back to cancel",
                        "Connecting to :" + device.deviceAddress, true, true
                );
                ((DeviceListFragment.DeviceActionListener) getActivity()).connect(config);
            }
        });
        mContentView.findViewById(R.id.btn_disconnect).setOnClickListener(
                new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        ((DeviceListFragment.DeviceActionListener) getActivity()).disconnect();
                    }
                });
        return mContentView;
    }
    /**
     * Receives the slot value chosen in a child activity (extra "empty_slot")
     * and hands it to FileTransferService for sending to the group owner on
     * port 8988.
     * NOTE(review): requestCode/resultCode are not checked and `data` is used
     * unconditionally — if the child activity is cancelled this can NPE;
     * confirm the caller guarantees a non-null result.
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        String val=data.getStringExtra("empty_slot");
        TextView statusText = (TextView) mContentView.findViewById(R.id.status_text);
        statusText.setText("Sending: " + val);
        Intent serviceIntent = new Intent(getActivity(), FileTransferService.class);
        serviceIntent.setAction(FileTransferService.ACTION_SEND_FILE);
        serviceIntent.putExtra(FileTransferService.EXTRAS_FILE_PATH, val);
        serviceIntent.putExtra(FileTransferService.EXTRAS_GROUP_OWNER_ADDRESS,
                info.groupOwnerAddress.getHostAddress());
        serviceIntent.putExtra(FileTransferService.EXTRAS_GROUP_OWNER_PORT, 8988);
        getActivity().startService(serviceIntent);
    }
    /**
     * Called once P2P group negotiation completes. Updates the UI with owner
     * info; if this device is the group owner it starts the single-connection
     * server task (FileServerAsyncTask on port 8988), otherwise it just shows
     * the client status text.
     */
    @Override
    public void onConnectionInfoAvailable(final WifiP2pInfo info) {
        if (progressDialog != null && progressDialog.isShowing()) {
            progressDialog.dismiss();
        }
        this.info = info;
        this.getView().setVisibility(View.VISIBLE);
        // The owner IP is now known.
        TextView view = (TextView) mContentView.findViewById(R.id.group_owner);
        view.setText(getResources().getString(R.string.group_owner_text)
                + ((info.isGroupOwner == true) ? getResources().getString(R.string.yes)
                : getResources().getString(R.string.no)));
        // InetAddress from WifiP2pInfo struct.
        view = (TextView) mContentView.findViewById(R.id.device_info);
        view.setText("Owner IP - " + info.groupOwnerAddress.getHostAddress());
        // After the group negotiation, we assign the group owner as the file
        // server. The file server is single threaded, single connection server
        // socket.
        if (info.groupFormed && info.isGroupOwner) {
            new FileServerAsyncTask(getActivity(), mContentView.findViewById(R.id.status_text))
                    .execute();
        } else if (info.groupFormed) {
            // The other device acts as the client. In this case, we enable the
            // get file button.
            // mContentView.findViewById(R.id.btn_start_client).setVisibility(View.VISIBLE);
            ((TextView) mContentView.findViewById(R.id.status_text)).setText(getResources()
                    .getString(R.string.client_text));
        }
        // hide the connect button
        mContentView.findViewById(R.id.btn_connect).setVisibility(View.GONE);
    }
    /**
     * Updates the UI with device data and makes this fragment visible.
     *
     * @param device the device to be displayed; also stored for the later
     *               connect action in onCreateView
     */
    public void showDetails(WifiP2pDevice device) {
        this.device = device;
        this.getView().setVisibility(View.VISIBLE);
        TextView view = (TextView) mContentView.findViewById(R.id.device_address);
        view.setText(device.deviceAddress);
        view = (TextView) mContentView.findViewById(R.id.device_info);
        view.setText(device.toString());
    }
    /**
     * Clears the UI fields after a disconnect or direct mode disable
     * operation, restores the connect button and hides the fragment.
     */
    public void resetViews() {
        mContentView.findViewById(R.id.btn_connect).setVisibility(View.VISIBLE);
        TextView view = (TextView) mContentView.findViewById(R.id.device_address);
        view.setText(R.string.empty);
        view = (TextView) mContentView.findViewById(R.id.device_info);
        view.setText(R.string.empty);
        view = (TextView) mContentView.findViewById(R.id.group_owner);
        view.setText(R.string.empty);
        view = (TextView) mContentView.findViewById(R.id.status_text);
        view.setText(R.string.empty);
        this.getView().setVisibility(View.GONE);
    }
/**
* A simple server socket that accepts connection and writes some data on
* the stream.
*/
public static class FileServerAsyncTask extends AsyncTask<Void, Void, String> {
private Context context;
private TextView statusText;
        /**
         * @param context    context used by onPostExecute for UI work
         * @param statusText the status TextView to update with the result;
         *                   passed as a View and cast here
         */
        public FileServerAsyncTask(Context context, View statusText) {
            this.context = context;
            this.statusText = (TextView) statusText;
        }
        /**
         * Blocks waiting for a single client on port 8988 (the same port the
         * client side sends to) and reads one UTF string from it.
         *
         * @return the received string, or null on any I/O error
         */
        @Override
        protected String doInBackground(Void... params) {
            ServerSocket serverSocket = null;
            Socket client = null;
            DataInputStream inputstream = null;
            try {
                serverSocket = new ServerSocket(8988);
                client = serverSocket.accept();
                inputstream = new DataInputStream(client.getInputStream());
                String str = inputstream.readUTF();
                serverSocket.close();
                return str;
            } catch (IOException e) {
                Log.e(WiFiDirectActivity.TAG, e.getMessage());
                return null;
            }finally{
                // Best-effort cleanup; each close is isolated so one failure
                // does not prevent the others.
                if(inputstream != null){
                    try{
                        inputstream.close();
                    } catch (IOException e) {
                        Log.e(WiFiDirectActivity.TAG, e.getMessage());
                    }
                }
                if(client != null){
                    try{
                        client.close();
                    } catch (IOException e) {
                        Log.e(WiFiDirectActivity.TAG, e.getMessage());
                    }
                }
                if(serverSocket != null){
                    try{
                        serverSocket.close();
                    } catch (IOException e) {
                        Log.e(WiFiDirectActivity.TAG, e.getMessage());
                    }
                }
            }
        }
/*
* (non-Javadoc)
* @see android.os.AsyncTask#onPostExecute(java.lang.Object)
*/
@Override
protected void onPostExecute(String result) {
if (result != null) {
statusText.setText("Empty Slot is - " + result);
Toast.makeText(context,"Received Success: "+result,Toast.LENGTH_LONG).show();
if(result.equals("-1"+"//_//"+"-1"+"//_//"+"-1")){
context.startActivity(new Intent(context,AccessDeniedActivity.class));
}else{
String[] data=result.split("//_//");
StringPref stringPref=new StringPref(context.getSharedPreferences(KEYS.PREF_NAME,Context.MODE_PRIVATE),KEYS.ALLOCATED_SLOT,data[2]);
stringPref.setStringPref(result.split("//_//")[2]);
stringPref=new StringPref(context.getSharedPreferences(KEYS.PREF_NAME,Context.MODE_PRIVATE),KEYS.SERVER_IP,data[0]);
stringPref.setStringPref(data[0]);
stringPref=new StringPref(context.getSharedPreferences(KEYS.PREF_NAME,Context.MODE_PRIVATE),KEYS.EMPTY_SLOTS,data[1]);
stringPref.setStringPref(data[1]);
stringPref=new StringPref(context.getSharedPreferences(KEYS.PREF_NAME,Context.MODE_PRIVATE),KEYS.SLOTS_NO,data[3]);
stringPref.setStringPref(data[3]);
stringPref=new StringPref(context.getSharedPreferences(KEYS.PREF_NAME,Context.MODE_PRIVATE),KEYS.LANES,data[4]);
stringPref.setStringPref(data[4]);
IntPref intPref=new IntPref(context.getSharedPreferences(KEYS.PREF_NAME,Context.MODE_PRIVATE),KEYS.ROAD_WIDTH,Integer.parseInt(data[5]));
intPref.set(Integer.parseInt(data[5]));
intPref=new IntPref(context.getSharedPreferences(KEYS.PREF_NAME,Context.MODE_PRIVATE),KEYS.LANE_WIDTH,Integer.parseInt(data[6]));
intPref.set(Integer.parseInt(data[6]));
intPref=new IntPref(context.getSharedPreferences(KEYS.PREF_NAME,Context.MODE_PRIVATE),KEYS.SLOT_WIDTH,Integer.parseInt(data[7]));
intPref.set(Integer.parseInt(data[7]));
Intent i=new Intent(context,SimulationActivity.class);
i.putExtra("empty_slots",result);
context.startActivity(i);
}
//TODO Generate Route
}
}
/*
* (non-Javadoc)
* @see android.os.AsyncTask#onPreExecute()
*/
@Override
protected void onPreExecute() {
statusText.setText("Opening a server socket");
}
}
public static boolean copyFile(InputStream inputStream, OutputStream out) {
byte buf[] = new byte[1024];
int len;
try {
while ((len = inputStream.read(buf)) != -1) {
out.write(buf, 0, len);
}
out.close();
inputStream.close();
} catch (IOException e) {
Log.d(WiFiDirectActivity.TAG, e.toString());
return false;
}
return true;
}
}
| 1.1875 | 1 |
samza-core/src/test/java/org/apache/samza/coordinator/stream/TestCoordinatorStreamMessage.java | pubnub/samza | 0 | 177 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.coordinator.stream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import org.apache.samza.coordinator.stream.messages.CoordinatorStreamMessage;
import org.apache.samza.coordinator.stream.messages.Delete;
import org.apache.samza.coordinator.stream.messages.SetConfig;
import org.junit.Test;
public class TestCoordinatorStreamMessage {
  /** A freshly built message exposes its source, version, user and timestamp, and round-trips. */
  @Test
  public void testCoordinatorStreamMessage() {
    CoordinatorStreamMessage original = new CoordinatorStreamMessage("source");
    assertEquals("source", original.getSource());
    assertEquals(CoordinatorStreamMessage.VERSION, original.getVersion());
    assertNotNull(original.getUsername());
    assertTrue(original.getTimestamp() > 0);
    assertFalse(original.isDelete());
    CoordinatorStreamMessage roundTripped =
        new CoordinatorStreamMessage(original.getKeyArray(), original.getMessageMap());
    assertEquals(roundTripped, original);
  }
  /** A message built with a null map is a delete marker. */
  @Test
  public void testCoordinatorStreamMessageIsDelete() {
    CoordinatorStreamMessage deleteMarker = new CoordinatorStreamMessage(new Object[] {}, null);
    assertTrue(deleteMarker.isDelete());
    assertNull(deleteMarker.getMessageMap());
  }
  /** SetConfig carries type, key and value, and is never a delete. */
  @Test
  public void testSetConfig() {
    SetConfig setConfig = new SetConfig("source", "key", "value");
    assertEquals(SetConfig.TYPE, setConfig.getType());
    assertEquals("key", setConfig.getKey());
    assertEquals("value", setConfig.getConfigValue());
    assertFalse(setConfig.isDelete());
    assertEquals(CoordinatorStreamMessage.VERSION, setConfig.getVersion());
  }
  /** Delete messages carry a caller-supplied type and no message map. */
  @Test
  public void testDelete() {
    Delete deleteMessage = new Delete("source2", "key", "delete-type");
    assertEquals("delete-type", deleteMessage.getType());
    assertEquals("key", deleteMessage.getKey());
    assertNull(deleteMessage.getMessageMap());
    assertTrue(deleteMessage.isDelete());
    assertEquals(CoordinatorStreamMessage.VERSION, deleteMessage.getVersion());
  }
}
| 1.28125 | 1 |
jzy3d-native-jogl-newt/src/main/java/org/jzy3d/chart/factories/NewtPainterFactory.java | jzy3d/jzy3d-api | 202 | 185 | package org.jzy3d.chart.factories;
import java.util.Date;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jzy3d.bridge.awt.FrameAWT;
import org.jzy3d.chart.Chart;
import org.jzy3d.chart.controllers.keyboard.camera.ICameraKeyController;
import org.jzy3d.chart.controllers.keyboard.camera.NewtCameraKeyController;
import org.jzy3d.chart.controllers.keyboard.screenshot.IScreenshotKeyController;
import org.jzy3d.chart.controllers.keyboard.screenshot.IScreenshotKeyController.IScreenshotEventListener;
import org.jzy3d.chart.controllers.keyboard.screenshot.NewtScreenshotKeyController;
import org.jzy3d.chart.controllers.mouse.camera.ICameraMouseController;
import org.jzy3d.chart.controllers.mouse.camera.NewtCameraMouseController;
import org.jzy3d.chart.controllers.mouse.picking.IMousePickingController;
import org.jzy3d.chart.controllers.mouse.picking.NewtMousePickingController;
import org.jzy3d.maths.Rectangle;
import org.jzy3d.maths.Utils;
import org.jzy3d.plot3d.rendering.canvas.CanvasNewtAwt;
import org.jzy3d.plot3d.rendering.canvas.ICanvas;
import org.jzy3d.plot3d.rendering.canvas.Quality;
import org.jzy3d.plot3d.rendering.scene.Scene;
import org.jzy3d.plot3d.rendering.view.AWTNativeViewOverlay;
import org.jzy3d.plot3d.rendering.view.AWTRenderer3d;
import org.jzy3d.plot3d.rendering.view.IViewOverlay;
import org.jzy3d.plot3d.rendering.view.Renderer3d;
import org.jzy3d.plot3d.rendering.view.View;
import org.jzy3d.plot3d.rendering.view.layout.IViewportLayout;
import org.jzy3d.plot3d.rendering.view.layout.ViewAndColorbarsLayout;
import com.jogamp.opengl.GLCapabilities;
/**
 * Painter factory producing NEWT-based canvases, AWT overlays/renderers, and
 * NEWT mouse/keyboard controllers.
 */
public class NewtPainterFactory extends NativePainterFactory implements IPainterFactory {
  /** Folder where keyboard-triggered screenshots are written. */
  public static String SCREENSHOT_FOLDER = "./data/screenshots/";

  static Logger logger = LogManager.getLogger(NewtPainterFactory.class);

  public NewtPainterFactory() {
    super();
  }

  public NewtPainterFactory(GLCapabilities capabilities) {
    super(capabilities);
  }

  @Override
  public IViewOverlay newViewOverlay() {
    return new AWTNativeViewOverlay();
  }

  @Override
  public IViewportLayout newViewportLayout() {
    return new ViewAndColorbarsLayout();
  }

  /** Provide AWT Texture loading for screenshots */
  @Override
  public Renderer3d newRenderer3D(View view) {
    return new AWTRenderer3d(view, traceGL, debugGL);
  }

  /**
   * Builds the NEWT/AWT canvas.
   *
   * Fix: honour the inherited {@code traceGL}/{@code debugGL} flags (as
   * {@link #newRenderer3D(View)} already does) instead of shadowing them with
   * locals hard-coded to {@code false}.
   */
  @Override
  public ICanvas newCanvas(IChartFactory factory, Scene scene, Quality quality) {
    return new CanvasNewtAwt(factory, scene, quality, getCapabilities(), traceGL, debugGL);
  }

  @Override
  public ICameraMouseController newMouseCameraController(Chart chart) {
    return new NewtCameraMouseController(chart);
  }

  @Override
  public IMousePickingController newMousePickingController(Chart chart, int clickWidth) {
    return new NewtMousePickingController(chart, clickWidth);
  }

  @Override
  public ICameraKeyController newKeyboardCameraController(Chart chart) {
    return new NewtCameraKeyController(chart);
  }

  /**
   * Builds a screenshot controller that triggers on the screenshot key and
   * writes a timestamped PNG under {@link #SCREENSHOT_FOLDER}, logging success
   * or failure.
   */
  @Override
  public IScreenshotKeyController newKeyboardScreenshotController(Chart chart) {
    // trigger screenshot on 's' letter
    String file =
        SCREENSHOT_FOLDER + "capture-" + Utils.dat2str(new Date(), "yyyy-MM-dd-HH-mm-ss") + ".png";
    IScreenshotKeyController screenshot = new NewtScreenshotKeyController(chart, file);
    screenshot.addListener(new IScreenshotEventListener() {
      @Override
      public void failedScreenshot(String file, Exception e) {
        logger.error("Failed to save screenshot to '" + file + "'", e);
      }

      @Override
      public void doneScreenshot(String file) {
        logger.info("Screenshot: " + file);
      }
    });
    return screenshot;
  }

  @Override
  public IFrame newFrame(Chart chart, Rectangle bounds, String title) {
    return new FrameAWT(chart, bounds, title, null);
  }

  @Override
  public IFrame newFrame(Chart chart) {
    return newFrame(chart, new Rectangle(0, 0, 800, 600), "Jzy3d");
  }
}
| 1.34375 | 1 |
class Solution {
    /**
     * LeetCode 39 (Combination Sum): returns every combination of candidates
     * (each value reusable any number of times) summing to target.
     *
     * Fix: the shared {@code res} accumulator is reset on every call, so
     * invoking this method more than once on the same instance no longer
     * returns stale results from previous invocations.
     *
     * @param candidates distinct positive numbers to pick from
     * @param target the sum each returned combination must reach
     * @return all combinations summing to target (empty list if none)
     */
    public List<List<Integer>> combinationSum(int[] candidates, int target) {
        res = new ArrayList<List<Integer>>();
        // Sorting lets the DFS prune as soon as a candidate exceeds the remainder.
        Arrays.sort(candidates);
        dfs(candidates, target, 0, new Stack<Integer>());
        return res;
    }

    // Accumulator shared between combinationSum and dfs; reset per call.
    List<List<Integer>> res = new ArrayList<List<Integer>>();

    /**
     * Depth-first search over sorted candidates.
     *
     * @param candidates sorted candidate values
     * @param target remaining sum to reach
     * @param index first candidate position allowed at this depth (avoids duplicates)
     * @param cur current partial combination
     */
    void dfs(int[] candidates, int target, int index, Stack<Integer> cur) {
        if(0 == target) {
            res.add(new ArrayList<Integer>(cur));
            return;
        }
        for(int i=index;i<candidates.length;i++) {
            if(candidates[i] > target) {
                break; // sorted: nothing further can fit
            }
            cur.push(candidates[i]);
            // Recurse with i (not i + 1): the same candidate may be reused.
            dfs(candidates,target-candidates[i],i,cur);
            cur.pop();
        }
    }
}
src/cm3033/CyrilNiobe/Main.java | CyrilNb/Pollware | 0 | 201 | package cm3033.CyrilNiobe;
import java.net.URL;
import javax.swing.ImageIcon;
import javafx.animation.Animation;
import javafx.animation.KeyFrame;
import javafx.animation.Timeline;
import javafx.application.Application;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXMLLoader;
import javafx.stage.Stage;
import javafx.util.Duration;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuBar;
import javafx.scene.layout.BorderPane;
/**
* Main class which is called when we run the program
* Start the interface
* @author <NAME> - 2016
* Module: Concurrent Programming Coursework - Robert Gordon University
*/
/**
 * Entry point of the server application: loads the JavaFX UI, installs a
 * system menu bar, and refreshes the controller on a fixed timer.
 *
 * @author Cyril Niobe - 2016
 * Module: Concurrent Programming Coursework - Robert Gordon University
 */
public class Main extends Application {
	@Override
	public void start(Stage primaryStage) {
		try {
			FXMLLoader loader = new FXMLLoader(getClass().getResource("ServerGUI.fxml"));
			Parent root = (Parent) loader.load(getClass().getResource("ServerGUI.fxml").openStream());
			Scene scene = new Scene(root);
			// Native (system) menu bar with two placeholder menus.
			MenuBar menuBar = new MenuBar();
			((BorderPane) root).setBottom(menuBar);
			menuBar.setUseSystemMenuBar(true);
			Menu aboutMenu = new Menu("About");
			Menu serverMenu = new Menu("Server");
			menuBar.getMenus().addAll(aboutMenu, serverMenu);
			primaryStage.setScene(scene);
			primaryStage.setTitle("Big Brother house voting");
			primaryStage.show();
			ServerGUIController mainController = loader.<ServerGUIController>getController();
			// Refresh the GUI every 100 ms, indefinitely.
			KeyFrame refreshFrame = new KeyFrame(Duration.millis(100), actionEvent -> {
				if (mainController != null) {
					mainController.refresh();
				}
			});
			Timeline timeline = new Timeline(refreshFrame);
			timeline.setCycleCount(Animation.INDEFINITE);
			timeline.play();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
	/**
	 * Launches the JavaFX application after a best-effort attempt to set the
	 * macOS dock icon.
	 *
	 * @param args command line arguments (passed through to JavaFX)
	 */
	public static void main(String[] args) {
		try {
			URL iconURL = Main.class.getResource("dock_icon.jpg");
			java.awt.Image image = new ImageIcon(iconURL).getImage();
			com.apple.eawt.Application.getApplication().setDockIconImage(image);
		} catch (Exception ignored) {
			// Won't work on Windows or Linux.
		}
		launch(args);
	}
}
| 1.65625 | 2 |
src/main/java/com/kfyty/mybatis/auto/mapper/match/SQLConditionEnum.java | kfyty/mybatis-jpa-support | 2 | 209 | package com.kfyty.mybatis.auto.mapper.match;
import com.kfyty.mybatis.auto.mapper.exception.SQLConditionMatchException;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
/**
* 功能描述: SQL 条件枚举
*
* @author <EMAIL>
* @date 2019/11/7 10:27
* @since JDK 1.8
*/
@AllArgsConstructor(access = AccessLevel.PRIVATE)
public enum SQLConditionEnum {
    CONDITION_OR("Or", "", " or "),
    CONDITION_AND("And", "", " and "),
    CONDITION_BETWEEN("Between", " between #{%s} and #{%s} ", ""),
    CONDITION_LessThan("LessThan", " < #{%s} ", ""),
    CONDITION_LessEqual("LessEqual", " <= #{%s} ", ""),
    CONDITION_GreaterThan("GreaterThan", " > #{%s} ", ""),
    CONDITION_GreaterEqual("GreaterEqual", " >= #{%s} ", ""),
    CONDITION_NotEqual("NotEqual", " <> #{%s} ", ""),
    CONDITION_EQUAL("Equal", " = #{%s} ", ""),
    CONDITION_NotNull("NotNull", " is not null ", ""),
    CONDITION_IsNull("IsNull", " is null ", ""),
    CONDITION_NotLike("NotLike", " not like '${%s}' ", ""),
    CONDITION_LeftLike("LeftLike", " like '%%${%s}' ", ""),
    CONDITION_RightLike("RightLike", " like '${%s}%%' ", ""),
    CONDITION_Contains("Contains", " like '%%${%s}%%' ", ""),
    CONDITION_Like("Like", " like '${%s}' ", ""),
    CONDITION_NotIn("NotIn", " not in ( %s ) ", ""),
    CONDITION_In("In", " in ( %s ) ", ""),
    CONDITION_OrderByAsc("Asc", " %s asc ", ""),
    CONDITION_OrderByDesc("Desc", " %s desc ", ""),
    CONDITION_DEFAULT("Default", " = #{%s} ", "");

    /** Keyword searched for inside the mapper method name. */
    private String condition;
    /** SQL fragment template, filled with the parameter name. */
    private String template;
    /** Separator emitted when the condition joins two expressions. */
    private String separate;

    public String condition() {
        return this.condition;
    }

    public String template() {
        return this.template;
    }

    public String separate() {
        return this.separate;
    }

    /**
     * Matches exactly one SQL condition keyword inside the given method name.
     * Longer keywords suppress the shorter keywords they contain (e.g.
     * "NotEqual" suppresses "Equal").
     *
     * Fix: "Like" is now also suppressed when "NotLike" already matched;
     * previously a method such as findByNameNotLike matched both keywords and
     * wrongly threw {@link SQLConditionMatchException}.
     *
     * @param methodName mapper method name fragment to analyse, must not be null
     * @return the single matched condition, or {@link #CONDITION_DEFAULT}
     * @throws SQLConditionMatchException when more than one keyword matches
     */
    public static SQLConditionEnum matchSQLCondition(String methodName) {
        Objects.requireNonNull(methodName, "sql condition match error: method name is null !");
        Set<SQLConditionEnum> conditionEnums = new HashSet<>();
        for (SQLConditionEnum value : SQLConditionEnum.values()) {
            if(methodName.contains(value.condition())) {
                if(value.equals(SQLConditionEnum.CONDITION_In) && conditionEnums.contains(SQLConditionEnum.CONDITION_NotIn)) {
                    continue;
                }
                // "Like" is a substring of NotLike/LeftLike/RightLike; skip it
                // when one of those more specific keywords already matched.
                if(value.equals(SQLConditionEnum.CONDITION_Like) && (
                        conditionEnums.contains(SQLConditionEnum.CONDITION_NotLike) ||
                        conditionEnums.contains(SQLConditionEnum.CONDITION_LeftLike) ||
                        conditionEnums.contains(SQLConditionEnum.CONDITION_RightLike))) {
                    continue;
                }
                // "Equal" is a substring of NotEqual/LessEqual/GreaterEqual.
                if(value.equals(SQLConditionEnum.CONDITION_EQUAL) && (
                        conditionEnums.contains(SQLConditionEnum.CONDITION_NotEqual) ||
                        conditionEnums.contains(SQLConditionEnum.CONDITION_LessEqual) ||
                        conditionEnums.contains(SQLConditionEnum.CONDITION_GreaterEqual))) {
                    continue;
                }
                conditionEnums.add(value);
            }
        }
        if(conditionEnums.size() > 1) {
            throw new SQLConditionMatchException("SQL condition match error: more than one matched !");
        }
        return conditionEnums.isEmpty() ? CONDITION_DEFAULT : conditionEnums.iterator().next();
    }
}
| 1.125 | 1 |
src/main/java/com/intland/jenkins/collector/TestResultCollector.java | daniel-beck-bot/codebeamer-result-trend-updater-plugin | 0 | 217 | /*
* Copyright (c) 2015 Intland Software (<EMAIL>)
*/
package com.intland.jenkins.collector;
import com.intland.jenkins.util.TimeUtil;
import com.intland.jenkins.collector.dto.TestResultDto;
import hudson.model.AbstractBuild;
import hudson.model.BuildListener;
import hudson.model.Run;
import hudson.tasks.test.AbstractTestResultAction;
import hudson.tasks.test.AggregatedTestResultAction;
import hudson.tasks.test.TestResult;
import java.util.List;
public class TestResultCollector {
    /**
     * Collects test counts, duration and the fail-count delta versus the most
     * recent previous build that has a test result.
     *
     * Supports both aggregated reports (a List of child reports) and plain
     * JUnit {@link TestResult}s; any other result type is logged and skipped.
     *
     * @param build the build whose test action is inspected
     * @param listener listener used for console logging
     * @return populated DTO; zero/empty fields when the build has no test run
     */
    public static TestResultDto collectTestResultData(AbstractBuild<?, ?> build, BuildListener listener) {
        String formattedTestDuration = "";
        int totalCount = 0;
        int failCount = 0;
        int lastFailCount = 0;
        String failedDifference = "";
        long testDuration = 0L;
        AbstractTestResultAction action = build.getAction(AbstractTestResultAction.class);
        Object lastTestResult = getPreviousTestResult(build);
        if (action != null && action.getResult() != null) {
            if (action.getResult() instanceof List) { // aggregated result
                List<AggregatedTestResultAction.ChildReport> childReports = (List<AggregatedTestResultAction.ChildReport>) action.getResult();
                for (AggregatedTestResultAction.ChildReport childReport : childReports) {
                    TestResult testResult = (TestResult) childReport.result;
                    // getDuration() is in seconds; keep milliseconds. Direct cast
                    // replaces the deprecated new Float(...).longValue() boxing.
                    testDuration += (long) (testResult.getDuration() * 1000);
                    totalCount += testResult.getTotalCount();
                    failCount += testResult.getFailCount();
                }
                if (lastTestResult != null) {
                    childReports = (List<AggregatedTestResultAction.ChildReport>) lastTestResult;
                    for (AggregatedTestResultAction.ChildReport childReport : childReports) {
                        TestResult testResult = (TestResult) childReport.result;
                        lastFailCount += testResult.getFailCount();
                    }
                }
                formattedTestDuration = TimeUtil.formatMillisIntoMinutesAndSeconds(testDuration);
            } else if (action.getResult() instanceof TestResult) { // junit result
                TestResult testResult = (TestResult) action.getResult();
                testDuration = (long) (testResult.getDuration() * 1000);
                formattedTestDuration = TimeUtil.formatMillisIntoMinutesAndSeconds(testDuration);
                totalCount = testResult.getTotalCount();
                failCount = testResult.getFailCount();
                if (lastTestResult != null) {
                    lastFailCount = ((TestResult) lastTestResult).getFailCount();
                }
            } else {
                listener.getLogger().println("This build does not have a supported test run type");
            }
            failedDifference = failDifference(failCount, lastFailCount);
        } else {
            listener.getLogger().println("This build does not have a test run");
        }
        return new TestResultDto(formattedTestDuration, totalCount, failCount, failedDifference, testDuration);
    }

    /**
     * Walks backwards through earlier build numbers and returns the first test
     * result found, or null when no earlier build has one. (Removed the dead
     * {@code result} variable that was never assigned in the old loop guard.)
     */
    private static Object getPreviousTestResult(AbstractBuild build) {
        int counter = build.getNumber();
        while (counter > 0) {
            counter--;
            Run candidateBuild = build.getParent().getBuild(String.valueOf(counter));
            if (candidateBuild == null) {
                continue; // build may have been deleted/rotated
            }
            AbstractTestResultAction candidateTestResultAction = candidateBuild.getAction(AbstractTestResultAction.class);
            if (candidateTestResultAction != null) {
                return candidateTestResultAction.getResult();
            }
        }
        return null;
    }

    /**
     * Formats the signed fail-count delta, e.g. "+3", "-1" or "±0".
     */
    private static String failDifference(int failCount1, int failCount2) {
        String sign;
        if (failCount1 > failCount2) {
            sign = "+";
        } else if (failCount1 < failCount2) {
            sign = "-";
        } else {
            sign = "±";
        }
        return sign + Math.abs(failCount1 - failCount2);
    }
}
| 1.289063 | 1 |
indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java | drcrallen/druid | 1 | 225 | /*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.indexer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import com.google.common.primitives.Longs;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import io.druid.common.guava.ThreadRenamingRunnable;
import io.druid.concurrent.Execs;
import io.druid.data.input.InputRow;
import io.druid.data.input.Row;
import io.druid.data.input.Rows;
import io.druid.indexer.hadoop.SegmentInputRow;
import io.druid.java.util.common.IAE;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.logger.Logger;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.segment.BaseProgressIndicator;
import io.druid.segment.ProgressIndicator;
import io.druid.segment.QueryableIndex;
import io.druid.segment.column.ColumnCapabilitiesImpl;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.incremental.OnheapIncrementalIndex;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.NumberedShardSpec;
import io.druid.timeline.partition.ShardSpec;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.InvalidJobConfException;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
*/
public class IndexGeneratorJob implements Jobby
{
private static final Logger log = new Logger(IndexGeneratorJob.class);
  /**
   * Reads every segment descriptor JSON file written under the job's
   * descriptor-info directory and returns the corresponding segments.
   *
   * @param config indexer configuration supplying the descriptor-info path
   * @return all segments published by a completed index-generator run
   */
  public static List<DataSegment> getPublishedSegments(HadoopDruidIndexerConfig config)
  {
    final Configuration conf = JobHelper.injectSystemProperties(new Configuration());
    final ObjectMapper jsonMapper = HadoopDruidIndexerConfig.JSON_MAPPER;
    ImmutableList.Builder<DataSegment> publishedSegmentsBuilder = ImmutableList.builder();
    final Path descriptorInfoDir = config.makeDescriptorInfoDir();
    try {
      FileSystem fs = descriptorInfoDir.getFileSystem(conf);
      // Each file in the directory holds the JSON descriptor of one segment.
      for (FileStatus status : fs.listStatus(descriptorInfoDir)) {
        final DataSegment segment = jsonMapper.readValue(fs.open(status.getPath()), DataSegment.class);
        publishedSegmentsBuilder.add(segment);
        log.info("Adding segment %s to the list of published segments", segment.getIdentifier());
      }
    }
    catch (FileNotFoundException e) {
      // A missing directory usually means the job emitted no segments at all.
      log.error(
          "[%s] SegmentDescriptorInfo is not found usually when indexing process did not produce any segments meaning"
          + " either there was no input data to process or all the input events were discarded due to some error",
          e.getMessage()
      );
      Throwables.propagate(e);
    }
    catch (IOException e) {
      throw Throwables.propagate(e);
    }
    List<DataSegment> publishedSegments = publishedSegmentsBuilder.build();
    return publishedSegments;
  }
  // Job-wide indexer configuration (schema, tuning, paths).
  private final HadoopDruidIndexerConfig config;
  // Mutable job statistics, currently only the invalid-row counter.
  private IndexGeneratorStats jobStats;

  public IndexGeneratorJob(
      HadoopDruidIndexerConfig config
  )
  {
    this.config = config;
    this.jobStats = new IndexGeneratorStats();
  }

  // Overridable hook so subclasses can substitute a different reducer class.
  protected void setReducerClass(final Job job)
  {
    job.setReducerClass(IndexGeneratorReducer.class);
  }

  public IndexGeneratorStats getJobStats()
  {
    return jobStats;
  }
  /**
   * Configures and submits the index-generator Hadoop job, blocks until it
   * finishes, records the invalid-row counter, and returns job success.
   */
  public boolean run()
  {
    try {
      Job job = Job.getInstance(
          new Configuration(),
          String.format("%s-index-generator-%s", config.getDataSource(), config.getIntervals())
      );

      job.getConfiguration().set("io.sort.record.percent", "0.23");

      JobHelper.injectSystemProperties(job);
      config.addJobProperties(job);

      job.setMapperClass(IndexGeneratorMapper.class);
      job.setMapOutputValueClass(BytesWritable.class);

      SortableBytes.useSortableBytesAsMapOutputKey(job);

      // One reducer per (interval, shard) bucket; zero buckets means no input.
      int numReducers = Iterables.size(config.getAllBuckets().get());
      if (numReducers == 0) {
        throw new RuntimeException("No buckets?? seems there is no data to index.");
      }

      if (config.getSchema().getTuningConfig().getUseCombiner()) {
        job.setCombinerClass(IndexGeneratorCombiner.class);
        job.setCombinerKeyGroupingComparatorClass(BytesWritable.Comparator.class);
      }

      job.setNumReduceTasks(numReducers);
      job.setPartitionerClass(IndexGeneratorPartitioner.class);

      setReducerClass(job);
      job.setOutputKeyClass(BytesWritable.class);
      job.setOutputValueClass(Text.class);
      job.setOutputFormatClass(IndexGeneratorOutputFormat.class);
      FileOutputFormat.setOutputPath(job, config.makeIntermediatePath());

      config.addInputPaths(job);

      config.intoConfiguration(job);

      // Ship the druid jars to the cluster classpath.
      JobHelper.setupClasspath(
          JobHelper.distributedClassPath(config.getWorkingPath()),
          JobHelper.distributedClassPath(config.makeIntermediatePath()),
          job
      );

      job.submit();
      log.info("Job %s submitted, status available at %s", job.getJobName(), job.getTrackingURL());

      boolean success = job.waitForCompletion(true);

      Counter invalidRowCount = job.getCounters()
                                   .findCounter(HadoopDruidIndexerConfig.IndexJobCounters.INVALID_ROW_COUNTER);
      jobStats.setInvalidRowCount(invalidRowCount.getValue());

      return success;
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  /**
   * Builds a fresh on-heap incremental index for one bucket, using the job's
   * parse spec, granularity, rollup setting and row-flush limit.
   *
   * @param theBucket bucket whose start time becomes the index's min timestamp
   * @param aggs aggregators to apply (combining factories when re-aggregating)
   * @param config job configuration
   * @param oldDimOrder dimension order carried over from a previous (full) index, may be null
   * @param oldCapabilities column capabilities matching {@code oldDimOrder}
   */
  private static IncrementalIndex makeIncrementalIndex(
      Bucket theBucket,
      AggregatorFactory[] aggs,
      HadoopDruidIndexerConfig config,
      Iterable<String> oldDimOrder,
      Map<String, ColumnCapabilitiesImpl> oldCapabilities
  )
  {
    final HadoopTuningConfig tuningConfig = config.getSchema().getTuningConfig();
    final IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder()
        .withMinTimestamp(theBucket.time.getMillis())
        .withTimestampSpec(config.getSchema().getDataSchema().getParser().getParseSpec().getTimestampSpec())
        .withDimensionsSpec(config.getSchema().getDataSchema().getParser())
        .withQueryGranularity(config.getSchema().getDataSchema().getGranularitySpec().getQueryGranularity())
        .withMetrics(aggs)
        .withRollup(config.getSchema().getDataSchema().getGranularitySpec().isRollup())
        .build();

    OnheapIncrementalIndex newIndex = new OnheapIncrementalIndex(
        indexSchema,
        !tuningConfig.isIgnoreInvalidRows(),
        tuningConfig.getRowFlushBoundary()
    );

    // Preserve the dimension order of the index being continued, unless the
    // schema pins an explicit custom dimension list.
    if (oldDimOrder != null && !indexSchema.getDimensionsSpec().hasCustomDimensions()) {
      newIndex.loadDimensionIterable(oldDimOrder, oldCapabilities);
    }

    return newIndex;
  }
  /**
   * Mapper: serializes each input row and emits it keyed by its bucket group
   * key plus (truncated timestamp, hashed dimensions), so rows group by bucket
   * and sort by time/dimension hash on the reduce side.
   */
  public static class IndexGeneratorMapper extends HadoopDruidIndexerMapper<BytesWritable, BytesWritable>
  {
    private static final HashFunction hashFunction = Hashing.murmur3_128();

    private AggregatorFactory[] aggregators;
    // Combining counterparts, used for rows that were already aggregated once.
    private AggregatorFactory[] combiningAggs;

    @Override
    protected void setup(Context context)
        throws IOException, InterruptedException
    {
      super.setup(context);
      aggregators = config.getSchema().getDataSchema().getAggregators();
      combiningAggs = new AggregatorFactory[aggregators.length];
      for (int i = 0; i < aggregators.length; ++i) {
        combiningAggs[i] = aggregators[i].getCombiningFactory();
      }
    }

    @Override
    protected void innerMap(
        InputRow inputRow,
        Object value,
        Context context,
        boolean reportParseExceptions
    ) throws IOException, InterruptedException
    {
      // Group by bucket, sort by timestamp
      final Optional<Bucket> bucket = getConfig().getBucket(inputRow);

      if (!bucket.isPresent()) {
        throw new ISE("WTF?! No bucket found for row: %s", inputRow);
      }

      final long truncatedTimestamp = granularitySpec.getQueryGranularity().truncate(inputRow.getTimestampFromEpoch());
      final byte[] hashedDimensions = hashFunction.hashBytes(
          HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsBytes(
              Rows.toGroupKey(
                  truncatedTimestamp,
                  inputRow
              )
          )
      ).asBytes();

      // type SegmentInputRow serves as a marker that these InputRow instances have already been combined
      // and they contain the columns as they show up in the segment after ingestion, not what you would see in raw
      // data
      byte[] serializedInputRow = inputRow instanceof SegmentInputRow ?
                                  InputRowSerde.toBytes(inputRow, combiningAggs, reportParseExceptions)
                                                                     :
                                  InputRowSerde.toBytes(inputRow, aggregators, reportParseExceptions);

      context.write(
          new SortableBytes(
              bucket.get().toGroupKey(),
              // sort rows by truncated timestamp and hashed dimensions to help reduce spilling on the reducer side
              ByteBuffer.allocate(Longs.BYTES + hashedDimensions.length)
                        .putLong(truncatedTimestamp)
                        .put(hashedDimensions)
                        .array()
          ).toBytesWritable(),
          new BytesWritable(serializedInputRow)
      );
    }
  }
  /**
   * Combiner: pre-aggregates map output for one key through an in-memory
   * incremental index, flushing (and re-creating) the index whenever it fills,
   * to shrink the data shuffled to the reducers. A single value passes through
   * untouched.
   */
  public static class IndexGeneratorCombiner extends Reducer<BytesWritable, BytesWritable, BytesWritable, BytesWritable>
  {
    private HadoopDruidIndexerConfig config;
    private AggregatorFactory[] aggregators;
    private AggregatorFactory[] combiningAggs;

    @Override
    protected void setup(Context context)
        throws IOException, InterruptedException
    {
      config = HadoopDruidIndexerConfig.fromConfiguration(context.getConfiguration());

      aggregators = config.getSchema().getDataSchema().getAggregators();
      combiningAggs = new AggregatorFactory[aggregators.length];
      for (int i = 0; i < aggregators.length; ++i) {
        combiningAggs[i] = aggregators[i].getCombiningFactory();
      }
    }

    @Override
    protected void reduce(
        final BytesWritable key, Iterable<BytesWritable> values, final Context context
    ) throws IOException, InterruptedException
    {
      Iterator<BytesWritable> iter = values.iterator();
      BytesWritable first = iter.next();

      if (iter.hasNext()) {
        LinkedHashSet<String> dimOrder = Sets.newLinkedHashSet();
        SortableBytes keyBytes = SortableBytes.fromBytesWritable(key);
        Bucket bucket = Bucket.fromGroupKey(keyBytes.getGroupKey()).lhs;
        IncrementalIndex index = makeIncrementalIndex(bucket, combiningAggs, config, null, null);
        index.add(InputRowSerde.fromBytes(first.getBytes(), aggregators));

        while (iter.hasNext()) {
          context.progress();
          InputRow value = InputRowSerde.fromBytes(iter.next().getBytes(), aggregators);

          // When the index hits its row limit, flush what we have and start a
          // fresh index that keeps the accumulated dimension order.
          if (!index.canAppendRow()) {
            dimOrder.addAll(index.getDimensionOrder());
            log.info("current index full due to [%s]. creating new index.", index.getOutOfRowsReason());
            flushIndexToContextAndClose(key, index, context);
            index = makeIncrementalIndex(bucket, combiningAggs, config, dimOrder, index.getColumnCapabilities());
          }

          index.add(value);
        }

        flushIndexToContextAndClose(key, index, context);
      } else {
        // Only one value for this key: nothing to combine, pass it through.
        context.write(key, first);
      }
    }

    // Writes every (re-)aggregated row of the index to the context, then closes it.
    private void flushIndexToContextAndClose(BytesWritable key, IncrementalIndex index, Context context)
        throws IOException, InterruptedException
    {
      final List<String> dimensions = index.getDimensionNames();
      Iterator<Row> rows = index.iterator();
      while (rows.hasNext()) {
        context.progress();
        Row row = rows.next();
        InputRow inputRow = getInputRowFromRow(row, dimensions);

        // reportParseExceptions is true as any unparseable data is already handled by the mapper.
        context.write(
            key,
            new BytesWritable(InputRowSerde.toBytes(inputRow, combiningAggs, true))
        );
      }
      index.close();
    }

    // Adapts an aggregated Row back into an InputRow view with a fixed dimension list.
    private InputRow getInputRowFromRow(final Row row, final List<String> dimensions)
    {
      return new InputRow()
      {
        @Override
        public List<String> getDimensions()
        {
          return dimensions;
        }

        @Override
        public long getTimestampFromEpoch()
        {
          return row.getTimestampFromEpoch();
        }

        @Override
        public DateTime getTimestamp()
        {
          return row.getTimestamp();
        }

        @Override
        public List<String> getDimension(String dimension)
        {
          return row.getDimension(dimension);
        }

        @Override
        public Object getRaw(String dimension)
        {
          return row.getRaw(dimension);
        }

        @Override
        public float getFloatMetric(String metric)
        {
          return row.getFloatMetric(metric);
        }

        @Override
        public long getLongMetric(String metric)
        {
          return row.getLongMetric(metric);
        }

        @Override
        public int compareTo(Row o)
        {
          return row.compareTo(o);
        }
      };
    }
  }
public static class IndexGeneratorPartitioner extends Partitioner<BytesWritable, Writable> implements Configurable
{
private Configuration config;
@Override
public int getPartition(BytesWritable bytesWritable, Writable value, int numPartitions)
{
final ByteBuffer bytes = ByteBuffer.wrap(bytesWritable.getBytes());
bytes.position(4); // Skip length added by SortableBytes
int shardNum = bytes.getInt();
if (config.get("mapred.job.tracker").equals("local")) {
return shardNum % numPartitions;
} else {
if (shardNum >= numPartitions) {
throw new ISE("Not enough partitions, shard[%,d] >= numPartitions[%,d]", shardNum, numPartitions);
}
return shardNum;
}
}
@Override
public Configuration getConf()
{
return config;
}
@Override
public void setConf(Configuration config)
{
this.config = config;
}
}
public static class IndexGeneratorReducer extends Reducer<BytesWritable, BytesWritable, BytesWritable, Text>
{
protected HadoopDruidIndexerConfig config;
private List<String> metricNames = Lists.newArrayList();
private AggregatorFactory[] aggregators;
private AggregatorFactory[] combiningAggs;
protected ProgressIndicator makeProgressIndicator(final Context context)
{
return new BaseProgressIndicator()
{
@Override
public void progress()
{
super.progress();
context.progress();
}
};
}
private File persist(
final IncrementalIndex index,
final Interval interval,
final File file,
final ProgressIndicator progressIndicator
) throws IOException
{
if (config.isBuildV9Directly()) {
return HadoopDruidIndexerConfig.INDEX_MERGER_V9.persist(
index, interval, file, config.getIndexSpec(), progressIndicator
);
} else {
return HadoopDruidIndexerConfig.INDEX_MERGER.persist(
index, interval, file, config.getIndexSpec(), progressIndicator
);
}
}
protected File mergeQueryableIndex(
final List<QueryableIndex> indexes,
final AggregatorFactory[] aggs,
final File file,
ProgressIndicator progressIndicator
) throws IOException
{
boolean rollup = config.getSchema().getDataSchema().getGranularitySpec().isRollup();
if (config.isBuildV9Directly()) {
return HadoopDruidIndexerConfig.INDEX_MERGER_V9.mergeQueryableIndex(
indexes, rollup, aggs, file, config.getIndexSpec(), progressIndicator
);
} else {
return HadoopDruidIndexerConfig.INDEX_MERGER.mergeQueryableIndex(
indexes, rollup, aggs, file, config.getIndexSpec(), progressIndicator
);
}
}
@Override
protected void setup(Context context)
throws IOException, InterruptedException
{
config = HadoopDruidIndexerConfig.fromConfiguration(context.getConfiguration());
aggregators = config.getSchema().getDataSchema().getAggregators();
combiningAggs = new AggregatorFactory[aggregators.length];
for (int i = 0; i < aggregators.length; ++i) {
metricNames.add(aggregators[i].getName());
combiningAggs[i] = aggregators[i].getCombiningFactory();
}
}
@Override
protected void reduce(
BytesWritable key, Iterable<BytesWritable> values, final Context context
) throws IOException, InterruptedException
{
SortableBytes keyBytes = SortableBytes.fromBytesWritable(key);
Bucket bucket = Bucket.fromGroupKey(keyBytes.getGroupKey()).lhs;
final Interval interval = config.getGranularitySpec().bucketInterval(bucket.time).get();
ListeningExecutorService persistExecutor = null;
List<ListenableFuture<?>> persistFutures = Lists.newArrayList();
IncrementalIndex index = makeIncrementalIndex(
bucket,
combiningAggs,
config,
null,
null
);
try {
File baseFlushFile = File.createTempFile("base", "flush");
baseFlushFile.delete();
baseFlushFile.mkdirs();
Set<File> toMerge = Sets.newTreeSet();
int indexCount = 0;
int lineCount = 0;
int runningTotalLineCount = 0;
long startTime = System.currentTimeMillis();
Set<String> allDimensionNames = Sets.newLinkedHashSet();
final ProgressIndicator progressIndicator = makeProgressIndicator(context);
int numBackgroundPersistThreads = config.getSchema().getTuningConfig().getNumBackgroundPersistThreads();
if (numBackgroundPersistThreads > 0) {
final BlockingQueue<Runnable> queue = new SynchronousQueue<>();
ExecutorService executorService = new ThreadPoolExecutor(
numBackgroundPersistThreads,
numBackgroundPersistThreads,
0L,
TimeUnit.MILLISECONDS,
queue,
Execs.makeThreadFactory("IndexGeneratorJob_persist_%d"),
new RejectedExecutionHandler()
{
@Override
public void rejectedExecution(Runnable r, ThreadPoolExecutor executor)
{
try {
executor.getQueue().put(r);
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RejectedExecutionException("Got Interrupted while adding to the Queue");
}
}
}
);
persistExecutor = MoreExecutors.listeningDecorator(executorService);
} else {
persistExecutor = MoreExecutors.sameThreadExecutor();
}
for (final BytesWritable bw : values) {
context.progress();
final InputRow inputRow = index.formatRow(InputRowSerde.fromBytes(bw.getBytes(), aggregators));
int numRows = index.add(inputRow);
++lineCount;
if (!index.canAppendRow()) {
allDimensionNames.addAll(index.getDimensionOrder());
log.info(index.getOutOfRowsReason());
log.info(
"%,d lines to %,d rows in %,d millis",
lineCount - runningTotalLineCount,
numRows,
System.currentTimeMillis() - startTime
);
runningTotalLineCount = lineCount;
final File file = new File(baseFlushFile, String.format("index%,05d", indexCount));
toMerge.add(file);
context.progress();
final IncrementalIndex persistIndex = index;
persistFutures.add(
persistExecutor.submit(
new ThreadRenamingRunnable(String.format("%s-persist", file.getName()))
{
@Override
public void doRun()
{
try {
persist(persistIndex, interval, file, progressIndicator);
}
catch (Exception e) {
log.error(e, "persist index error");
throw Throwables.propagate(e);
}
finally {
// close this index
persistIndex.close();
}
}
}
)
);
index = makeIncrementalIndex(
bucket,
combiningAggs,
config,
allDimensionNames,
persistIndex.getColumnCapabilities()
);
startTime = System.currentTimeMillis();
++indexCount;
}
}
allDimensionNames.addAll(index.getDimensionOrder());
log.info("%,d lines completed.", lineCount);
List<QueryableIndex> indexes = Lists.newArrayListWithCapacity(indexCount);
final File mergedBase;
if (toMerge.size() == 0) {
if (index.isEmpty()) {
throw new IAE("If you try to persist empty indexes you are going to have a bad time");
}
mergedBase = new File(baseFlushFile, "merged");
persist(index, interval, mergedBase, progressIndicator);
} else {
if (!index.isEmpty()) {
final File finalFile = new File(baseFlushFile, "final");
persist(index, interval, finalFile, progressIndicator);
toMerge.add(finalFile);
}
Futures.allAsList(persistFutures).get(1, TimeUnit.HOURS);
persistExecutor.shutdown();
for (File file : toMerge) {
indexes.add(HadoopDruidIndexerConfig.INDEX_IO.loadIndex(file));
}
mergedBase = mergeQueryableIndex(
indexes, aggregators, new File(baseFlushFile, "merged"), progressIndicator
);
}
final FileSystem outputFS = new Path(config.getSchema().getIOConfig().getSegmentOutputPath())
.getFileSystem(context.getConfiguration());
// ShardSpec used for partitioning within this Hadoop job.
final ShardSpec shardSpecForPartitioning = config.getShardSpec(bucket).getActualSpec();
// ShardSpec to be published.
final ShardSpec shardSpecForPublishing;
if (config.isForceExtendableShardSpecs()) {
shardSpecForPublishing = new NumberedShardSpec(shardSpecForPartitioning.getPartitionNum(),config.getShardSpecCount(bucket));
} else {
shardSpecForPublishing = shardSpecForPartitioning;
}
final DataSegment segmentTemplate = new DataSegment(
config.getDataSource(),
interval,
config.getSchema().getTuningConfig().getVersion(),
null,
ImmutableList.copyOf(allDimensionNames),
metricNames,
shardSpecForPublishing,
-1,
-1
);
final DataSegment segment = JobHelper.serializeOutIndex(
segmentTemplate,
context.getConfiguration(),
context,
context.getTaskAttemptID(),
mergedBase,
JobHelper.makeSegmentOutputPath(
new Path(config.getSchema().getIOConfig().getSegmentOutputPath()),
outputFS,
segmentTemplate
)
);
Path descriptorPath = config.makeDescriptorInfoPath(segment);
descriptorPath = JobHelper.prependFSIfNullScheme(
FileSystem.get(
descriptorPath.toUri(),
context.getConfiguration()
), descriptorPath
);
log.info("Writing descriptor to path[%s]", descriptorPath);
JobHelper.writeSegmentDescriptor(
config.makeDescriptorInfoDir().getFileSystem(context.getConfiguration()),
segment,
descriptorPath,
context
);
for (File file : toMerge) {
FileUtils.deleteDirectory(file);
}
}
catch (ExecutionException e) {
throw Throwables.propagate(e);
}
catch (TimeoutException e) {
throw Throwables.propagate(e);
}
finally {
index.close();
if (persistExecutor != null) {
persistExecutor.shutdownNow();
}
}
}
}
public static class IndexGeneratorOutputFormat extends TextOutputFormat
{
@Override
public void checkOutputSpecs(JobContext job) throws IOException
{
Path outDir = getOutputPath(job);
if (outDir == null) {
throw new InvalidJobConfException("Output directory not set.");
}
}
}
public static class IndexGeneratorStats
{
private long invalidRowCount = 0;
public long getInvalidRowCount()
{
return invalidRowCount;
}
public void setInvalidRowCount(long invalidRowCount)
{
this.invalidRowCount = invalidRowCount;
}
}
}
| 1.28125 | 1 |
package com.alipay.api.domain;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
 * User information.
 *
 * @author auto create
 * @since 1.0, 2021-08-30 10:48:04
 */
public class UserInfomation extends AlipayObject {
private static final long serialVersionUID = 6269437764973454579L;
/**
* 证件号
*/
@ApiField("cert_no")
private String certNo;
/**
* 身份证:IDENTITY_CARD、护照:PASSPORT、军官证:OFFICER_CARD、士兵证:SOLDIER_CARD、户口本:HOKOU等。如有其它类型需要支持,请与蚂蚁金服工作人员联系。
*/
@ApiField("cert_type")
private String certType;
/**
* 扩展信息
*/
@ApiListField("ext_info")
@ApiField("order_ext_info")
private List<OrderExtInfo> extInfo;
/**
* 手机号
*/
@ApiField("mobile")
private String mobile;
/**
* 姓名
*/
@ApiField("name")
private String name;
/**
* 支付宝uid
*/
@ApiField("user_id")
private String userId;
public String getCertNo() {
return this.certNo;
}
public void setCertNo(String certNo) {
this.certNo = certNo;
}
public String getCertType() {
return this.certType;
}
public void setCertType(String certType) {
this.certType = certType;
}
public List<OrderExtInfo> getExtInfo() {
return this.extInfo;
}
public void setExtInfo(List<OrderExtInfo> extInfo) {
this.extInfo = extInfo;
}
public String getMobile() {
return this.mobile;
}
public void setMobile(String mobile) {
this.mobile = mobile;
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public String getUserId() {
return this.userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
}
| 1.03125 | 1 |
//////////////////////////////////////////////////////////
// GENERATED BY FLUTTIFY. DO NOT EDIT IT.
//////////////////////////////////////////////////////////
package me.yohom.bmap_map_fluttify.sub_handler;
import android.os.Bundle;
import android.util.Log;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import androidx.annotation.NonNull;
import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.PluginRegistry.Registrar;
import io.flutter.plugin.common.StandardMethodCodec;
import io.flutter.plugin.platform.PlatformViewRegistry;
import me.yohom.bmap_map_fluttify.BmapMapFluttifyPlugin.Handler;
import me.yohom.foundation_fluttify.core.FluttifyMessageCodec;
import static me.yohom.foundation_fluttify.FoundationFluttifyPluginKt.getEnableLog;
import static me.yohom.foundation_fluttify.FoundationFluttifyPluginKt.getHEAP;
@SuppressWarnings("ALL")
public class SubHandler3 {
public static Map<String, Handler> getSubHandler(BinaryMessenger messenger) {
return new HashMap<String, Handler>() {{
// getter
put("com.baidu.mapapi.search.geocode.GeoCodeOption::get_mCity_batch", (__argsBatch__, __methodResult__) -> {
List<String> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.geocode.GeoCodeOption __this__ = (com.baidu.mapapi.search.geocode.GeoCodeOption) __args__.get("__this__");
String __result__ = __this__.mCity;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.search.geocode.GeoCodeOption::get_mAddress_batch", (__argsBatch__, __methodResult__) -> {
List<String> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.geocode.GeoCodeOption __this__ = (com.baidu.mapapi.search.geocode.GeoCodeOption) __args__.get("__this__");
String __result__ = __this__.mAddress;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.search.share.LocationShareURLOption::get_mLocation_batch", (__argsBatch__, __methodResult__) -> {
List<com.baidu.mapapi.model.LatLng> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.share.LocationShareURLOption __this__ = (com.baidu.mapapi.search.share.LocationShareURLOption) __args__.get("__this__");
com.baidu.mapapi.model.LatLng __result__ = __this__.mLocation;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.search.share.LocationShareURLOption::get_mName_batch", (__argsBatch__, __methodResult__) -> {
List<String> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.share.LocationShareURLOption __this__ = (com.baidu.mapapi.search.share.LocationShareURLOption) __args__.get("__this__");
String __result__ = __this__.mName;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.search.share.LocationShareURLOption::get_mSnippet_batch", (__argsBatch__, __methodResult__) -> {
List<String> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.share.LocationShareURLOption __this__ = (com.baidu.mapapi.search.share.LocationShareURLOption) __args__.get("__this__");
String __result__ = __this__.mSnippet;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.search.share.PoiDetailShareURLOption::get_mUid_batch", (__argsBatch__, __methodResult__) -> {
List<String> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.share.PoiDetailShareURLOption __this__ = (com.baidu.mapapi.search.share.PoiDetailShareURLOption) __args__.get("__this__");
String __result__ = __this__.mUid;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.search.share.RouteShareURLOption::get_mFrom_batch", (__argsBatch__, __methodResult__) -> {
List<com.baidu.mapapi.search.route.PlanNode> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.share.RouteShareURLOption __this__ = (com.baidu.mapapi.search.share.RouteShareURLOption) __args__.get("__this__");
com.baidu.mapapi.search.route.PlanNode __result__ = __this__.mFrom;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.search.share.RouteShareURLOption::get_mTo_batch", (__argsBatch__, __methodResult__) -> {
List<com.baidu.mapapi.search.route.PlanNode> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.share.RouteShareURLOption __this__ = (com.baidu.mapapi.search.share.RouteShareURLOption) __args__.get("__this__");
com.baidu.mapapi.search.route.PlanNode __result__ = __this__.mTo;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.search.share.RouteShareURLOption::get_mMode_batch", (__argsBatch__, __methodResult__) -> {
List<com.baidu.mapapi.search.share.RouteShareURLOption.RouteShareMode> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.share.RouteShareURLOption __this__ = (com.baidu.mapapi.search.share.RouteShareURLOption) __args__.get("__this__");
com.baidu.mapapi.search.share.RouteShareURLOption.RouteShareMode __result__ = __this__.mMode;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.search.share.RouteShareURLOption::get_mPn_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.share.RouteShareURLOption __this__ = (com.baidu.mapapi.search.share.RouteShareURLOption) __args__.get("__this__");
Integer __result__ = __this__.mPn;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.search.share.RouteShareURLOption::get_mCityCode_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.search.share.RouteShareURLOption __this__ = (com.baidu.mapapi.search.share.RouteShareURLOption) __args__.get("__this__");
Integer __result__ = __this__.mCityCode;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.Tile::get_data_batch", (__argsBatch__, __methodResult__) -> {
List<byte[]> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.Tile __this__ = (com.baidu.mapapi.map.Tile) __args__.get("__this__");
byte[] __result__ = __this__.data;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.Tile::get_height_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.Tile __this__ = (com.baidu.mapapi.map.Tile) __args__.get("__this__");
Integer __result__ = __this__.height;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.Tile::get_width_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.Tile __this__ = (com.baidu.mapapi.map.Tile) __args__.get("__this__");
Integer __result__ = __this__.width;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.WinRound::get_left_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.WinRound __this__ = (com.baidu.mapapi.map.WinRound) __args__.get("__this__");
Integer __result__ = __this__.left;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.WinRound::get_right_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.WinRound __this__ = (com.baidu.mapapi.map.WinRound) __args__.get("__this__");
Integer __result__ = __this__.right;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.WinRound::get_top_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.WinRound __this__ = (com.baidu.mapapi.map.WinRound) __args__.get("__this__");
Integer __result__ = __this__.top;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.WinRound::get_bottom_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.WinRound __this__ = (com.baidu.mapapi.map.WinRound) __args__.get("__this__");
Integer __result__ = __this__.bottom;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.Stroke::get_strokeWidth_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.Stroke __this__ = (com.baidu.mapapi.map.Stroke) __args__.get("__this__");
Integer __result__ = __this__.strokeWidth;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.Stroke::get_color_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.Stroke __this__ = (com.baidu.mapapi.map.Stroke) __args__.get("__this__");
Integer __result__ = __this__.color;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.WeightedLatLng::get_intensity_batch", (__argsBatch__, __methodResult__) -> {
List<Double> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.WeightedLatLng __this__ = (com.baidu.mapapi.map.WeightedLatLng) __args__.get("__this__");
Double __result__ = __this__.intensity;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.WeightedLatLng::get_latLng_batch", (__argsBatch__, __methodResult__) -> {
List<com.baidu.mapapi.model.LatLng> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.WeightedLatLng __this__ = (com.baidu.mapapi.map.WeightedLatLng) __args__.get("__this__");
com.baidu.mapapi.model.LatLng __result__ = __this__.latLng;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.BaiduMap::get_mapStatusReason_batch", (__argsBatch__, __methodResult__) -> {
List<Integer> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.BaiduMap __this__ = (com.baidu.mapapi.map.BaiduMap) __args__.get("__this__");
Integer __result__ = __this__.mapStatusReason;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.MapStatus::get_rotate_batch", (__argsBatch__, __methodResult__) -> {
List<Float> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.MapStatus __this__ = (com.baidu.mapapi.map.MapStatus) __args__.get("__this__");
Float __result__ = __this__.rotate;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.MapStatus::get_target_batch", (__argsBatch__, __methodResult__) -> {
List<com.baidu.mapapi.model.LatLng> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.MapStatus __this__ = (com.baidu.mapapi.map.MapStatus) __args__.get("__this__");
com.baidu.mapapi.model.LatLng __result__ = __this__.target;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.MapStatus::get_overlook_batch", (__argsBatch__, __methodResult__) -> {
List<Float> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.MapStatus __this__ = (com.baidu.mapapi.map.MapStatus) __args__.get("__this__");
Float __result__ = __this__.overlook;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.MapStatus::get_zoom_batch", (__argsBatch__, __methodResult__) -> {
List<Float> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.MapStatus __this__ = (com.baidu.mapapi.map.MapStatus) __args__.get("__this__");
Float __result__ = __this__.zoom;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.MapStatus::get_targetScreen_batch", (__argsBatch__, __methodResult__) -> {
List<android.graphics.Point> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.MapStatus __this__ = (com.baidu.mapapi.map.MapStatus) __args__.get("__this__");
android.graphics.Point __result__ = __this__.targetScreen;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.MapStatus::get_bound_batch", (__argsBatch__, __methodResult__) -> {
List<com.baidu.mapapi.model.LatLngBounds> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.MapStatus __this__ = (com.baidu.mapapi.map.MapStatus) __args__.get("__this__");
com.baidu.mapapi.model.LatLngBounds __result__ = __this__.bound;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter
put("com.baidu.mapapi.map.MapStatus::get_winRound_batch", (__argsBatch__, __methodResult__) -> {
List<com.baidu.mapapi.map.WinRound> __resultList__ = new ArrayList<>();
for (int __i__ = 0; __i__ < ((List<Map<String, Object>>) __argsBatch__).size(); __i__++) {
Map<String, Object> __args__ = ((List<Map<String, Object>>) __argsBatch__).get(__i__);
// ref object
com.baidu.mapapi.map.MapStatus __this__ = (com.baidu.mapapi.map.MapStatus) __args__.get("__this__");
com.baidu.mapapi.map.WinRound __result__ = __this__.winRound;
__resultList__.add(__result__);
}
__methodResult__.success(__resultList__);
});
// getter (batched): MyLocationConfiguration.locationMode per referenced instance
put("com.baidu.mapapi.map.MyLocationConfiguration::get_locationMode_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<com.baidu.mapapi.map.MyLocationConfiguration.LocationMode> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        // target object is passed by reference under the "__this__" key
        com.baidu.mapapi.map.MyLocationConfiguration target = (com.baidu.mapapi.map.MyLocationConfiguration) args.get("__this__");
        results.add(target.locationMode);
    }
    methodResult.success(results);
});
// getter (batched): MyLocationConfiguration.enableDirection per referenced instance
put("com.baidu.mapapi.map.MyLocationConfiguration::get_enableDirection_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Boolean> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.MyLocationConfiguration target = (com.baidu.mapapi.map.MyLocationConfiguration) args.get("__this__");
        results.add(target.enableDirection);
    }
    methodResult.success(results);
});
// getter (batched): MyLocationConfiguration.customMarker per referenced instance
put("com.baidu.mapapi.map.MyLocationConfiguration::get_customMarker_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<com.baidu.mapapi.map.BitmapDescriptor> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.MyLocationConfiguration target = (com.baidu.mapapi.map.MyLocationConfiguration) args.get("__this__");
        results.add(target.customMarker);
    }
    methodResult.success(results);
});
// getter (batched): MyLocationConfiguration.accuracyCircleFillColor per referenced instance
put("com.baidu.mapapi.map.MyLocationConfiguration::get_accuracyCircleFillColor_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.MyLocationConfiguration target = (com.baidu.mapapi.map.MyLocationConfiguration) args.get("__this__");
        results.add(target.accuracyCircleFillColor);
    }
    methodResult.success(results);
});
// getter (batched): MyLocationConfiguration.accuracyCircleStrokeColor per referenced instance
put("com.baidu.mapapi.map.MyLocationConfiguration::get_accuracyCircleStrokeColor_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.MyLocationConfiguration target = (com.baidu.mapapi.map.MyLocationConfiguration) args.get("__this__");
        results.add(target.accuracyCircleStrokeColor);
    }
    methodResult.success(results);
});
// getter (batched): TileOverlayOptions.datasource per referenced instance
put("com.baidu.mapapi.map.TileOverlayOptions::get_datasource_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        // target object is passed by reference under the "__this__" key
        com.baidu.mapapi.map.TileOverlayOptions target = (com.baidu.mapapi.map.TileOverlayOptions) args.get("__this__");
        results.add(target.datasource);
    }
    methodResult.success(results);
});
// getter (batched): TileOverlayOptions.urlString per referenced instance
put("com.baidu.mapapi.map.TileOverlayOptions::get_urlString_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<String> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.TileOverlayOptions target = (com.baidu.mapapi.map.TileOverlayOptions) args.get("__this__");
        results.add(target.urlString);
    }
    methodResult.success(results);
});
// getter (batched): WearMapView.mTask per referenced instance
put("com.baidu.mapapi.map.WearMapView::get_mTask_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<com.baidu.mapapi.map.WearMapView.AnimationTask> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        // target object is passed by reference under the "__this__" key
        com.baidu.mapapi.map.WearMapView target = (com.baidu.mapapi.map.WearMapView) args.get("__this__");
        results.add(target.mTask);
    }
    methodResult.success(results);
});
// getter (batched): MKOLUpdateElement.cityID per referenced instance
put("com.baidu.mapapi.map.offline.MKOLUpdateElement::get_cityID_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        // target object is passed by reference under the "__this__" key
        com.baidu.mapapi.map.offline.MKOLUpdateElement target = (com.baidu.mapapi.map.offline.MKOLUpdateElement) args.get("__this__");
        results.add(target.cityID);
    }
    methodResult.success(results);
});
// getter (batched): MKOLUpdateElement.cityName per referenced instance
put("com.baidu.mapapi.map.offline.MKOLUpdateElement::get_cityName_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<String> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLUpdateElement target = (com.baidu.mapapi.map.offline.MKOLUpdateElement) args.get("__this__");
        results.add(target.cityName);
    }
    methodResult.success(results);
});
// getter (batched): MKOLUpdateElement.ratio per referenced instance
put("com.baidu.mapapi.map.offline.MKOLUpdateElement::get_ratio_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLUpdateElement target = (com.baidu.mapapi.map.offline.MKOLUpdateElement) args.get("__this__");
        results.add(target.ratio);
    }
    methodResult.success(results);
});
// getter (batched): MKOLUpdateElement.status per referenced instance
put("com.baidu.mapapi.map.offline.MKOLUpdateElement::get_status_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLUpdateElement target = (com.baidu.mapapi.map.offline.MKOLUpdateElement) args.get("__this__");
        results.add(target.status);
    }
    methodResult.success(results);
});
// getter (batched): MKOLUpdateElement.geoPt per referenced instance
put("com.baidu.mapapi.map.offline.MKOLUpdateElement::get_geoPt_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<com.baidu.mapapi.model.LatLng> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLUpdateElement target = (com.baidu.mapapi.map.offline.MKOLUpdateElement) args.get("__this__");
        results.add(target.geoPt);
    }
    methodResult.success(results);
});
// getter (batched): MKOLUpdateElement.size per referenced instance
put("com.baidu.mapapi.map.offline.MKOLUpdateElement::get_size_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLUpdateElement target = (com.baidu.mapapi.map.offline.MKOLUpdateElement) args.get("__this__");
        results.add(target.size);
    }
    methodResult.success(results);
});
// getter (batched): MKOLUpdateElement.serversize per referenced instance
put("com.baidu.mapapi.map.offline.MKOLUpdateElement::get_serversize_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLUpdateElement target = (com.baidu.mapapi.map.offline.MKOLUpdateElement) args.get("__this__");
        results.add(target.serversize);
    }
    methodResult.success(results);
});
// getter (batched): MKOLUpdateElement.level per referenced instance
put("com.baidu.mapapi.map.offline.MKOLUpdateElement::get_level_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLUpdateElement target = (com.baidu.mapapi.map.offline.MKOLUpdateElement) args.get("__this__");
        results.add(target.level);
    }
    methodResult.success(results);
});
// getter (batched): MKOLUpdateElement.update per referenced instance
put("com.baidu.mapapi.map.offline.MKOLUpdateElement::get_update_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Boolean> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLUpdateElement target = (com.baidu.mapapi.map.offline.MKOLUpdateElement) args.get("__this__");
        results.add(target.update);
    }
    methodResult.success(results);
});
// getter (batched): MKOLSearchRecord.cityID per referenced instance
put("com.baidu.mapapi.map.offline.MKOLSearchRecord::get_cityID_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        // target object is passed by reference under the "__this__" key
        com.baidu.mapapi.map.offline.MKOLSearchRecord target = (com.baidu.mapapi.map.offline.MKOLSearchRecord) args.get("__this__");
        results.add(target.cityID);
    }
    methodResult.success(results);
});
// getter (batched): MKOLSearchRecord.size per referenced instance
put("com.baidu.mapapi.map.offline.MKOLSearchRecord::get_size_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLSearchRecord target = (com.baidu.mapapi.map.offline.MKOLSearchRecord) args.get("__this__");
        results.add(target.size);
    }
    methodResult.success(results);
});
// getter (batched): MKOLSearchRecord.dataSize per referenced instance
put("com.baidu.mapapi.map.offline.MKOLSearchRecord::get_dataSize_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Long> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLSearchRecord target = (com.baidu.mapapi.map.offline.MKOLSearchRecord) args.get("__this__");
        results.add(target.dataSize);
    }
    methodResult.success(results);
});
// getter (batched): MKOLSearchRecord.cityName per referenced instance
put("com.baidu.mapapi.map.offline.MKOLSearchRecord::get_cityName_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<String> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLSearchRecord target = (com.baidu.mapapi.map.offline.MKOLSearchRecord) args.get("__this__");
        results.add(target.cityName);
    }
    methodResult.success(results);
});
// getter (batched): MKOLSearchRecord.cityType per referenced instance
put("com.baidu.mapapi.map.offline.MKOLSearchRecord::get_cityType_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLSearchRecord target = (com.baidu.mapapi.map.offline.MKOLSearchRecord) args.get("__this__");
        results.add(target.cityType);
    }
    methodResult.success(results);
});
// getter (batched): MKOLSearchRecord.childCities per referenced instance
put("com.baidu.mapapi.map.offline.MKOLSearchRecord::get_childCities_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<java.util.ArrayList<com.baidu.mapapi.map.offline.MKOLSearchRecord>> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.offline.MKOLSearchRecord target = (com.baidu.mapapi.map.offline.MKOLSearchRecord) args.get("__this__");
        results.add(target.childCities);
    }
    methodResult.success(results);
});
// getter (batched): MyLocationData.latitude per referenced instance
put("com.baidu.mapapi.map.MyLocationData::get_latitude_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Double> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        // target object is passed by reference under the "__this__" key
        com.baidu.mapapi.map.MyLocationData target = (com.baidu.mapapi.map.MyLocationData) args.get("__this__");
        results.add(target.latitude);
    }
    methodResult.success(results);
});
// getter (batched): MyLocationData.longitude per referenced instance
put("com.baidu.mapapi.map.MyLocationData::get_longitude_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Double> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.MyLocationData target = (com.baidu.mapapi.map.MyLocationData) args.get("__this__");
        results.add(target.longitude);
    }
    methodResult.success(results);
});
// getter (batched): MyLocationData.speed per referenced instance
put("com.baidu.mapapi.map.MyLocationData::get_speed_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Float> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.MyLocationData target = (com.baidu.mapapi.map.MyLocationData) args.get("__this__");
        results.add(target.speed);
    }
    methodResult.success(results);
});
// getter (batched): MyLocationData.direction per referenced instance
put("com.baidu.mapapi.map.MyLocationData::get_direction_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Float> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.MyLocationData target = (com.baidu.mapapi.map.MyLocationData) args.get("__this__");
        results.add(target.direction);
    }
    methodResult.success(results);
});
// getter (batched): MyLocationData.accuracy per referenced instance
put("com.baidu.mapapi.map.MyLocationData::get_accuracy_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Float> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.MyLocationData target = (com.baidu.mapapi.map.MyLocationData) args.get("__this__");
        results.add(target.accuracy);
    }
    methodResult.success(results);
});
// getter (batched): MyLocationData.satellitesNum per referenced instance
put("com.baidu.mapapi.map.MyLocationData::get_satellitesNum_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Integer> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.map.MyLocationData target = (com.baidu.mapapi.map.MyLocationData) args.get("__this__");
        results.add(target.satellitesNum);
    }
    methodResult.success(results);
});
// getter (batched): HttpClient.isHttpsEnable per referenced instance
put("com.baidu.mapapi.http.HttpClient::get_isHttpsEnable_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Boolean> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        // target object is passed by reference under the "__this__" key
        com.baidu.mapapi.http.HttpClient target = (com.baidu.mapapi.http.HttpClient) args.get("__this__");
        results.add(target.isHttpsEnable);
    }
    methodResult.success(results);
});
// getter (batched): LatLngBounds.northeast per referenced instance
put("com.baidu.mapapi.model.LatLngBounds::get_northeast_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<com.baidu.mapapi.model.LatLng> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        // target object is passed by reference under the "__this__" key
        com.baidu.mapapi.model.LatLngBounds target = (com.baidu.mapapi.model.LatLngBounds) args.get("__this__");
        results.add(target.northeast);
    }
    methodResult.success(results);
});
// getter (batched): LatLngBounds.southwest per referenced instance
put("com.baidu.mapapi.model.LatLngBounds::get_southwest_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<com.baidu.mapapi.model.LatLng> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.model.LatLngBounds target = (com.baidu.mapapi.model.LatLngBounds) args.get("__this__");
        results.add(target.southwest);
    }
    methodResult.success(results);
});
// getter (batched): LatLng.latitude per referenced instance
put("com.baidu.mapapi.model.LatLng::get_latitude_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Double> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        // target object is passed by reference under the "__this__" key
        com.baidu.mapapi.model.LatLng target = (com.baidu.mapapi.model.LatLng) args.get("__this__");
        results.add(target.latitude);
    }
    methodResult.success(results);
});
// getter (batched): LatLng.longitude per referenced instance
put("com.baidu.mapapi.model.LatLng::get_longitude_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Double> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.model.LatLng target = (com.baidu.mapapi.model.LatLng) args.get("__this__");
        results.add(target.longitude);
    }
    methodResult.success(results);
});
// getter (batched): LatLng.latitudeE6 per referenced instance
put("com.baidu.mapapi.model.LatLng::get_latitudeE6_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Double> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.model.LatLng target = (com.baidu.mapapi.model.LatLng) args.get("__this__");
        results.add(target.latitudeE6);
    }
    methodResult.success(results);
});
// getter (batched): LatLng.longitudeE6 per referenced instance
put("com.baidu.mapapi.model.LatLng::get_longitudeE6_batch", (rawBatch, methodResult) -> {
    List<Map<String, Object>> batch = (List<Map<String, Object>>) rawBatch;
    List<Double> results = new ArrayList<>(batch.size());
    for (Map<String, Object> args : batch) {
        com.baidu.mapapi.model.LatLng target = (com.baidu.mapapi.model.LatLng) args.get("__this__");
        results.add(target.longitudeE6);
    }
    methodResult.success(results);
});
// setter: LocationClientOption.coorType (String)
put("com.baidu.location.LocationClientOption::set_coorType", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    // target object is passed by reference under the "__this__" key
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.coorType = (String) args.get("coorType");
    methodResult.success("success");
});
// setter: LocationClientOption.addrType (String)
put("com.baidu.location.LocationClientOption::set_addrType", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.addrType = (String) args.get("addrType");
    methodResult.success("success");
});
// setter: LocationClientOption.openGps (boolean)
put("com.baidu.location.LocationClientOption::set_openGps", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.openGps = (boolean) args.get("openGps");
    methodResult.success("success");
});
// setter: LocationClientOption.scanSpan (int, arrives as Number over the channel)
put("com.baidu.location.LocationClientOption::set_scanSpan", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.scanSpan = ((Number) args.get("scanSpan")).intValue();
    methodResult.success("success");
});
// setter: LocationClientOption.timeOut (int)
put("com.baidu.location.LocationClientOption::set_timeOut", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.timeOut = ((Number) args.get("timeOut")).intValue();
    methodResult.success("success");
});
// setter: LocationClientOption.prodName (String)
put("com.baidu.location.LocationClientOption::set_prodName", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.prodName = (String) args.get("prodName");
    methodResult.success("success");
});
// setter: LocationClientOption.priority (int)
put("com.baidu.location.LocationClientOption::set_priority", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.priority = ((Number) args.get("priority")).intValue();
    methodResult.success("success");
});
// setter: LocationClientOption.location_change_notify (boolean)
put("com.baidu.location.LocationClientOption::set_location_change_notify", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.location_change_notify = (boolean) args.get("location_change_notify");
    methodResult.success("success");
});
// setter: LocationClientOption.disableLocCache (boolean)
put("com.baidu.location.LocationClientOption::set_disableLocCache", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.disableLocCache = (boolean) args.get("disableLocCache");
    methodResult.success("success");
});
// setter: LocationClientOption.enableSimulateGps (boolean)
put("com.baidu.location.LocationClientOption::set_enableSimulateGps", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.enableSimulateGps = (boolean) args.get("enableSimulateGps");
    methodResult.success("success");
});
// setter: LocationClientOption.serviceName (String)
put("com.baidu.location.LocationClientOption::set_serviceName", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.serviceName = (String) args.get("serviceName");
    methodResult.success("success");
});
// setter: LocationClientOption.isIgnoreCacheException (boolean)
put("com.baidu.location.LocationClientOption::set_isIgnoreCacheException", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.isIgnoreCacheException = (boolean) args.get("isIgnoreCacheException");
    methodResult.success("success");
});
// setter: LocationClientOption.isIgnoreKillProcess (boolean)
put("com.baidu.location.LocationClientOption::set_isIgnoreKillProcess", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.isIgnoreKillProcess = (boolean) args.get("isIgnoreKillProcess");
    methodResult.success("success");
});
// setter: LocationClientOption.mIsNeedDeviceDirect (boolean)
put("com.baidu.location.LocationClientOption::set_mIsNeedDeviceDirect", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.mIsNeedDeviceDirect = (boolean) args.get("mIsNeedDeviceDirect");
    methodResult.success("success");
});
// setter: LocationClientOption.isNeedAptag (boolean)
put("com.baidu.location.LocationClientOption::set_isNeedAptag", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.isNeedAptag = (boolean) args.get("isNeedAptag");
    methodResult.success("success");
});
// setter: LocationClientOption.isNeedAptagd (boolean)
put("com.baidu.location.LocationClientOption::set_isNeedAptagd", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.isNeedAptagd = (boolean) args.get("isNeedAptagd");
    methodResult.success("success");
});
// setter: LocationClientOption.isNeedPoiRegion (boolean)
put("com.baidu.location.LocationClientOption::set_isNeedPoiRegion", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.isNeedPoiRegion = (boolean) args.get("isNeedPoiRegion");
    methodResult.success("success");
});
// setter: LocationClientOption.isNeedRegular (boolean)
put("com.baidu.location.LocationClientOption::set_isNeedRegular", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.isNeedRegular = (boolean) args.get("isNeedRegular");
    methodResult.success("success");
});
// setter: LocationClientOption.isNeedAltitude (boolean)
put("com.baidu.location.LocationClientOption::set_isNeedAltitude", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.isNeedAltitude = (boolean) args.get("isNeedAltitude");
    methodResult.success("success");
});
// setter: LocationClientOption.isNeedNewVersionRgc (boolean)
put("com.baidu.location.LocationClientOption::set_isNeedNewVersionRgc", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.isNeedNewVersionRgc = (boolean) args.get("isNeedNewVersionRgc");
    methodResult.success("success");
});
// setter: LocationClientOption.isOnceLocation (boolean)
put("com.baidu.location.LocationClientOption::set_isOnceLocation", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.isOnceLocation = (boolean) args.get("isOnceLocation");
    methodResult.success("success");
});
// setter: LocationClientOption.autoNotifyMaxInterval (int)
put("com.baidu.location.LocationClientOption::set_autoNotifyMaxInterval", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.autoNotifyMaxInterval = ((Number) args.get("autoNotifyMaxInterval")).intValue();
    methodResult.success("success");
});
// setter: LocationClientOption.autoNotifyLocSensitivity (float)
put("com.baidu.location.LocationClientOption::set_autoNotifyLocSensitivity", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.autoNotifyLocSensitivity = ((Number) args.get("autoNotifyLocSensitivity")).floatValue();
    methodResult.success("success");
});
// setter: LocationClientOption.autoNotifyMinTimeInterval (int)
put("com.baidu.location.LocationClientOption::set_autoNotifyMinTimeInterval", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.autoNotifyMinTimeInterval = ((Number) args.get("autoNotifyMinTimeInterval")).intValue();
    methodResult.success("success");
});
// setter: LocationClientOption.autoNotifyMinDistance (int)
put("com.baidu.location.LocationClientOption::set_autoNotifyMinDistance", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.autoNotifyMinDistance = ((Number) args.get("autoNotifyMinDistance")).intValue();
    methodResult.success("success");
});
// setter: LocationClientOption.wifiCacheTimeOut (int)
put("com.baidu.location.LocationClientOption::set_wifiCacheTimeOut", (rawArgs, methodResult) -> {
    Map<String, Object> args = (Map<String, Object>) rawArgs;
    com.baidu.location.LocationClientOption target = (com.baidu.location.LocationClientOption) args.get("__this__");
    target.wifiCacheTimeOut = ((Number) args.get("wifiCacheTimeOut")).intValue();
    methodResult.success("success");
});
// setter
put("com.baidu.location.BDNotifyListener::set_mLatitude", (__args__, __methodResult__) -> {
// ref arg
Number mLatitude = (Number) ((Map<String, Object>) __args__).get("mLatitude");
com.baidu.location.BDNotifyListener __this__ = (com.baidu.location.BDNotifyListener) ((Map<String, Object>) __args__).get("__this__");
__this__.mLatitude = mLatitude.doubleValue();
__methodResult__.success("success");
});
// setter
put("com.baidu.location.BDNotifyListener::set_mLongitude", (__args__, __methodResult__) -> {
// ref arg
Number mLongitude = (Number) ((Map<String, Object>) __args__).get("mLongitude");
com.baidu.location.BDNotifyListener __this__ = (com.baidu.location.BDNotifyListener) ((Map<String, Object>) __args__).get("__this__");
__this__.mLongitude = mLongitude.doubleValue();
__methodResult__.success("success");
});
// setter
put("com.baidu.location.BDNotifyListener::set_mRadius", (__args__, __methodResult__) -> {
// ref arg
Number mRadius = (Number) ((Map<String, Object>) __args__).get("mRadius");
com.baidu.location.BDNotifyListener __this__ = (com.baidu.location.BDNotifyListener) ((Map<String, Object>) __args__).get("__this__");
__this__.mRadius = mRadius.floatValue();
__methodResult__.success("success");
});
// setter
put("com.baidu.location.BDNotifyListener::set_differDistance", (__args__, __methodResult__) -> {
// ref arg
Number differDistance = (Number) ((Map<String, Object>) __args__).get("differDistance");
com.baidu.location.BDNotifyListener __this__ = (com.baidu.location.BDNotifyListener) ((Map<String, Object>) __args__).get("__this__");
__this__.differDistance = differDistance.floatValue();
__methodResult__.success("success");
});
// setter
put("com.baidu.location.BDNotifyListener::set_mCoorType", (__args__, __methodResult__) -> {
// ref arg
String mCoorType = (String) ((Map<String, Object>) __args__).get("mCoorType");
com.baidu.location.BDNotifyListener __this__ = (com.baidu.location.BDNotifyListener) ((Map<String, Object>) __args__).get("__this__");
__this__.mCoorType = mCoorType;
__methodResult__.success("success");
});
// setter
put("com.baidu.location.BDNotifyListener::set_mLatitudeC", (__args__, __methodResult__) -> {
// ref arg
Number mLatitudeC = (Number) ((Map<String, Object>) __args__).get("mLatitudeC");
com.baidu.location.BDNotifyListener __this__ = (com.baidu.location.BDNotifyListener) ((Map<String, Object>) __args__).get("__this__");
__this__.mLatitudeC = mLatitudeC.doubleValue();
__methodResult__.success("success");
});
// setter
put("com.baidu.location.BDNotifyListener::set_mLongitudeC", (__args__, __methodResult__) -> {
// ref arg
Number mLongitudeC = (Number) ((Map<String, Object>) __args__).get("mLongitudeC");
com.baidu.location.BDNotifyListener __this__ = (com.baidu.location.BDNotifyListener) ((Map<String, Object>) __args__).get("__this__");
__this__.mLongitudeC = mLongitudeC.doubleValue();
__methodResult__.success("success");
});
// setter
put("com.baidu.location.BDNotifyListener::set_Notified", (__args__, __methodResult__) -> {
// ref arg
Number Notified = (Number) ((Map<String, Object>) __args__).get("Notified");
com.baidu.location.BDNotifyListener __this__ = (com.baidu.location.BDNotifyListener) ((Map<String, Object>) __args__).get("__this__");
__this__.Notified = Notified.intValue();
__methodResult__.success("success");
});
// setter
put("com.baidu.location.BDNotifyListener::set_isAdded", (__args__, __methodResult__) -> {
// ref arg
boolean isAdded = (boolean) ((Map<String, Object>) __args__).get("isAdded");
com.baidu.location.BDNotifyListener __this__ = (com.baidu.location.BDNotifyListener) ((Map<String, Object>) __args__).get("__this__");
__this__.isAdded = isAdded;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.utils.poi.DispathcPoiData::set_name", (__args__, __methodResult__) -> {
// ref arg
String name = (String) ((Map<String, Object>) __args__).get("name");
com.baidu.mapapi.utils.poi.DispathcPoiData __this__ = (com.baidu.mapapi.utils.poi.DispathcPoiData) ((Map<String, Object>) __args__).get("__this__");
__this__.name = name;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.utils.poi.DispathcPoiData::set_pt", (__args__, __methodResult__) -> {
// ref arg
com.baidu.mapapi.model.LatLng pt = (com.baidu.mapapi.model.LatLng) ((Map<String, Object>) __args__).get("pt");
com.baidu.mapapi.utils.poi.DispathcPoiData __this__ = (com.baidu.mapapi.utils.poi.DispathcPoiData) ((Map<String, Object>) __args__).get("__this__");
__this__.pt = pt;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.utils.poi.DispathcPoiData::set_addr", (__args__, __methodResult__) -> {
// ref arg
String addr = (String) ((Map<String, Object>) __args__).get("addr");
com.baidu.mapapi.utils.poi.DispathcPoiData __this__ = (com.baidu.mapapi.utils.poi.DispathcPoiData) ((Map<String, Object>) __args__).get("__this__");
__this__.addr = addr;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.utils.poi.DispathcPoiData::set_uid", (__args__, __methodResult__) -> {
// ref arg
String uid = (String) ((Map<String, Object>) __args__).get("uid");
com.baidu.mapapi.utils.poi.DispathcPoiData __this__ = (com.baidu.mapapi.utils.poi.DispathcPoiData) ((Map<String, Object>) __args__).get("__this__");
__this__.uid = uid;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo::set_key", (__args__, __methodResult__) -> {
// ref arg
String key = (String) ((Map<String, Object>) __args__).get("key");
com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo __this__ = (com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.key = key;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo::set_city", (__args__, __methodResult__) -> {
// ref arg
String city = (String) ((Map<String, Object>) __args__).get("city");
com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo __this__ = (com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.city = city;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo::set_district", (__args__, __methodResult__) -> {
// ref arg
String district = (String) ((Map<String, Object>) __args__).get("district");
com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo __this__ = (com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.district = district;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo::set_pt", (__args__, __methodResult__) -> {
// ref arg
com.baidu.mapapi.model.LatLng pt = (com.baidu.mapapi.model.LatLng) ((Map<String, Object>) __args__).get("pt");
com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo __this__ = (com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.pt = pt;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo::set_uid", (__args__, __methodResult__) -> {
// ref arg
String uid = (String) ((Map<String, Object>) __args__).get("uid");
com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo __this__ = (com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.uid = uid;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo::set_tag", (__args__, __methodResult__) -> {
// ref arg
String tag = (String) ((Map<String, Object>) __args__).get("tag");
com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo __this__ = (com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.tag = tag;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo::set_address", (__args__, __methodResult__) -> {
// ref arg
String address = (String) ((Map<String, Object>) __args__).get("address");
com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo __this__ = (com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.address = address;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo::set_poiChildrenInfoList", (__args__, __methodResult__) -> {
// ref arg
java.util.List<com.baidu.mapapi.search.core.PoiChildrenInfo> poiChildrenInfoList = (java.util.List<com.baidu.mapapi.search.core.PoiChildrenInfo>) ((Map<String, Object>) __args__).get("poiChildrenInfoList");
com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo __this__ = (com.baidu.mapapi.search.sug.SuggestionResult.SuggestionInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.poiChildrenInfoList = poiChildrenInfoList;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionSearchOption::set_mCity", (__args__, __methodResult__) -> {
// ref arg
String mCity = (String) ((Map<String, Object>) __args__).get("mCity");
com.baidu.mapapi.search.sug.SuggestionSearchOption __this__ = (com.baidu.mapapi.search.sug.SuggestionSearchOption) ((Map<String, Object>) __args__).get("__this__");
__this__.mCity = mCity;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionSearchOption::set_mKeyword", (__args__, __methodResult__) -> {
// ref arg
String mKeyword = (String) ((Map<String, Object>) __args__).get("mKeyword");
com.baidu.mapapi.search.sug.SuggestionSearchOption __this__ = (com.baidu.mapapi.search.sug.SuggestionSearchOption) ((Map<String, Object>) __args__).get("__this__");
__this__.mKeyword = mKeyword;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionSearchOption::set_mLocation", (__args__, __methodResult__) -> {
// ref arg
com.baidu.mapapi.model.LatLng mLocation = (com.baidu.mapapi.model.LatLng) ((Map<String, Object>) __args__).get("mLocation");
com.baidu.mapapi.search.sug.SuggestionSearchOption __this__ = (com.baidu.mapapi.search.sug.SuggestionSearchOption) ((Map<String, Object>) __args__).get("__this__");
__this__.mLocation = mLocation;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.sug.SuggestionSearchOption::set_mCityLimit", (__args__, __methodResult__) -> {
// ref arg
Boolean mCityLimit = (Boolean) ((Map<String, Object>) __args__).get("mCityLimit");
com.baidu.mapapi.search.sug.SuggestionSearchOption __this__ = (com.baidu.mapapi.search.sug.SuggestionSearchOption) ((Map<String, Object>) __args__).get("__this__");
__this__.mCityLimit = mCityLimit;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.busline.BusLineSearchOption::set_mUid", (__args__, __methodResult__) -> {
// ref arg
String mUid = (String) ((Map<String, Object>) __args__).get("mUid");
com.baidu.mapapi.search.busline.BusLineSearchOption __this__ = (com.baidu.mapapi.search.busline.BusLineSearchOption) ((Map<String, Object>) __args__).get("__this__");
__this__.mUid = mUid;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.busline.BusLineSearchOption::set_mCity", (__args__, __methodResult__) -> {
// ref arg
String mCity = (String) ((Map<String, Object>) __args__).get("mCity");
com.baidu.mapapi.search.busline.BusLineSearchOption __this__ = (com.baidu.mapapi.search.busline.BusLineSearchOption) ((Map<String, Object>) __args__).get("__this__");
__this__.mCity = mCity;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo::set_parentPoiName", (__args__, __methodResult__) -> {
// ref arg
String parentPoiName = (String) ((Map<String, Object>) __args__).get("parentPoiName");
com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.parentPoiName = parentPoiName;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo::set_parentPoiTag", (__args__, __methodResult__) -> {
// ref arg
String parentPoiTag = (String) ((Map<String, Object>) __args__).get("parentPoiTag");
com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.parentPoiTag = parentPoiTag;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo::set_parentPoiAddress", (__args__, __methodResult__) -> {
// ref arg
String parentPoiAddress = (String) ((Map<String, Object>) __args__).get("parentPoiAddress");
com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.parentPoiAddress = parentPoiAddress;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo::set_parentPoiLocation", (__args__, __methodResult__) -> {
// ref arg
com.baidu.mapapi.model.LatLng parentPoiLocation = (com.baidu.mapapi.model.LatLng) ((Map<String, Object>) __args__).get("parentPoiLocation");
com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.parentPoiLocation = parentPoiLocation;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo::set_parentPoiDirection", (__args__, __methodResult__) -> {
// ref arg
String parentPoiDirection = (String) ((Map<String, Object>) __args__).get("parentPoiDirection");
com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.parentPoiDirection = parentPoiDirection;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo::set_parentPoiDistance", (__args__, __methodResult__) -> {
// ref arg
Number parentPoiDistance = (Number) ((Map<String, Object>) __args__).get("parentPoiDistance");
com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.parentPoiDistance = parentPoiDistance.intValue();
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo::set_parentPoiUid", (__args__, __methodResult__) -> {
// ref arg
String parentPoiUid = (String) ((Map<String, Object>) __args__).get("parentPoiUid");
com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo.ParentPoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.parentPoiUid = parentPoiUid;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.SearchResult::set_error", (__args__, __methodResult__) -> {
// enum arg
com.baidu.mapapi.search.core.SearchResult.ERRORNO error = com.baidu.mapapi.search.core.SearchResult.ERRORNO.values()[(int) ((Map<String, Object>) __args__).get("error")];
com.baidu.mapapi.search.core.SearchResult __this__ = (com.baidu.mapapi.search.core.SearchResult) ((Map<String, Object>) __args__).get("__this__");
__this__.error = error;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.SearchResult::set_status", (__args__, __methodResult__) -> {
// ref arg
Number status = (Number) ((Map<String, Object>) __args__).get("status");
com.baidu.mapapi.search.core.SearchResult __this__ = (com.baidu.mapapi.search.core.SearchResult) ((Map<String, Object>) __args__).get("__this__");
__this__.status = status.intValue();
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo::set_name", (__args__, __methodResult__) -> {
// ref arg
String name = (String) ((Map<String, Object>) __args__).get("name");
com.baidu.mapapi.search.core.PoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.name = name;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo::set_uid", (__args__, __methodResult__) -> {
// ref arg
String uid = (String) ((Map<String, Object>) __args__).get("uid");
com.baidu.mapapi.search.core.PoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.uid = uid;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo::set_address", (__args__, __methodResult__) -> {
// ref arg
String address = (String) ((Map<String, Object>) __args__).get("address");
com.baidu.mapapi.search.core.PoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.address = address;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo::set_province", (__args__, __methodResult__) -> {
// ref arg
String province = (String) ((Map<String, Object>) __args__).get("province");
com.baidu.mapapi.search.core.PoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.province = province;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo::set_city", (__args__, __methodResult__) -> {
// ref arg
String city = (String) ((Map<String, Object>) __args__).get("city");
com.baidu.mapapi.search.core.PoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.city = city;
__methodResult__.success("success");
});
// setter
put("com.baidu.mapapi.search.core.PoiInfo::set_area", (__args__, __methodResult__) -> {
// ref arg
String area = (String) ((Map<String, Object>) __args__).get("area");
com.baidu.mapapi.search.core.PoiInfo __this__ = (com.baidu.mapapi.search.core.PoiInfo) ((Map<String, Object>) __args__).get("__this__");
__this__.area = area;
__methodResult__.success("success");
});
}};
}
}
| 1.265625 | 1 |
package com.niit.pojo;
import java.util.Date;
/**
 * Plain data object (POJO) representing a student's leave-of-absence request.
 *
 * <p>All {@link Date} values are defensively copied on the way in and out,
 * since {@code java.util.Date} is mutable and this object would otherwise
 * expose its internal state to callers.</p>
 */
public class Leave {

    private Integer leaveid;  // primary key of the leave record
    private Integer stuid;    // id of the student who requested the leave
    private Date leavedate;   // date of the requested leave
    private String reason;    // free-text reason for the leave

    /** No-arg constructor, required by serialization/ORM frameworks. */
    public Leave() {
    }

    /**
     * Creates a fully populated leave record.
     *
     * @param leaveid   primary key of the record (may be null before insert)
     * @param stuid     id of the student
     * @param leavedate date of the leave (copied defensively; may be null)
     * @param reason    free-text reason
     */
    public Leave(Integer leaveid, Integer stuid, Date leavedate, String reason) {
        this.leaveid = leaveid;
        this.stuid = stuid;
        this.leavedate = copyOf(leavedate);
        this.reason = reason;
    }

    public Integer getLeaveid() {
        return leaveid;
    }

    public void setLeaveid(Integer leaveid) {
        this.leaveid = leaveid;
    }

    public Integer getStuid() {
        return stuid;
    }

    public void setStuid(Integer stuid) {
        this.stuid = stuid;
    }

    /** @return a defensive copy of the leave date, or null if unset */
    public Date getLeavedate() {
        return copyOf(leavedate);
    }

    public void setLeavedate(Date leavedate) {
        this.leavedate = copyOf(leavedate);
    }

    public String getReason() {
        return reason;
    }

    public void setReason(String reason) {
        this.reason = reason;
    }

    /** Null-safe defensive copy helper for the mutable {@link Date} type. */
    private static Date copyOf(Date d) {
        return d == null ? null : new Date(d.getTime());
    }

    @Override
    public String toString() {
        return "Leave{" +
                "leaveid=" + leaveid +
                ", stuid=" + stuid +
                ", leavedate=" + leavedate +
                ", reason='" + reason + '\'' +
                '}';
    }
}
core/src/test/java/com/redshoes/metamodel/query/parser/QueryParserTest.java | wuxingjie/metamodel | 0 | 257 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.redshoes.metamodel.query.parser;
import java.util.Arrays;
import java.util.List;
import com.redshoes.metamodel.query.FilterClause;
import com.redshoes.metamodel.query.FilterItem;
import com.redshoes.metamodel.query.FunctionType;
import com.redshoes.metamodel.query.SelectItem;
import com.redshoes.metamodel.MetaModelException;
import com.redshoes.metamodel.MetaModelHelper;
import com.redshoes.metamodel.MockDataContext;
import com.redshoes.metamodel.query.FromItem;
import com.redshoes.metamodel.query.OperatorType;
import com.redshoes.metamodel.query.OrderByItem;
import com.redshoes.metamodel.query.OrderByItem.Direction;
import com.redshoes.metamodel.query.Query;
import com.redshoes.metamodel.schema.ColumnType;
import com.redshoes.metamodel.schema.MutableColumn;
import junit.framework.TestCase;
public class QueryParserTest extends TestCase {
private MockDataContext dc;
@Override
protected void setUp() throws Exception {
super.setUp();
dc = new MockDataContext("sch", "tbl", "foo");
// set 'baz' column to an integer column (to influence query generation)
MutableColumn col = (MutableColumn) dc.getColumnByQualifiedLabel("tbl.baz");
col.setType(ColumnType.INTEGER);
};
    // Parenthesized single conditions collapse into two AND'ed filter items.
    public void testQueryWithParenthesis() throws Exception {
        Query q = MetaModelHelper.parseQuery(dc,
                "select foo from sch.tbl where (foo= 1) and (foo=2)");
        assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.foo = '1' AND tbl.foo = '2'",
                q.toSql());
    }
public void testQueryWithParenthesisAnd() throws Exception {
Query q = MetaModelHelper.parseQuery(dc, "select foo from sch.tbl where (foo= 1) and (foo=2)");
assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.foo = '1' AND tbl.foo = '2'", q.toSql());
}
    // Keywords (select/as/inner join/on/order by/asc) are case-insensitive.
    public void testQueryInLowerCase() throws Exception {
        Query q = MetaModelHelper.parseQuery(dc,
                "select a.foo as f from sch.tbl a inner join sch.tbl b on a.foo=b.foo order by a.foo asc");
        assertEquals("SELECT a.foo AS f FROM sch.tbl a INNER JOIN sch.tbl b ON a.foo = b.foo ORDER BY a.foo ASC",
                q.toSql());
    }
    // Scalar function aliases (TO_NUM, BOOLEAN) are normalized to their canonical
    // names (TO_NUMBER, TO_BOOLEAN) when the query is rendered back to SQL.
    public void testParseScalarFunctions() throws Exception {
        Query q = MetaModelHelper.parseQuery(dc, "select TO_NUM(a.foo) from sch.tbl a WHERE BOOLEAN(a.bar) = false");
        assertEquals("SELECT TO_NUMBER(a.foo) FROM sch.tbl a WHERE TO_BOOLEAN(a.bar) = FALSE", q.toSql());
    }
    // Dot-notation into a MAP-typed column ("baz.foo.bar") is rewritten to a
    // MAP_VALUE(column,'key') scalar function in the SELECT clause.
    public void testSelectMapValueUsingDotNotation() throws Exception {
        // set 'baz' column to a MAP column
        MutableColumn col = (MutableColumn) dc.getColumnByQualifiedLabel("tbl.baz");
        col.setType(ColumnType.MAP);

        Query q = MetaModelHelper.parseQuery(dc,
                "SELECT sch.tbl.baz.foo.bar, baz.helloworld, baz.hello.world FROM sch.tbl");
        assertEquals(
                "SELECT MAP_VALUE(tbl.baz,'foo.bar'), MAP_VALUE(tbl.baz,'helloworld'), MAP_VALUE(tbl.baz,'hello.world') FROM sch.tbl",
                q.toSql());
    }
public void testWhereMapValueUsingDotNotation() throws Exception {
// set 'baz' column to a MAP column
MutableColumn col = (MutableColumn) dc.getColumnByQualifiedLabel("tbl.baz");
col.setType(ColumnType.MAP);
Query q = MetaModelHelper.parseQuery(dc,
"SELECT baz.lorem, baz.ipsum FROM sch.tbl WHERE baz.hello = 'world'");
final SelectItem whereSelectItem = q.getWhereClause().getItem(0).getSelectItem();
assertEquals(whereSelectItem.getScalarFunction(), FunctionType.MAP_VALUE);
assertEquals(col, whereSelectItem.getColumn());
assertEquals("[hello]", Arrays.toString(whereSelectItem.getFunctionParameters()));
assertEquals(
"SELECT MAP_VALUE(tbl.baz,'lorem'), MAP_VALUE(tbl.baz,'ipsum') FROM sch.tbl WHERE MAP_VALUE(tbl.baz,'hello') = 'world'",
q.toSql());
}
public void testSelectEverythingFromTable() throws Exception {
Query q = MetaModelHelper.parseQuery(dc, "SELECT * FROM sch.tbl");
assertEquals("SELECT tbl.foo, tbl.bar, tbl.baz FROM sch.tbl", q.toSql());
}
    // "*" over a join expands columns of both sides; "b.*" expands only that alias.
    public void testSelectEverythingFromJoin() throws Exception {
        Query q = MetaModelHelper.parseQuery(dc, "SELECT * FROM sch.tbl a INNER JOIN sch.tbl b ON a.foo = b.foo");
        assertEquals(
                "SELECT a.foo, a.bar, a.baz, b.foo, b.bar, b.baz FROM sch.tbl a INNER JOIN sch.tbl b ON a.foo = b.foo",
                q.toSql());

        q = MetaModelHelper.parseQuery(dc, "SELECT a.foo, b.* FROM sch.tbl a INNER JOIN sch.tbl b ON a.foo = b.foo");
        assertEquals("SELECT a.foo, b.foo, b.bar, b.baz FROM sch.tbl a INNER JOIN sch.tbl b ON a.foo = b.foo",
                q.toSql());
    }
    // A column whose name itself contains a dot ("fo.o") must still be resolvable.
    public void testSelectColumnWithDotInName() throws Exception {
        MutableColumn col = (MutableColumn) dc.getTableByQualifiedLabel("tbl").getColumn(0);
        col.setName("fo.o");

        Query q = MetaModelHelper.parseQuery(dc, "SELECT fo.o AS f FROM sch.tbl");
        assertEquals("SELECT tbl.fo.o AS f FROM sch.tbl", q.toSql());
    }
public void testApproximateCountQuery() throws Exception {
Query q = MetaModelHelper.parseQuery(dc, "SELECT APPROXIMATE COUNT(*) FROM sch.tbl");
assertEquals("SELECT APPROXIMATE COUNT(*) FROM sch.tbl", q.toSql());
assertTrue(q.getSelectClause().getItem(0).isFunctionApproximationAllowed());
}
    // An alias declared in SELECT can be referenced from WHERE and must resolve
    // to the very same SelectItem instance.
    public void testSelectAlias() throws Exception {
        Query q = MetaModelHelper.parseQuery(dc, "SELECT foo AS f FROM sch.tbl");
        assertEquals("SELECT tbl.foo AS f FROM sch.tbl", q.toSql());

        q = MetaModelHelper.parseQuery(dc, "SELECT a.foo AS foobarbaz FROM sch.tbl a WHERE foobarbaz = '123'");
        assertEquals("SELECT a.foo AS foobarbaz FROM sch.tbl a WHERE a.foo = '123'", q.toSql());

        // assert that the referred "foobarbaz" is in fact the same select item
        // (that's not visible from the toSql() call since there
        // WhereItem.toSql() method will not use the alias)
        SelectItem selectItem1 = q.getSelectClause().getItem(0);
        SelectItem selectItem2 = q.getWhereClause().getItem(0).getSelectItem();
        assertSame(selectItem1, selectItem2);
    }
public void testSelectDistinct() throws Exception {
Query q = MetaModelHelper.parseQuery(dc, "SELECT DISTINCT foo, bar AS f FROM sch.tbl");
assertEquals("SELECT DISTINCT tbl.foo, tbl.bar AS f FROM sch.tbl", q.toSql());
}
public void testSelectDistinctInLowerCase() throws Exception {
Query q = MetaModelHelper.parseQuery(dc, "SELECT distinct foo, bar AS f FROM sch.tbl");
assertEquals("SELECT DISTINCT tbl.foo, tbl.bar AS f FROM sch.tbl", q.toSql());
}
public void testSelectMinInLowerCase() throws Exception {
Query q = MetaModelHelper.parseQuery(dc, "SELECT min(tbl.foo) FROM sch.tbl");
assertEquals("SELECT MIN(tbl.foo) FROM sch.tbl", q.toSql());
}
public void testSelectEmptySpacesBeforeAs() throws Exception {
Query q = MetaModelHelper.parseQuery(dc, "SELECT tbl.foo AS alias FROM sch.tbl");
assertEquals("SELECT tbl.foo AS alias FROM sch.tbl", q.toSql());
}
    /**
     * Tests the different quoting styles accepted for table names in the FROM
     * clause: unquoted, [bracketed] and "double-quoted", including the error
     * reporting for unterminated quotes.
     *
     * @throws Exception
     */
    public void testTableName() throws Exception {
        Query q = MetaModelHelper.parseQuery(dc, "SELECT tbl.foo AS alias FROM sch.tbl");
        assertEquals("SELECT tbl.foo AS alias FROM sch.tbl", q.toSql());

        // Missing ] Bracket
        try {
            MetaModelHelper.parseQuery(dc, "SELECT tbl.foo AS alias FROM [sch.tbl");
            fail("Exception expected");
        } catch (MetaModelException e) {
            assertEquals("Not capable of parsing FROM token: [sch.tbl. Expected end ]", e.getMessage());
        }

        // Missing closing double-quote
        try {
            MetaModelHelper.parseQuery(dc, "SELECT tbl.foo AS alias FROM \"sch.tbl");
            fail("Exception expected");
        } catch (MetaModelException e) {
            assertEquals("Not capable of parsing FROM token: \"sch.tbl. Expected end \"", e.getMessage());
        }

        // Test Delimiter in tablename
        // NOTE(review): this case is byte-identical to the previous one (same input
        // and expected message); presumably a distinct delimiter case was intended — verify.
        try {
            MetaModelHelper.parseQuery(dc, "SELECT tbl.foo AS alias FROM \"sch.tbl");
            fail("Exception expected");
        } catch (MetaModelException e) {
            assertEquals("Not capable of parsing FROM token: \"sch.tbl. Expected end \"", e.getMessage());
        }

        // Positive test case
        q = MetaModelHelper.parseQuery(dc, "SELECT tbl.foo AS alias FROM [sch.tbl]");
        assertEquals("SELECT tbl.foo AS alias FROM sch.tbl", q.toSql());

        q = MetaModelHelper.parseQuery(dc, "SELECT tbl.foo AS alias FROM \"sch.tbl\"");
        assertEquals("SELECT tbl.foo AS alias FROM sch.tbl", q.toSql());
    }
public void testSelectAvgInLowerCase() throws Exception {
Query q = MetaModelHelper.parseQuery(dc, "SELECT avg(tbl.foo) FROM sch.tbl");
assertEquals("SELECT AVG(tbl.foo) FROM sch.tbl", q.toSql());
}
    // A basic SELECT/FROM must resolve both the table and the column to real
    // schema objects (not fall back to expression-based items), with no paging set.
    public void testSimpleSelectFrom() throws Exception {
        Query q = MetaModelHelper.parseQuery(dc, "SELECT foo\nFROM sch.tbl");
        assertEquals("SELECT tbl.foo FROM sch.tbl", q.toSql());

        assertEquals(1, q.getFromClause().getItemCount());
        FromItem fromItem = q.getFromClause().getItem(0);
        assertNull("FROM item was an expression based item, which indicates it was not parsed",
                fromItem.getExpression());
        assertNotNull(fromItem.getTable());
        assertEquals("tbl", fromItem.getTable().getName());

        assertEquals(1, q.getSelectClause().getItemCount());
        SelectItem selectItem = q.getSelectClause().getItem(0);
        assertNull("SELECT item was an expression based item, which indicates it was not parsed",
                selectItem.getExpression());
        assertNotNull(selectItem.getColumn());
        assertEquals("foo", selectItem.getColumn().getName());

        assertNull(q.getFirstRow());
        assertNull(q.getMaxRows());
    }
    // Comma-separated FROM tables (cartesian product) with odd whitespace (leading
    // space, tab) must still parse into resolved tables and columns.
    // NOTE(review): "Carthesian" [sic] — name kept for test-suite compatibility.
    public void testCarthesianProduct() throws Exception {
        Query q = MetaModelHelper.parseQuery(dc,
                " SELECT a.foo,b.bar FROM sch.tbl a, sch.tbl b \t WHERE a.foo = b.foo");
        assertEquals("SELECT a.foo, b.bar FROM sch.tbl a, sch.tbl b WHERE a.foo = b.foo", q.toSql());

        List<FromItem> fromItems = q.getFromClause().getItems();
        assertNotNull(fromItems.get(0).getTable());
        assertNotNull(fromItems.get(1).getTable());

        List<FilterItem> whereItems = q.getWhereClause().getItems();
        assertNotNull(whereItems.get(0).getSelectItem().getColumn());
        assertNotNull(whereItems.get(0).getSelectItem().getFromItem().getTable());
    }
    // INNER and LEFT joins round-trip through parse/toSql, including a compound
    // (AND'ed) ON condition.
    public void testJoin() throws Exception {
        Query q = MetaModelHelper.parseQuery(dc,
                "SELECT a.foo,b.bar FROM sch.tbl a INNER JOIN sch.tbl b ON a.foo = b.foo");
        assertEquals("SELECT a.foo, b.bar FROM sch.tbl a INNER JOIN sch.tbl b ON a.foo = b.foo", q.toSql());

        q = MetaModelHelper.parseQuery(dc,
                "SELECT COUNT(*) FROM sch.tbl a LEFT JOIN sch.tbl b ON a.foo = b.foo AND a.bar = b.baz");
        assertEquals("SELECT COUNT(*) FROM sch.tbl a LEFT JOIN sch.tbl b ON a.foo = b.foo AND a.bar = b.baz",
                q.toSql());
    }
    // WHERE operands are typed by the column: 'baz' was made INTEGER in setUp(),
    // so its operand parses as an Integer while 'bar' stays a String.
    public void testSimpleSelectFromWhere() throws Exception {
        Query q = MetaModelHelper.parseQuery(dc, "SELECT foo FROM sch.tbl WHERE bar = 'baz' AND baz > 5");
        assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.bar = 'baz' AND tbl.baz > 5", q.toSql());

        FilterClause whereClause = q.getWhereClause();
        assertEquals(2, whereClause.getItemCount());
        assertNull("WHERE item was an expression based item, which indicates it was not parsed",
                whereClause.getItem(0).getExpression());
        assertEquals(2, whereClause.getItemCount());
        assertNull("WHERE item was an expression based item, which indicates it was not parsed",
                whereClause.getItem(1).getExpression());

        assertEquals("baz", whereClause.getItem(0).getOperand());
        assertEquals(Integer.class, whereClause.getItem(1).getOperand().getClass());
    }
public void testWhereStringEscaped() throws Exception {
    // A backslash-escaped quote inside a string literal must survive parsing;
    // toSql() renders the literal with the quote un-escaped.
    Query q = MetaModelHelper.parseQuery(dc, "SELECT foo FROM sch.tbl WHERE bar = 'ba\\'z'");
    assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.bar = 'ba'z'", q.toSql());
}
public void testWhereOperandIsBoolean() throws Exception {
    // Switch the 'baz' column to a BOOLEAN column so that the parser
    // interprets the WHERE operand as a boolean literal.
    MutableColumn col = (MutableColumn) dc.getColumnByQualifiedLabel("tbl.baz");
    col.setType(ColumnType.BOOLEAN);
    Query q = MetaModelHelper.parseQuery(dc, "SELECT foo FROM sch.tbl WHERE baz = TRUE");
    assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.baz = TRUE", q.toSql());
}
public void testWhereOperandIsDate() throws Exception {
    // Switch the 'baz' column to a TIME column so that the parser interprets
    // the WHERE operand as a time value ('10:24' is normalized to '10:24:00').
    MutableColumn col = (MutableColumn) dc.getColumnByQualifiedLabel("tbl.baz");
    col.setType(ColumnType.TIME);
    Query q = MetaModelHelper.parseQuery(dc, "SELECT foo FROM sch.tbl WHERE baz = 10:24");
    assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.baz = TIME '10:24:00'", q.toSql());
}
public void testCoumpoundWhereClause() throws Exception {
    final Query query = MetaModelHelper.parseQuery(dc,
            "SELECT foo FROM sch.tbl WHERE (bar = 'baz' OR (baz > 5 AND baz < 7))");
    assertEquals("SELECT tbl.foo FROM sch.tbl WHERE (tbl.bar = 'baz' OR (tbl.baz > 5 AND tbl.baz < 7))", query.toSql());

    // The whole parenthesized OR expression parses as one compound filter item...
    final FilterClause whereClause = query.getWhereClause();
    assertEquals(1, whereClause.getItemCount());
    final FilterItem compound = whereClause.getItem(0);
    assertTrue(compound.isCompoundFilter());

    // ...with two children, the second being the nested (AND) group,
    // which is itself compound.
    final FilterItem[] children = compound.getChildItems();
    assertEquals(2, children.length);
    assertTrue(children[1].isCompoundFilter());
}
public void testCoumpoundWhereClauseDelimInLoweCase() throws Exception {
    // Same scenario as testCoumpoundWhereClause, but with a lower-case 'and'
    // delimiter, which must be recognized case-insensitively.
    Query q = MetaModelHelper.parseQuery(dc,
            "SELECT foo FROM sch.tbl WHERE (bar = 'baz' OR (baz > 5 and baz < 7))");
    assertEquals("SELECT tbl.foo FROM sch.tbl WHERE (tbl.bar = 'baz' OR (tbl.baz > 5 AND tbl.baz < 7))", q.toSql());
    FilterClause wc = q.getWhereClause();
    // The parenthesized expression becomes a single compound filter item.
    assertEquals(1, wc.getItemCount());
    FilterItem item = wc.getItem(0);
    assertTrue(item.isCompoundFilter());
    FilterItem[] childItems = item.getChildItems();
    assertEquals(2, childItems.length);
    // The nested (baz > 5 AND baz < 7) group is itself compound.
    FilterItem bazConditions = childItems[1];
    assertTrue(bazConditions.isCompoundFilter());
}
public void testWhereSomethingIsNull() throws Exception {
    Query q = MetaModelHelper.parseQuery(dc, "SELECT foo FROM sch.tbl WHERE bar IS NULL");
    assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.bar IS NULL", q.toSql());
    assertEquals(1, q.getWhereClause().getItemCount());
    assertNull("WHERE item was an expression based item, which indicates it was not parsed",
            q.getWhereClause().getItem(0).getExpression());
    // IS NULL is modeled as operator EQUALS_TO with a null operand.
    assertNull(q.getWhereClause().getItem(0).getOperand());
    assertEquals(OperatorType.EQUALS_TO, q.getWhereClause().getItem(0).getOperator());
}
public void testWhereSomethingIsNotNull() throws Exception {
    Query q = MetaModelHelper.parseQuery(dc, "SELECT foo FROM sch.tbl WHERE bar IS NOT NULL");
    assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.bar IS NOT NULL", q.toSql());
    assertEquals(1, q.getWhereClause().getItemCount());
    assertNull("WHERE item was an expression based item, which indicates it was not parsed",
            q.getWhereClause().getItem(0).getExpression());
    // IS NOT NULL is modeled as operator DIFFERENT_FROM with a null operand.
    assertNull(q.getWhereClause().getItem(0).getOperand());
    assertEquals(OperatorType.DIFFERENT_FROM, q.getWhereClause().getItem(0).getOperator());
}
public void testLimitAndOffset() throws Exception {
    Query q = MetaModelHelper.parseQuery(dc, "SELECT foo FROM sch.tbl LIMIT 1234 OFFSET 5");
    // LIMIT/OFFSET are not rendered by toSql(); they map to maxRows/firstRow.
    assertEquals("SELECT tbl.foo FROM sch.tbl", q.toSql());
    assertEquals(1234, q.getMaxRows().intValue());
    // OFFSET is zero-based while firstRow is one-based, hence 5 -> 6.
    assertEquals(6, q.getFirstRow().intValue());
}
public void testWhereIn() throws Exception {
    final Query query = MetaModelHelper.parseQuery(dc, "SELECT foo FROM sch.tbl WHERE foo IN ('a','b',5)");
    assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.foo IN ('a' , 'b' , '5')", query.toSql());

    final FilterItem inFilter = query.getWhereClause().getItem(0);
    assertEquals(OperatorType.IN, inFilter.getOperator());

    // The IN operand is a List whose elements keep their parsed types:
    // quoted values stay Strings, the bare 5 becomes an Integer.
    final Object operand = inFilter.getOperand();
    assertTrue(operand instanceof List);
    final List<?> values = (List<?>) operand;
    assertEquals("a", values.get(0));
    assertEquals("b", values.get(1));
    assertEquals(5, values.get(2));
}
public void testWhereInInLowerCase() throws Exception {
    // Same as testWhereIn, but the 'in' keyword is lower-case and must
    // still be recognized.
    Query q = MetaModelHelper.parseQuery(dc, "SELECT foo FROM sch.tbl WHERE foo in ('a','b',5)");
    assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.foo IN ('a' , 'b' , '5')", q.toSql());
    FilterItem whereItem = q.getWhereClause().getItem(0);
    assertEquals(OperatorType.IN, whereItem.getOperator());
    // Operand is a List; elements keep their parsed types.
    Object operand = whereItem.getOperand();
    assertTrue(operand instanceof List);
    assertEquals("a", ((List<?>) operand).get(0));
    assertEquals("b", ((List<?>) operand).get(1));
    assertEquals(5, ((List<?>) operand).get(2));
}
public void testWhereLikeInLowerCase() throws Exception {
    // The 'like' keyword must be recognized case-insensitively.
    final Query query = MetaModelHelper.parseQuery(dc, "SELECT foo FROM sch.tbl WHERE foo like 'a%'");
    assertEquals("SELECT tbl.foo FROM sch.tbl WHERE tbl.foo LIKE 'a%'", query.toSql());

    final FilterItem likeFilter = query.getWhereClause().getItem(0);
    assertEquals(OperatorType.LIKE, likeFilter.getOperator());

    // The LIKE operand is the raw pattern String.
    final Object operand = likeFilter.getOperand();
    assertTrue(operand instanceof String);
    assertEquals("a%", operand);
}
public void testSimpleSubQuery() throws Exception {
    // A sub-query in the FROM clause; the '*' inside the sub-query expands
    // to all columns of sch.tbl.
    Query q = MetaModelHelper.parseQuery(dc, "SELECT f.foo AS fo FROM (SELECT * FROM sch.tbl) f");
    assertEquals("SELECT f.foo AS fo FROM (SELECT tbl.foo, tbl.bar, tbl.baz FROM sch.tbl) f", q.toSql());
}
public void testSelectEverythingFromSubQuery() throws Exception {
    // The outer '*' expands to the columns exposed by the sub-query alias 'f'.
    Query q = MetaModelHelper.parseQuery(dc, "SELECT * FROM (SELECT foo, bar FROM sch.tbl) f");
    assertEquals("SELECT f.foo, f.bar FROM (SELECT tbl.foo, tbl.bar FROM sch.tbl) f", q.toSql());
}
public void testGetIndicesVanillaScenario() throws Exception {
    QueryParser qp = new QueryParser(dc, "SELECT ... FROM ... BAR BAZ");
    // indexesOf returns the [start, end] character positions of the keyword
    // within the (normalized) query string.
    assertEquals("[0, 7]", Arrays.toString(qp.indexesOf("SELECT ", null)));
    assertEquals("[10, 16]", Arrays.toString(qp.indexesOf(" FROM ", null)));
}
public void testGetIndicesIgnoreWhiteSpaceAndCaseDifferences() throws Exception {
    // Leading whitespace and lower-case keywords are normalized away, so the
    // reported indexes match the vanilla scenario above.
    QueryParser qp = new QueryParser(dc, " \t\r\n select ... from ... BAR BAZ");
    assertEquals("[0, 7]", Arrays.toString(qp.indexesOf("SELECT ", null)));
    assertEquals("[10, 16]", Arrays.toString(qp.indexesOf(" FROM ", null)));
}
public void testInvalidQueries() throws Exception {
    // A query without SELECT must be rejected with a descriptive message.
    try {
        MetaModelHelper.parseQuery(dc, "foobar");
        fail("Exception expected");
    } catch (MetaModelException e) {
        assertEquals("SELECT not found in query: foobar", e.getMessage());
    }
    // A query with SELECT but without FROM must be rejected as well.
    try {
        MetaModelHelper.parseQuery(dc, "SELECT foobar");
        fail("Exception expected");
    } catch (MetaModelException e) {
        assertEquals("FROM not found in query: SELECT foobar", e.getMessage());
    }
}
public void testFullQuery() throws Exception {
    // Exercises every clause at once: SELECT with functions, WHERE, GROUP BY,
    // HAVING, ORDER BY, LIMIT and OFFSET.
    Query q = MetaModelHelper.parseQuery(dc,
            "SELECT foo, COUNT(* ), MAX( baz ) FROM sch.tbl WHERE bar = 'baz' AND foo = bar AND baz > 5 "
                    + "GROUP BY foo HAVING COUNT(*) > 2 ORDER BY foo LIMIT 20 OFFSET 10");
    assertEquals(
            "SELECT tbl.foo, COUNT(*), MAX(tbl.baz) FROM sch.tbl WHERE tbl.bar = 'baz' AND tbl.foo = tbl.bar AND tbl.baz > 5 "
                    + "GROUP BY tbl.foo HAVING COUNT(*) > 2 ORDER BY tbl.foo ASC",
            q.toSql());
    // LIMIT/OFFSET are not rendered by toSql(); they map to maxRows/firstRow
    // (firstRow is one-based, hence OFFSET 10 -> 11).
    assertEquals(20, q.getMaxRows().intValue());
    assertEquals(11, q.getFirstRow().intValue());
    // SELECT ...
    // tbl.foo
    assertNotNull("SelectItem 1 should be a column", q.getSelectClause().getItem(0).getColumn());
    // COUNT(*)
    assertNotNull("SelectItem 2 should be a Function", q.getSelectClause().getItem(1).getAggregateFunction());
    assertNotNull("SelectItem 2 should be a Function of '*'", q.getSelectClause().getItem(1).getExpression());
    // MAX(tbl.baz)
    // (assertion message fixed: this is the third select item, not the fourth)
    assertNotNull("SelectItem 3 should be a Function", q.getSelectClause().getItem(2).getAggregateFunction());
    assertNotNull("SelectItem 3 should be a Function of a column", q.getSelectClause().getItem(2).getColumn());
    // FROM sch.tbl
    assertNotNull(q.getFromClause().getItem(0).getTable());
    // GROUP BY tbl.foo
    assertNotNull(q.getGroupByClause().getItem(0).getSelectItem().getColumn());
    // HAVING COUNT(*) > 2
    FilterItem havingItem = q.getHavingClause().getItem(0);
    assertNull(havingItem.getExpression());
    assertNotNull(havingItem.getSelectItem().getAggregateFunction());
    assertEquals("*", havingItem.getSelectItem().getExpression());
    // ORDER BY tbl.foo ASC
    OrderByItem orderByItem = q.getOrderByClause().getItem(0);
    assertNull(orderByItem.getSelectItem().getExpression());
    assertNotNull(orderByItem.getSelectItem().getColumn());
    assertEquals(Direction.ASC, orderByItem.getDirection());
}
}
| 1.125 | 1 |
src/main/java/com/yh/antlr/ParseTrees.java | prs1022/a_antlr_plugin | 4 | 265 | /*
* Copyright (c) 2012 <NAME>, Tunnel Vision Laboratories LLC
* All rights reserved.
*
* The source code of this document is proprietary work, and is not licensed for
* distribution. For information about licensing, contact <NAME> at:
* <EMAIL>
*/
package java.com.yh.antlr;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.RuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.runtime.misc.IntervalSet;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.RuleNode;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.apache.commons.lang3.NotImplementedException;
/**
* @author <NAME>
*/
public final class ParseTrees {

    /**
     * Returns the character interval covered by {@code context} in the input
     * stream. Empty contexts yield an empty interval (stop == start - 1).
     */
    public static Interval getSourceInterval(ParserRuleContext context) {
        int startIndex = context.start.getStartIndex();
        Token stopSymbol = getStopSymbol(context);
        if (stopSymbol == null) {
            // No stop symbol at all: report an empty interval anchored at the start.
            return new Interval(startIndex, startIndex - 1);
        }
        int stopIndex;
        if (stopSymbol.getType() != Token.EOF) {
            stopIndex = stopSymbol.getStopIndex();
        } else {
            // EOF carries no usable stop index; use the end of the character
            // stream when available, otherwise fall back to an empty interval.
            TokenSource tokenSource = context.getStart().getTokenSource();
            CharStream inputStream = tokenSource != null ? tokenSource.getInputStream() : null;
            if (inputStream != null) {
                stopIndex = inputStream.size() - 1;
            } else {
                stopIndex = context.start.getStartIndex() - 1;
            }
        }
        // Never report a stop index before (start - 1).
        stopIndex = Math.max(stopIndex, startIndex - 1);
        return new Interval(startIndex, stopIndex);
    }

    /**
     * Returns the character interval covered by an arbitrary parse tree node,
     * or {@link Interval#INVALID} when no symbols can be located.
     */
    public static Interval getSourceInterval(ParseTree context) {
        if (context instanceof TerminalNode) {
            TerminalNode terminalNode = (TerminalNode) context;
            Token token = terminalNode.getSymbol();
            return new Interval(token.getStartIndex(), token.getStopIndex());
        } else if (context instanceof RuleNode) {
            RuleNode ruleNode = (RuleNode) context;
            RuleContext ruleContext = ruleNode.getRuleContext();
            if (ruleContext instanceof ParserRuleContext) {
                return getSourceInterval((ParserRuleContext) ruleContext);
            } else {
                // Generic rule node: derive the interval from its outermost symbols.
                Token startSymbol = getStartSymbol(context);
                Token stopSymbol = getStopSymbol(context);
                if (startSymbol == null || stopSymbol == null) {
                    return Interval.INVALID;
                }
                return new Interval(startSymbol.getStartIndex(), stopSymbol.getStopIndex());
            }
        } else {
            return Interval.INVALID;
        }
    }

    /**
     * Returns the last symbol of {@code context}: the explicit stop token when
     * set, otherwise the right-most stop symbol found in a child, falling back
     * to the context's start token.
     */
    public static Token getStopSymbol(ParserRuleContext context) {
        if (context.stop != null) {
            return context.stop;
        }
        // Search children right-to-left for the last available symbol.
        for (int i = context.getChildCount() - 1; i >= 0; i--) {
            Token symbol = getStopSymbol(context.getChild(i));
            if (symbol != null) {
                return symbol;
            }
        }
        return context.start;
    }

    /** Returns the stop symbol of a generic parse tree node, or {@code null}. */
    public static Token getStopSymbol(ParseTree context) {
        if (context instanceof ParserRuleContext) {
            return getStopSymbol((ParserRuleContext) context);
        } else if (context instanceof TerminalNode) {
            return ((TerminalNode) context).getSymbol();
        }
        return null;
    }

    /** Returns the left-most terminal node under {@code context}, or {@code null}. */
    public static TerminalNode getStartNode(ParseTree context) {
        if (context == null) {
            return null;
        }
        if (context instanceof TerminalNode) {
            return (TerminalNode) context;
        }
        for (int i = 0; i < context.getChildCount(); i++) {
            TerminalNode startNode = getStartNode(context.getChild(i));
            if (startNode != null) {
                return startNode;
            }
        }
        return null;
    }

    /**
     * Returns the first symbol of {@code context}: the symbol of the left-most
     * terminal node, or the start token of an underlying
     * {@link ParserRuleContext}, or {@code null} when neither is available.
     */
    public static Token getStartSymbol(ParseTree context) {
        TerminalNode node = getStartNode(context);
        if (node != null) {
            return node.getSymbol();
        }
        if (!(context instanceof RuleNode)) {
            return null;
        }
        RuleContext ruleContext = ((RuleNode) context).getRuleContext();
        if (ruleContext instanceof ParserRuleContext) {
            return ((ParserRuleContext) ruleContext).getStart();
        }
        return null;
    }

    /** Returns the right-most terminal node under {@code context}, or {@code null}. */
    public static TerminalNode getStopNode(ParseTree context) {
        if (context == null) {
            return null;
        }
        if (context instanceof TerminalNode) {
            return (TerminalNode) context;
        }
        // Search children right-to-left so the last terminal wins.
        for (int i = context.getChildCount() - 1; i >= 0; i--) {
            TerminalNode stopNode = getStopNode(context.getChild(i));
            if (stopNode != null) {
                return stopNode;
            }
        }
        return null;
    }

    /**
     * Checks whether the rule indexes of {@code context} and its ancestors match
     * {@code stack} exactly, innermost first. {@code allowGaps} (matching with
     * intermediate contexts skipped) is not implemented.
     */
    public static boolean isInContexts(ParserRuleContext context, boolean allowGaps, int... stack) {
        if (allowGaps) {
            throw new UnsupportedOperationException("Not implemented yet.");
        }
        ParserRuleContext currentContext = context;
        for (int element : stack) {
            if (currentContext.getRuleIndex() != element) {
                return false;
            }
            currentContext = currentContext.getParent();
            if (currentContext == null) {
                // Ran out of ancestors before matching the whole stack.
                return false;
            }
        }
        return true;
    }

    /** Convenience for {@link #isInAnyContext(Parser, RuleContext, IntervalSet, boolean)} with checkTop=true. */
    public static <T extends Token> boolean isInAnyContext(Parser parser, RuleContext context, IntervalSet values) {
        return isInAnyContext(parser, context, values, true);
    }

    /** True when {@code context} or one of its invoking contexts has a rule index in {@code values}. */
    public static <T extends Token> boolean isInAnyContext(Parser parser, RuleContext context, IntervalSet values, boolean checkTop) {
        return findTopContext(parser, context, values, checkTop) != null;
    }

    /** Convenience for {@link #isInAnyContext(ParserRuleContext, IntervalSet, boolean)} with checkTop=true. */
    public static boolean isInAnyContext(ParserRuleContext context, IntervalSet values) {
        return isInAnyContext(context, values, true);
    }

    /** True when {@code context} or one of its ancestors has a rule index in {@code values}. */
    public static boolean isInAnyContext(ParserRuleContext context, IntervalSet values, boolean checkTop) {
        return findTopContext(context, values, checkTop) != null;
    }

    /** Convenience for {@link #findTopContext(Parser, RuleContext, IntervalSet, boolean)} with checkTop=true. */
    public static RuleContext findTopContext(Parser parser, RuleContext context, IntervalSet values) {
        return findTopContext(parser, context, values, true);
    }

    /**
     * Walks up the context chain looking for a context whose rule index is in
     * {@code values}, using the ATN invoking state to identify the parent's rule.
     * Returns {@code null} when no match is found.
     */
    public static RuleContext findTopContext(Parser parser, RuleContext context, IntervalSet values, boolean checkTop) {
        if (checkTop && values.contains(context.getRuleIndex())) {
            return context;
        }
        if (context.isEmpty()) {
            return null;
        }
        // The invoking state's rule index identifies the rule of the parent context.
        if (values.contains(parser.getATN().states.get(context.invokingState).ruleIndex)) {
            return context.parent;
        }
        // checkTop=false: the current context has already been checked above.
        return findTopContext(parser, context.parent, values, false);
    }

    /** Convenience for {@link #findTopContext(ParserRuleContext, IntervalSet, boolean)} with checkTop=true. */
    public static ParserRuleContext findTopContext(ParserRuleContext context, IntervalSet values) {
        return findTopContext(context, values, true);
    }

    /**
     * Walks up the ancestor chain looking for a context whose rule index is in
     * {@code values}. Returns {@code null} when no match is found.
     */
    public static ParserRuleContext findTopContext(ParserRuleContext context, IntervalSet values, boolean checkTop) {
        if (checkTop && values.contains(context.getRuleIndex())) {
            return context;
        }
        if (context.isEmpty()) {
            return null;
        }
        // NOTE(review): unlike the Parser overload, the recursion passes
        // checkTop=true so each ancestor is checked directly; the parent is
        // also cast, which assumes it is itself a ParserRuleContext - confirm.
        return findTopContext((ParserRuleContext) context.parent, values, true);
    }

    /**
     * Finds the terminal node carrying exactly {@code symbol} within the
     * subtree rooted at {@code node}, pruning children whose source range
     * cannot contain the symbol. Returns {@code null} when not found.
     */
    public static TerminalNode findTerminalNode(ParseTree node, Token symbol) {
        if (symbol == null) {
            return null;
        }
        if (node instanceof TerminalNode) {
            TerminalNode terminalNode = (TerminalNode) node;
            if (terminalNode.getSymbol().equals(symbol)) { // TODO: a null-safe Utils.equals(o1, o2) helper is not available here
                return terminalNode;
            }
            return null;
        }
        for (int i = 0; i < node.getChildCount(); i++) {
            ParseTree child = node.getChild(i);
            TerminalNode stopNode = ParseTrees.getStopNode(child);
            if (stopNode == null) {
                continue;
            }
            // Child ends before the symbol starts: skip it.
            Token stopSymbol = stopNode.getSymbol();
            if (stopSymbol.getStopIndex() < symbol.getStartIndex()) {
                continue;
            }
            TerminalNode startNode = ParseTrees.getStartNode(child);
            assert startNode != null;
            // NOTE(review): 'stopSymbol' is reused here to hold the START symbol
            // of the child - confusing but intentional in the original code.
            stopSymbol = startNode.getSymbol();
            // Child starts after the symbol ends: no later sibling can match either.
            if (stopSymbol == null || stopSymbol.getStartIndex() > symbol.getStopIndex()) {
                break;
            }
            if (stopSymbol.equals(symbol)) {
                return startNode;
            }
            // Recurse into the child whose range covers the symbol.
            TerminalNode terminalNode = findTerminalNode(child, symbol);
            if (terminalNode != null) {
                return terminalNode;
            }
        }
        return null;
    }

    /**
     * Finds the terminal node among {@code children} whose symbol is the SAME
     * instance as {@code symbol} (reference equality). Returns {@code null}
     * when not found.
     */
    public static TerminalNode findTerminalNode(Collection<? extends ParseTree> children, Token symbol) {
        for (ParseTree element : children) {
            if (!(element instanceof TerminalNode)) {
                continue;
            }
            TerminalNode node = (TerminalNode) element;
            if (node.getSymbol() == symbol) {
                return node;
            }
        }
        return null;
    }

    /**
     * Returns the rule index of the rule that invoked {@code context}, or -1
     * when the invoking state is unknown or out of range.
     */
    public static int getInvokingRule(ATN atn, RuleContext context) {
        int invokingState = context.invokingState;
        if (invokingState < 0 || invokingState >= atn.states.size()) {
            return -1;
        }
        return atn.states.get(invokingState).ruleIndex;
    }

    /** Returns {@code list} itself, or an immutable empty list when it is null. */
    public static <T> List<T> emptyIfNull(List<T> list) {
        if (list == null) {
            return Collections.emptyList();
        }
        return list;
    }

    /**
     * Return a list of all ancestors of this node. The first node of
     * list is the root and the last is the parent of this node.
     *
     * @param t the node whose ancestors are requested
     * @return the ancestors, root first; empty when {@code t} has no parent
     */
    public static List<? extends ParseTree> getAncestors(ParseTree t) {
        if (t.getParent() == null) {
            return Collections.emptyList();
        }
        List<ParseTree> ancestors = new ArrayList<>();
        t = t.getParent();
        while (t != null) {
            ancestors.add(0, t); // insert at start
            t = t.getParent();
        }
        return ancestors;
    }

    /**
     * Returns the nearest ancestor of {@code tree} (or {@code tree} itself)
     * whose rule index equals {@code ruleIndex}, or {@code null}.
     */
    public static RuleNode findAncestor(ParseTree tree, int ruleIndex) {
        for (ParseTree current = tree; current != null; current = current.getParent()) {
            if (!(current instanceof RuleNode)) {
                continue;
            }
            RuleNode ruleNode = (RuleNode) current;
            if (ruleNode.getRuleContext().getRuleIndex() == ruleIndex) {
                return ruleNode;
            }
        }
        return null;
    }

    /**
     * Returns the nearest ancestor of {@code tree} (or {@code tree} itself)
     * whose rule index is set in {@code ruleIndexes}, or {@code null}.
     */
    public static RuleNode findAncestor(ParseTree tree, BitSet ruleIndexes) {
        for (ParseTree current = tree; current != null; current = current.getParent()) {
            if (!(current instanceof RuleNode)) {
                continue;
            }
            RuleNode ruleNode = (RuleNode) current;
            int ruleIndex = ruleNode.getRuleContext().getRuleIndex();
            if (ruleIndex < 0) {
                continue;
            }
            if (ruleIndexes.get(ruleIndex)) {
                return ruleNode;
            }
        }
        return null;
    }

    /**
     * Returns the nearest ancestor of {@code tree} (or {@code tree} itself)
     * whose rule context is an instance of {@code nodeType}, or {@code null}.
     */
    public static <ContextClass> ContextClass findAncestor(ParseTree tree, Class<ContextClass> nodeType) {
        for (ParseTree current = tree; current != null; current = current.getParent()) {
            if (!(current instanceof RuleNode)) {
                continue;
            }
            RuleNode ruleNode = (RuleNode) current;
            RuleContext ruleContext = ruleNode.getRuleContext();
            if (nodeType.isInstance(ruleContext)) {
                return nodeType.cast(ruleContext);
            }
        }
        return null;
    }

    /**
     * Gets whether or not {@code tree} is an epsilon non-terminal in the parse
     * tree. An epsilon tree is a node which does not contain any
     * {@link TerminalNode} descendants.
     *
     * @param tree A node in a parse tree.
     * @return {@code true} if {@code tree} is an epsilon node in the parse
     * tree, otherwise {@code false}.
     */
    public static boolean isEpsilon(ParseTree tree) {
        if (tree instanceof TerminalNode) {
            return false;
        }
        // An empty source interval (b < a) means no terminals are covered.
        Interval sourceInterval = tree.getSourceInterval();
        return sourceInterval.b < sourceInterval.a;
    }

    /**
     * Gets whether or not {@code a} starts after the start of {@code b}.
     *
     * @param a The first tree.
     * @param b The second tree.
     * @return {@code true} if {@code a} starts after the start of {@code b}, otherwise {@code false}.
     */
    public static boolean startsAfterStartOf(ParseTree a, ParseTree b) {
        //TerminalNode<? extends Token> startNodeA = getStartNode(a);
        //TerminalNode<? extends Token> startNodeB = getStartNode(b);
        //if (startNodeA == null || startNodeB == null) {
        // throw new NotImplementedException();
        //}
        Interval sourceIntervalA = a.getSourceInterval();
        Interval sourceIntervalB = b.getSourceInterval();
        //if (sourceIntervalA.a == sourceIntervalB.a) {
        // if (isAncestorOf(a, b)) {
        // return true;
        // }
        //
        // if (isEpsilon(a) || isEpsilon(b)) {
        // // b could be a child of a later sibling of some ancestor of a
        // throw new NotImplementedException();
        // }
        //}
        return sourceIntervalA.a > sourceIntervalB.a;
    }

    /**
     * Gets whether or not {@code a} starts before the start of {@code b}.
     *
     * @param a The first tree.
     * @param b The second tree.
     * @return {@code true} if {@code a} starts before the start of {@code b}, otherwise {@code false}.
     */
    public static boolean startsBeforeStartOf(ParseTree a, ParseTree b) {
        Interval sourceIntervalA = a.getSourceInterval();
        Interval sourceIntervalB = b.getSourceInterval();
        return sourceIntervalA.a < sourceIntervalB.a;
    }

    /**
     * Gets whether or not {@code a} ends after the end of {@code b}.
     *
     * @param a The first tree.
     * @param b The second tree.
     * @return {@code true} if {@code a} ends after the end of {@code b}, otherwise {@code false}.
     */
    public static boolean endsAfterEndOf(ParseTree a, ParseTree b) {
        Interval sourceIntervalA = a.getSourceInterval();
        Interval sourceIntervalB = b.getSourceInterval();
        return sourceIntervalA.b > sourceIntervalB.b;
    }

    /**
     * Gets whether or not {@code a} ends before the end of {@code b}.
     *
     * @param a The first tree.
     * @param b The second tree.
     * @return {@code true} if {@code a} ends before the end of {@code b}, otherwise {@code false}.
     */
    public static boolean endsBeforeEndOf(ParseTree a, ParseTree b) {
        Interval sourceIntervalA = a.getSourceInterval();
        Interval sourceIntervalB = b.getSourceInterval();
        return sourceIntervalA.b < sourceIntervalB.b;
    }

    /**
     * Gets whether or not {@code a} is an ancestor of or equal to {@code b}.
     *
     * @param a The first tree.
     * @param b The second tree.
     * @return {@code true} if {@code a} is an ancestor of or is equal to {@code b}, otherwise {@code false}.
     */
    public static boolean isAncestorOf(ParseTree a, ParseTree b) {
        for (ParseTree current = b; current != null; current = current.getParent()) {
            if (current.equals(a)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Gets whether or not the first symbol of {@code tree} is the first
     * non-whitespace symbol on a line.
     *
     * @param tree The parse tree to test.
     * @return {@code true} if the only characters appearing before the first
     * token of {@code tree} on the line where {@code tree} starts are
     * whitespace characters according to {@link Character#isWhitespace}.
     */
    public static boolean elementStartsLine(ParseTree tree) {
        TerminalNode symbol = ParseTrees.getStartNode(tree);
        if (symbol == null) {
            throw new NotImplementedException("");
        }
        return elementStartsLine(symbol.getSymbol());
    }

    /**
     * Gets whether or not {@code token} is the first non-whitespace symbol on a
     * line.
     *
     * @param token The token to test.
     * @return {@code true} if the only characters appearing before
     * {@code token} on the same line are whitespace characters according to
     * {@link Character#isWhitespace}.
     */
    public static boolean elementStartsLine(Token token) {
        // Read the text from the start of the line up to (not including) the token.
        String beginningOfLineText = token.getTokenSource().getInputStream().getText(new Interval(token.getStartIndex() - token.getCharPositionInLine(), token.getStartIndex() - 1));
        for (int i = 0; i < beginningOfLineText.length(); i++) {
            if (!Character.isWhitespace(beginningOfLineText.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    /**
     * Gets the symbol type of a parse tree terminal node. If the node is not a
     * terminal node, this method returns {@link Token#INVALID_TYPE}.
     *
     * @param node The parse tree node.
     * @return The symbol type of the terminal node. If {@code node} does not
     * implement {@link TerminalNode}, this method returns
     * {@link Token#INVALID_TYPE}.
     */
    public static int getTerminalNodeType(ParseTree node) {
        if (!(node instanceof TerminalNode)) {
            return Token.INVALID_TYPE;
        }
        return ((TerminalNode) node).getSymbol().getType();
    }

    /**
     * Gets a typed rule context from a parse tree node. If {@code node} is a
     * {@link RuleNode}, this method gets the {@link RuleContext} instance from
     * the node and attempts to cast the result to {@code clazz}. If
     * {@code node} is not a {@code RuleNode}, or if the context is not of type
     * {@code clazz}, this method returns {@code null}.
     *
     * @param <T> The specific rule context type.
     * @param node The parse tree node.
     * @param clazz The specific rule context type.
     * @return A typed rule context object, or {@code null} if the parse tree
     * node does not represent a rule node of this specific type.
     */
    public static <T extends ParserRuleContext> T getTypedRuleContext(ParseTree node, Class<T> clazz) {
        if (!(node instanceof RuleNode)) {
            return null;
        }
        RuleContext ruleContext = ((RuleNode) node).getRuleContext();
        if (clazz.isInstance(ruleContext)) {
            return clazz.cast(ruleContext);
        }
        return null;
    }

    /** Static utility class; not instantiable. */
    private ParseTrees() {
    }
}
jeecg-boot/src/main/java/com/wochanye/mjob/codegenerate/mapper/EnterprisePositionMapper.java | Smallflyfly/jeecg-boot-demo | 2 | 273 | package com.fangpf.mjob.codegenerate.mapper;
import java.util.List;
import org.apache.ibatis.annotations.Param;
import com.fangpf.mjob.codegenerate.entity.EnterprisePosition;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
* @Description: Position related (职位相关)
* @Author: jeecg-boot
* @Date: 2020-02-24
* @Version: V1.0
*/
public interface EnterprisePositionMapper extends BaseMapper<EnterprisePosition> {
    // No custom SQL methods yet; all CRUD operations are inherited from the
    // MyBatis-Plus BaseMapper.
}
| 0.65625 | 1 |
unitils-test/src/test/java/org/unitils/database/SqlUnitilsGetStringListIntegrationTest.java | Silvermedia/unitils | 0 | 281 | /*
* Copyright 2013, Unitils.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.unitils.database;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.unitils.core.UnitilsException;
import java.util.List;
import static java.util.Arrays.asList;
import static org.junit.Assert.*;
import static org.unitils.database.SqlUnitils.executeUpdate;
import static org.unitils.database.SqlUnitils.executeUpdateQuietly;
import static org.unitils.reflectionassert.ReflectionAssert.assertLenientEquals;
/**
* @author <NAME>
*/
public class SqlUnitilsGetStringListIntegrationTest {

    /**
     * Creates 'my_table' in both configured databases: the default database
     * gets rows '111' and '222', 'database2' gets row '333'.
     */
    @Before
    public void initialize() {
        cleanup();
        executeUpdate("create table my_table (value varchar)");
        executeUpdate("create table my_table (value varchar)", "database2");
        executeUpdate("insert into my_table (value) values ('111')");
        executeUpdate("insert into my_table (value) values ('222')");
        executeUpdate("insert into my_table (value) values ('333')", "database2");
    }

    /** Drops 'my_table' from both databases, silently ignoring failures. */
    @After
    public void cleanup() {
        executeUpdateQuietly("drop table my_table", "database1");
        executeUpdateQuietly("drop table my_table", "database2");
    }

    /** Queries without a database name go to the default database. */
    @Test
    public void defaultDatabase() throws Exception {
        List<String> result = SqlUnitils.getStringList("select value from my_table");
        assertLenientEquals(asList("111", "222"), result);
    }

    /** Queries with an explicit database name go to that database. */
    @Test
    public void namedDatabase() throws Exception {
        List<String> result = SqlUnitils.getStringList("select value from my_table", "database2");
        assertLenientEquals(asList("333"), result);
    }

    /** SQL NULLs are returned as null elements, not skipped. */
    @Test
    public void nullValue() throws Exception {
        executeUpdate("update my_table set value = null");
        List<String> result = SqlUnitils.getStringList("select value from my_table");
        assertLenientEquals(asList(null, null), result);
    }

    /** An empty result set yields an empty list, never null. */
    @Test
    public void emptyListWhenNoResultsFound() throws Exception {
        List<String> result = SqlUnitils.getStringList("select value from my_table where value = '999'");
        assertTrue(result.isEmpty());
    }

    /** An unknown database name must fail with a descriptive message. */
    @Test
    public void exceptionWhenDatabaseNameNotFound() throws Exception {
        try {
            SqlUnitils.getStringList("select value from my_table", "xxx");
            fail("UnitilsException expected");
        } catch (UnitilsException e) {
            assertEquals("No configuration found for database with name 'xxx'", e.getMessage());
        }
    }

    /** A null database name falls back to the default database. */
    @Test
    public void defaultDatabaseWhenNullDatabaseName() throws Exception {
        List<String> result = SqlUnitils.getStringList("select value from my_table", null);
        assertLenientEquals(asList("111", "222"), result);
    }

    /** Invalid SQL must fail with the underlying grammar error in the message. */
    @Test
    public void exceptionWhenInvalidStatement() throws Exception {
        try {
            SqlUnitils.getStringList("xxx");
            fail("UnitilsException expected");
        } catch (UnitilsException e) {
            assertEquals("Unable to execute statement: 'xxx'.\n" +
                    "Reason: BadSqlGrammarException: StatementCallback; bad SQL grammar [xxx]; nested exception is java.sql.SQLSyntaxErrorException: unexpected token: XXX", e.getMessage());
        }
    }
}
| 1.304688 | 1 |
src/main/java/com/cisco/app/dbconnector/model/ConnectionPoolC3p0.java | CiscoDevNet/webexcc-dbconnector | 4 | 289 | /**
* Copyright (c) 2020 Cisco Systems, Inc. See LICENSE file.
*/
package com.cisco.app.dbconnector.model;
import java.io.Serializable;
/**
* C3p0 connection pool
* @author jiwyatt
* @since 12/12/2020
*
*/
public class ConnectionPoolC3p0 implements Serializable, ConnectionPoolDb {

    /** Serialization version for this configuration bean. */
    private static final long serialVersionUID = 4851890835088791994L;

    // c3p0 pool settings, kept as Strings (presumably because they are bound
    // from text-based configuration input - TODO confirm against the caller).
    private String initialPoolSize = "10";
    private String minPoolSize = "10";
    private String acquireIncrement = "1";
    private String maxPoolSize = "20";
    private String maxStatements = "10";
    private String unreturnedConnectionTimeout = "7";

    /** Creates a pool configuration with the default values above. */
    public ConnectionPoolC3p0() {
        super();
    }

    // NOTE(review): empty main method - appears to be leftover scaffolding.
    public static void main(String[] args) {
    }

    @Override
    public String getInitialPoolSize() {
        return initialPoolSize;
    }

    @Override
    public void setInitialPoolSize(String initialPoolSize) {
        this.initialPoolSize = initialPoolSize;
    }

    @Override
    public String getMinPoolSize() {
        return minPoolSize;
    }

    @Override
    public void setMinPoolSize(String minPoolSize) {
        this.minPoolSize = minPoolSize;
    }

    @Override
    public String getAcquireIncrement() {
        return acquireIncrement;
    }

    @Override
    public void setAcquireIncrement(String acquireIncrement) {
        this.acquireIncrement = acquireIncrement;
    }

    @Override
    public String getMaxPoolSize() {
        return maxPoolSize;
    }

    @Override
    public void setMaxPoolSize(String maxPoolSize) {
        this.maxPoolSize = maxPoolSize;
    }

    @Override
    public String getMaxStatements() {
        return maxStatements;
    }

    @Override
    public void setMaxStatements(String maxStatements) {
        this.maxStatements = maxStatements;
    }

    @Override
    public String getUnreturnedConnectionTimeout() {
        return unreturnedConnectionTimeout;
    }

    @Override
    public void setUnreturnedConnectionTimeout(String unreturnedConnectionTimeout) {
        this.unreturnedConnectionTimeout = unreturnedConnectionTimeout;
    }

    /** Exposes the serialVersionUID (non-standard accessor, kept for compatibility). */
    public static long getSerialversionuid() {
        return serialVersionUID;
    }

    @Override
    public String toString() {
        return "ConnectionPoolC3p0 [initialPoolSize=" + initialPoolSize + ", minPoolSize=" + minPoolSize
                + ", acquireIncrement=" + acquireIncrement + ", maxPoolSize=" + maxPoolSize + ", maxStatements="
                + maxStatements + ", unreturnedConnectionTimeout=" + unreturnedConnectionTimeout + "]";
    }
}
| 1.179688 | 1 |
components/identity/org.wso2.carbon.identity.tools.saml.validator/src/main/java/org/wso2/carbon/identity/tools/saml/validator/processors/SAMLAuthnRequestValidator.java | thariyarox/ORG-carbon-identity | 0 | 297 | /*
* Copyright 2005-2014 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.identity.tools.saml.validator.processors;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.opensaml.common.SAMLVersion;
import org.opensaml.saml2.core.AuthnRequest;
import org.opensaml.saml2.core.Issuer;
import org.opensaml.saml2.core.RequestAbstractType;
import org.opensaml.saml2.core.Subject;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.identity.base.IdentityConstants;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.model.SAMLSSOServiceProviderDO;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.sso.saml.util.SAMLSSOUtil;
import org.wso2.carbon.identity.tools.saml.validator.dto.ValidatedItemDTO;
import org.wso2.carbon.identity.tools.saml.validator.util.SAMLValidatorConstants;
import org.wso2.carbon.identity.tools.saml.validator.util.SAMLValidatorUtil;
import java.util.List;
public class SAMLAuthnRequestValidator {
private static Log log = LogFactory.getLog(SAMLAuthnRequestValidator.class);
private AuthnRequest authnRequest;
private boolean isPost = false;
private String queryString = null;
private String issuerStr = null;
public SAMLAuthnRequestValidator(AuthnRequest authnRequest) {
this.setAuthnRequest(authnRequest);
}
/**
* Validate SP initiated SAML AuthnRequest
*
* @param validatedItems
* @throws IdentityException
*/
public void validate(List<ValidatedItemDTO> validatedItems) throws IdentityException {
// Validate version - version must be SAML 2.0
if (authnRequest.getVersion().equals(SAMLVersion.VERSION_20)) {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_VERSION,
true,
SAMLValidatorConstants.ValidationMessage.VAL_VERSION_SUCCESS));
} else {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_VERSION,
false,
String.format(SAMLValidatorConstants.ValidationMessage.VAL_VERSION_FAIL,
authnRequest.getVersion())));
throw new IdentityException(SAMLValidatorConstants.ValidationMessage.EXIT_WITH_ERROR);
}
Issuer issuer = authnRequest.getIssuer();
Subject subject = authnRequest.getSubject();
// Validate Issuer/ProviderName - at least one should not be null
if (issuer.getValue() == null && issuer.getSPProvidedID() == null) {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_ISSUER,
false,
SAMLValidatorConstants.ValidationMessage.VAL_ISSUER_FAIL));
throw new IdentityException(SAMLValidatorConstants.ValidationMessage.EXIT_WITH_ERROR);
} else {
issuerStr = issuer.getValue() != null ? issuer.getValue() : issuer.getSPProvidedID();
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_ISSUER,
true,
SAMLValidatorConstants.ValidationMessage.VAL_ISSUER_SUCCESS));
}
if (issuer.getFormat() != null) {
if (issuer.getFormat().equals(SAMLValidatorConstants.Attribute.ISSUER_FORMAT)) {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_ISSUER_FORMAT,
true,
SAMLValidatorConstants.ValidationMessage.VAL_ISSUER_FMT_SUCCESS));
} else {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_ISSUER_FORMAT,
false,
SAMLValidatorConstants.ValidationMessage.VAL_ISSUER_FMT_FAIL));
throw new IdentityException(
SAMLValidatorConstants.ValidationMessage.EXIT_WITH_ERROR);
}
}
// Load SSO IdP configuration for issuer
SAMLSSOServiceProviderDO ssoIdPConfigs = null;
try {
ssoIdPConfigs = SAMLValidatorUtil.getServiceProviderConfig(issuer.getValue());
} catch (IdentityException e) {
log.error(e.getMessage());
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_IDP_CONFIGS,
false,
String.format(SAMLValidatorConstants.ValidationMessage.VAL_IDP_CONFIGS_FAIL,
authnRequest.getIssuer()
.getValue())));
throw new IdentityException(SAMLValidatorConstants.ValidationMessage.EXIT_WITH_ERROR);
}
if (ssoIdPConfigs == null) {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_IDP_CONFIGS,
false,
String.format(SAMLValidatorConstants.ValidationMessage.VAL_IDP_CONFIGS_FAIL,
authnRequest.getIssuer()
.getValue())));
throw new IdentityException(SAMLValidatorConstants.ValidationMessage.EXIT_WITH_ERROR);
} else {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_IDP_CONFIGS,
true,
String.format(SAMLValidatorConstants.ValidationMessage.VAL_IDP_CONFIGS_SUCCESS,
authnRequest.getIssuer()
.getValue())));
}
// Validating Assertion Consumer URL
String consumerServiceURL = authnRequest.getAssertionConsumerServiceURL();
if (consumerServiceURL != null &&
ssoIdPConfigs.getAssertionConsumerUrl().equals(consumerServiceURL)) {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_CONSUM_URL,
true,
String.format(SAMLValidatorConstants.ValidationMessage.VAL_CONSUM_URL_SUCCESS,
consumerServiceURL)));
} else {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_CONSUM_URL,
false,
String.format(SAMLValidatorConstants.ValidationMessage.VAL_CONSUM_URL_FAIL,
consumerServiceURL,
ssoIdPConfigs.getAssertionConsumerUrl())));
throw new IdentityException(SAMLValidatorConstants.ValidationMessage.EXIT_WITH_ERROR);
}
// Validating SubjectID format
if (subject != null && subject.getNameID() != null &&
subject.getNameID().getFormat() != null && ssoIdPConfigs.getNameIDFormat() != null &&
subject.getNameID().getFormat().equals(ssoIdPConfigs.getNameIDFormat())) {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_SUB_NAMEID_FMT,
true,
SAMLValidatorConstants.ValidationMessage.VAL_SUB_NAMEID_SUCCESS));
}
// Subject Confirmation methods should NOT be available
if (subject != null && subject.getSubjectConfirmations() != null) {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_SUB_CONF_MTHD,
false,
SAMLValidatorConstants.ValidationMessage.VAL_SUB_CONF_MTHD_FAIL));
}
if (ssoIdPConfigs.isDoValidateSignatureInRequests()) {
// Validate Destination
String idPUrl = IdentityUtil.getProperty(IdentityConstants.ServerConfig.SSO_IDP_URL);
if (authnRequest.getDestination() != null &&
idPUrl.equals(authnRequest.getDestination())) {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_DESTINATION,
true,
String.format(SAMLValidatorConstants.ValidationMessage.VAL_DESTINATION_SUCCESS,
authnRequest.getDestination())));
} else {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_DESTINATION,
false,
String.format(SAMLValidatorConstants.ValidationMessage.VAL_DESTINATION_FAIL,
authnRequest.getDestination(),
idPUrl)));
throw new IdentityException(
SAMLValidatorConstants.ValidationMessage.EXIT_WITH_ERROR);
}
// Validate Signature
String alias = ssoIdPConfigs.getCertAlias();
String domainName = CarbonContext.getThreadLocalCarbonContext().getTenantDomain();
try {
boolean isValid = false;
if (isPost) {
isValid =
SAMLSSOUtil.validateXMLSignature((RequestAbstractType) authnRequest,
alias, domainName);
} else {
isValid =
SAMLSSOUtil.validateDeflateSignature(queryString, issuerStr, alias,
domainName);
}
if (isValid) {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_SIGNATURE,
true,
SAMLValidatorConstants.ValidationMessage.VAL_SIGNATURE_SUCCESS));
} else {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_SIGNATURE,
false,
SAMLValidatorConstants.ValidationMessage.VAL_SIGNATURE_FAIL));
}
} catch (IdentityException e) {
validatedItems.add(new ValidatedItemDTO(
SAMLValidatorConstants.ValidationType.VAL_SIGNATURE,
false,
String.format(SAMLValidatorConstants.ValidationMessage.VAL_SIGNATURE_ERROR,
e.getMessage())));
throw new IdentityException(
SAMLValidatorConstants.ValidationMessage.EXIT_WITH_ERROR);
}
}
}
public AuthnRequest getAuthnRequest() {
return authnRequest;
}
public void setAuthnRequest(AuthnRequest authnRequest) {
this.authnRequest = authnRequest;
}
public void setPost(boolean isPost) {
this.isPost = isPost;
}
public void setQueryString(String queryString) {
this.queryString = queryString;
}
}
| 1.0625 | 1 |
mozu-java-core/src/main/java/com/mozu/api/contracts/sitesettings/order/OrderProcessingSettings.java | Mozu/mozu-java | 4 | 305 | /**
* This code was auto-generated by a Codezu.
*
* Changes to this file may cause incorrect behavior and will be lost if
* the code is regenerated.
*/
package com.mozu.api.contracts.sitesettings.order;
import java.util.List;
import java.util.HashMap;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import org.joda.time.DateTime;
import java.io.IOException;
import java.lang.ClassNotFoundException;
import com.mozu.api.contracts.core.AuditInfo;
import com.mozu.api.contracts.sitesettings.order.ThirdPartyPaymentSetting;
/**
 * Settings that determine how orders are processed and order payments are authorized for the site.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class OrderProcessingSettings implements Serializable
{
    // Default Serial Version UID
    private static final long serialVersionUID = 1L;

    /** Number of minutes after which a pending order is considered abandoned. */
    protected Integer abandonedOrderThresholdInMinutes;

    public Integer getAbandonedOrderThresholdInMinutes() {
        return abandonedOrderThresholdInMinutes;
    }

    public void setAbandonedOrderThresholdInMinutes(Integer abandonedOrderThresholdInMinutes) {
        this.abandonedOrderThresholdInMinutes = abandonedOrderThresholdInMinutes;
    }

    /** The gift card payment type being used to perform this purchase. */
    protected String giftCardProcessingType;

    public String getGiftCardProcessingType() {
        return giftCardProcessingType;
    }

    public void setGiftCardProcessingType(String giftCardProcessingType) {
        this.giftCardProcessingType = giftCardProcessingType;
    }

    protected String paymentProcessingFlowType;

    public String getPaymentProcessingFlowType() {
        return paymentProcessingFlowType;
    }

    public void setPaymentProcessingFlowType(String paymentProcessingFlowType) {
        this.paymentProcessingFlowType = paymentProcessingFlowType;
    }

    protected Boolean useOverridePriceToCalculateDiscounts;

    public Boolean getUseOverridePriceToCalculateDiscounts() {
        return useOverridePriceToCalculateDiscounts;
    }

    public void setUseOverridePriceToCalculateDiscounts(Boolean useOverridePriceToCalculateDiscounts) {
        this.useOverridePriceToCalculateDiscounts = useOverridePriceToCalculateDiscounts;
    }

    protected AuditInfo auditInfo;

    public AuditInfo getAuditInfo() {
        return auditInfo;
    }

    public void setAuditInfo(AuditInfo auditInfo) {
        this.auditInfo = auditInfo;
    }

    /** The settings of the third party payment gateways as configured by the user. */
    protected List<ThirdPartyPaymentSetting> thirdPartyPaymentSettings;

    public List<ThirdPartyPaymentSetting> getThirdPartyPaymentSettings() {
        return thirdPartyPaymentSettings;
    }

    public void setThirdPartyPaymentSettings(List<ThirdPartyPaymentSetting> thirdPartyPaymentSettings) {
        this.thirdPartyPaymentSettings = thirdPartyPaymentSettings;
    }
}
| 1.242188 | 1 |
TeamCode/src/main/java/org/firstinspires/ftc/teamcode/Link.java | FTC-9974-THOR/vv-competition-code | 0 | 313 | package org.firstinspires.ftc.teamcode;
/**
 * Directed connection between two neural-network nodes.
 * <p>
 * Fix: the endpoint fields were stored but unreadable (write-only private
 * state); they are now final with read accessors.
 *
 * Created by FTC on 12/3/2016.
 */
public class Link {

    // Endpoints are fixed at construction time.
    private final NeuralNode input;
    private final NeuralNode output;

    /**
     * @param in  node this link originates from
     * @param out node this link feeds into
     */
    public Link(NeuralNode in, NeuralNode out) {
        input = in;
        output = out;
    }

    /** Returns the source node of this link. */
    public NeuralNode getInput() {
        return input;
    }

    /** Returns the destination node of this link. */
    public NeuralNode getOutput() {
        return output;
    }
}
| 0.458984 | 0 |
src/main/java/com/techgrains/service/EmployeeService.java | techgrains/TGRESTful | 0 | 321 | package com.techgrains.service;
import com.techgrains.dao.EmployeeDAO;
import com.techgrains.exception.TGConflictException;
import com.techgrains.exception.TGForbiddenException;
import com.techgrains.exception.TGNotFoundException;
import com.techgrains.model.Department;
import com.techgrains.model.Designation;
import com.techgrains.model.Employee;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.SerializationUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@Service
public class EmployeeService {

    @Autowired
    EmployeeDAO employeeDAO;

    /**
     * Looks up an employee by id.
     *
     * @throws TGNotFoundException when no employee exists for the id
     */
    public Employee findById(long id) throws TGNotFoundException {
        Employee found = employeeDAO.getEmployeeById(id);
        if (found == null) {
            throw new TGNotFoundException("Employee not found for id - " + id);
        }
        return found;
    }

    public List<Employee> getAllEmployees() {
        return employeeDAO.getAllEmployees();
    }

    public List<Department> getAllDepartments() {
        return employeeDAO.getAllDepartments();
    }

    /** Filters all employees by name, designation and department code. */
    public List<Employee> listByNameDesignationAndDepartmentcode(String name, Designation designation, Integer departmentCode) {
        List<Employee> matches = new ArrayList<>();
        for (Employee candidate : employeeDAO.getAllEmployees()) {
            if (candidate.hasNameDesignationDepartment(name, designation, departmentCode)) {
                matches.add(candidate);
            }
        }
        return matches;
    }

    /**
     * Creates a new employee with a demo id.
     *
     * @throws TGConflictException when an employee with the name already exists
     */
    public Employee createEmployee(String name, Designation designation) {
        if (employeeDAO.getEmployeeByName(name) != null) {
            throw new TGConflictException("Employee already exists for name - " + name);
        }
        return new Employee(999, name, designation);
    }

    /** Adds an existing department to an employee (returns a detached copy). */
    public Employee addDepartments(long id, int departmentCode) {
        Employee employee = findById(id);
        if (!employeeDAO.isDepartmentExists(departmentCode)) {
            throw new TGNotFoundException("Department not found for code - " + departmentCode);
        }
        if (employee.hasDepartment(departmentCode)) {
            throw new TGForbiddenException("Employee already has department code - " + departmentCode);
        }
        Employee detached = employee.copy(); // To avoid changes in static database instance
        Department department = employeeDAO.getDepartment(departmentCode);
        if (department != null) {
            detached.addDepartment(department);
        }
        return detached;
    }

    /** Updates name and/or designation on a detached copy of the employee. */
    public Employee update(long id, String name, Designation designation) {
        Employee employee = findById(id).copy(); // To avoid changes in static database instance
        if (name != null && name.length() > 0) {
            if (employeeDAO.getEmployeeByName(name) != null) {
                throw new TGConflictException("Employee already exists for name - " + name);
            }
            employee.setName(name);
        }
        if (designation != null) {
            if (employee.getDesignation().equals(Designation.Director)) {
                throw new TGForbiddenException("System can't change designation of the Director.");
            }
            employee.setDesignation(designation);
        }
        return employee;
    }

    public void deleteEmployee(long id) {
        // Demo backend: only verify existence; the static data set stays untouched.
        findById(id);
    }

    /** Removes a department from an employee (returns a detached copy). */
    public Employee deleteDepartment(long id, int departmentCode) {
        Employee employee = findById(id);
        if (!employee.hasDepartment(departmentCode)) {
            throw new TGNotFoundException("Employee doesn't have department having code - " + departmentCode);
        }
        Employee detached = employee.copy();
        detached.removeDepartment(departmentCode);
        return detached;
    }
}
| 1.429688 | 1 |
lucene/core/src/java/org/apache/lucene/util/packed/DirectMonotonicWriter.java | HuBlanker/lucene-solr-8.7.0 | 2 | 329 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.util.packed;
import java.io.IOException;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ArrayUtil;
/**
 * Write monotonically-increasing sequences of integers. This writer splits
 * data into blocks and then for each block, computes the average slope, the
 * minimum value and only encode the delta from the expected value using a
 * {@link DirectWriter}.
 * <p>
 * Each block records its minimum value and average slope in the metadata
 * stream; the data stream then only needs the small non-negative deltas from
 * the extrapolated line, which pack into few bits. Smaller numbers compress
 * better with {@link DirectWriter}, so this delta encoding is very effective
 * for naturally monotonic data such as file offsets.
 *
 * @lucene.internal
 * @see DirectMonotonicReader
 */
public final class DirectMonotonicWriter {

    // Bounds for the block-size exponent: each block holds 2^blockShift values.
    public static final int MIN_BLOCK_SHIFT = 2;
    public static final int MAX_BLOCK_SHIFT = 22;

    // Per-block metadata (min, slope, data offset, bit width) goes to "meta";
    // the packed deltas go to "data".
    final IndexOutput meta;
    final IndexOutput data;
    // Total number of values the caller promised to add (across all blocks).
    final long numValues;
    // File position of the data output when this writer was created.
    final long baseDataPointer;
    // In-memory buffer for the values of the current block.
    final long[] buffer;
    // Number of values currently buffered (reset on every flush).
    int bufferSize;
    // Running count of all values added; bufferSize alone would be lost on flush.
    long count;
    boolean finished;

    DirectMonotonicWriter(IndexOutput metaOut, IndexOutput dataOut, long numValues, int blockShift) {
        if (blockShift < MIN_BLOCK_SHIFT || blockShift > MAX_BLOCK_SHIFT) {
            throw new IllegalArgumentException("blockShift must be in [" + MIN_BLOCK_SHIFT + "-" + MAX_BLOCK_SHIFT + "], got " + blockShift);
        }
        if (numValues < 0) {
            throw new IllegalArgumentException("numValues can't be negative, got " + numValues);
        }
        // Number of blocks needed for numValues values of 2^blockShift each;
        // computed only to validate that the block count fits in an array.
        final long numBlocks = numValues == 0 ? 0 : ((numValues - 1) >>> blockShift) + 1;
        if (numBlocks > ArrayUtil.MAX_ARRAY_LENGTH) {
            throw new IllegalArgumentException("blockShift is too low for the provided number of values: blockShift=" + blockShift +
                    ", numValues=" + numValues + ", MAX_ARRAY_LENGTH=" + ArrayUtil.MAX_ARRAY_LENGTH);
        }
        this.meta = metaOut;
        this.data = dataOut;
        this.numValues = numValues;
        // Buffer one full block, or fewer slots if numValues is smaller than a block.
        final int blockSize = 1 << blockShift;
        this.buffer = new long[(int) Math.min(numValues, blockSize)];
        this.bufferSize = 0;
        this.baseDataPointer = dataOut.getFilePointer();
    }

    /**
     * Encodes and writes the buffered block; called when a block fills up or
     * from {@link #finish()}.
     * <p>
     * Encoding: store the block's average slope and minimum, then for each
     * index only the delta from the extrapolated line. Example: [100, 101, 108]
     * is stored as deltas [3, 0, 3] with slope 4.0 and min 97, and decodes as
     * {@code value[i] = min + delta[i] + i * slope}:
     * 100 = 97 + 3 + 0*4.0, 101 = 97 + 0 + 1*4.0, 108 = 97 + 3 + 2*4.0.
     *
     * @throws IOException if writing to the outputs fails
     */
    private void flush() throws IOException {
        assert bufferSize != 0;

        // Average slope: (last - first) / (count - 1), the usual formula.
        final float avgInc = (float) ((double) (buffer[bufferSize - 1] - buffer[0]) / Math.max(1, bufferSize - 1));
        // Replace each value by its offset from the line through the block's
        // endpoints — this is the delta encoding that turns large monotonic
        // values into small numbers.
        for (int i = 0; i < bufferSize; ++i) {
            final long expected = (long) (avgInc * (long) i);
            buffer[i] -= expected;
        }

        // The offsets may be negative; find the minimum so they can be
        // re-based to non-negative deltas.
        long min = buffer[0];
        for (int i = 1; i < bufferSize; ++i) {
            min = Math.min(buffer[i], min);
        }

        // Re-base every offset against the minimum and accumulate the maximum
        // delta to size the bit packing.
        long maxDelta = 0;
        for (int i = 0; i < bufferSize; ++i) {
            buffer[i] -= min;
            // use | will change nothing when it comes to computing required bits
            // but has the benefit of working fine with negative values too
            // (in case of overflow)
            maxDelta |= buffer[i];
        }

        // Block metadata: minimum, average slope, and this block's offset in
        // the data stream relative to where the writer started.
        meta.writeLong(min);
        meta.writeInt(Float.floatToIntBits(avgInc));
        meta.writeLong(data.getFilePointer() - baseDataPointer);
        if (maxDelta == 0) {
            // All deltas are zero (a perfect arithmetic progression): record a
            // zero bit width and write nothing to the data stream.
            meta.writeByte((byte) 0);
        } else {
            // Pack the deltas into the data stream with the minimum bit width,
            // then record that width in the metadata.
            final int bitsRequired = DirectWriter.unsignedBitsRequired(maxDelta);
            DirectWriter writer = DirectWriter.getInstance(data, bufferSize, bitsRequired);
            for (int i = 0; i < bufferSize; ++i) {
                writer.add(buffer[i]);
            }
            writer.finish();
            meta.writeByte((byte) bitsRequired);
        }
        // Reset the buffer so the same array is reused for the next block.
        bufferSize = 0;
    }

    long previous = Long.MIN_VALUE;

    /**
     * Write a new value. Note that data might not make it to storage until
     * {@link #finish()} is called.
     *
     * @throws IllegalArgumentException if values don't come in order
     */
    public void add(long v) throws IOException {
        // Enforce the monotonicity contract.
        if (v < previous) {
            throw new IllegalArgumentException("Values do not come in order: " + previous + ", " + v);
        }
        // A full buffer means the current block is complete — encode and write it.
        if (bufferSize == buffer.length) {
            flush();
        }
        buffer[bufferSize++] = v;
        previous = v;
        count++;
    }

    /**
     * This must be called exactly once after all values have been {@link #add(long) added}.
     */
    public void finish() throws IOException {
        if (count != numValues) {
            throw new IllegalStateException("Wrong number of values added, expected: " + numValues + ", got: " + count);
        }
        // Guard against a second call.
        if (finished) {
            throw new IllegalStateException("#finish has been called already");
        }
        // Write out the final, possibly partial, block.
        if (bufferSize > 0) {
            flush();
        }
        finished = true;
    }

    /**
     * Returns an instance suitable for encoding {@code numValues} into monotonic
     * blocks of 2<sup>{@code blockShift}</sup> values. Metadata will be written
     * to {@code metaOut} and actual data to {@code dataOut}.
     */
    public static DirectMonotonicWriter getInstance(IndexOutput metaOut, IndexOutput dataOut, long numValues, int blockShift) {
        return new DirectMonotonicWriter(metaOut, dataOut, numValues, blockShift);
    }
}
| 1.929688 | 2 |
core-java/retrofit/introduction-to-retrofit/SimpleLibraryApplication/src/main/java/com/reflectoring/library/config/SwaggerConfig.java | arpendu11/code-examples | 0 | 337 | package com.reflectoring.library.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.AuthorizationScope;
import springfox.documentation.service.BasicAuth;
import springfox.documentation.service.SecurityReference;
import springfox.documentation.service.SecurityScheme;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spi.service.contexts.SecurityContext;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
import java.util.ArrayList;
@Configuration
@EnableSwagger2
public class SwaggerConfig {

    /**
     * Builds the Swagger docket: basic-auth security applied to the managed
     * books endpoints of this application's package.
     */
    @Bean
    public Docket api() {
        ArrayList<SecurityReference> references = new ArrayList<>(1);
        references.add(SecurityReference.builder()
                .reference("basicAuth")
                .scopes(new AuthorizationScope[0])
                .build());

        ArrayList<SecurityContext> contexts = new ArrayList<>(1);
        contexts.add(SecurityContext.builder().securityReferences(references).build());

        ArrayList<SecurityScheme> schemes = new ArrayList<>(1);
        schemes.add(new BasicAuth("basicAuth"));

        return new Docket(DocumentationType.SWAGGER_2)
                .securitySchemes(schemes)
                .securityContexts(contexts)
                .select()
                .paths(PathSelectors.ant("/library/managed/books/**"))
                .apis(RequestHandlerSelectors.basePackage("com.reflectoring.library"))
                .build();
    }
}
| 1.164063 | 1 |
velvetdb-core/src/test/java/com/zakgof/db/velvet/test/PutGetTest.java | zakgof/velvetdb | 13 | 345 | package com.zakgof.db.velvet.test;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import com.zakgof.db.velvet.entity.Entities;
import com.zakgof.db.velvet.entity.IEntityDef;
/**
 * Basic put/get/delete round-trip tests for the velvetdb entity API.
 * <p>
 * Improvement: the identical seed / assert / cleanup loops that were copied
 * into every test are extracted into private helpers, so each test reads as
 * its scenario only.
 */
public class PutGetTest extends AVelvetTxnTest {

    private static final int COUNT = 6;
    private static final int HALFCOUNT = COUNT / 2;

    private IEntityDef<String, TestEnt> ENTITY = Entities.create(TestEnt.class);

    // Seeds COUNT entities keyed "key0".."key5" with value d * 0.001f.
    private void putAll() {
        for (int d = 0; d < COUNT; d++) {
            ENTITY.put(velvet, new TestEnt("key" + d, d * 0.001f));
        }
    }

    // Deletes the entities with indexes in [from, to) so tests stay independent.
    private void deleteRange(int from, int to) {
        for (int d = from; d < to; d++) {
            ENTITY.deleteKey(velvet, "key" + d);
        }
    }

    // Asserts that entity "key{d}" exists with value d * factor.
    private void assertEnt(int d, float factor) {
        TestEnt ent = ENTITY.get(velvet, "key" + d);
        Assert.assertNotNull(ent);
        Assert.assertEquals("key" + d, ent.getKey());
        Assert.assertEquals(d * factor, ent.getVal(), 1e-5);
    }

    @Test
    public void testSimplePutGet() {
        putAll();

        for (int d = 0; d < COUNT; d++) {
            assertEnt(d, 0.001f);
        }
        Assert.assertNull(ENTITY.get(velvet, "key1001"));

        List<TestEnt> allValues = ENTITY.batchGetAllList(velvet);
        Assert.assertEquals(COUNT, allValues.size());

        deleteRange(0, COUNT);
    }

    @Test
    public void testUpdate() {
        putAll();
        // Overwrite every entity; put on an existing key must replace the value.
        for (int d = 0; d < COUNT; d++) {
            ENTITY.put(velvet, new TestEnt("key" + d, d * 0.002f));
        }

        for (int d = 0; d < COUNT; d++) {
            assertEnt(d, 0.002f);
        }

        List<TestEnt> allValues = ENTITY.batchGetAllList(velvet);
        Assert.assertEquals(COUNT, allValues.size());

        deleteRange(0, COUNT);
    }

    @Test
    public void testDeleteKey() {
        putAll();
        deleteRange(0, HALFCOUNT);

        List<TestEnt> allValues = ENTITY.batchGetAllList(velvet);
        Assert.assertEquals(HALFCOUNT, allValues.size());

        for (int d = 0; d < HALFCOUNT; d++) {
            Assert.assertNull(ENTITY.get(velvet, "key" + d));
        }
        for (int d = HALFCOUNT; d < COUNT; d++) {
            assertEnt(d, 0.001f);
        }

        deleteRange(HALFCOUNT, COUNT);
    }

    @Test
    public void testDeleteValue() {
        putAll();
        // Deleting by value must resolve the key from the entity; the value
        // payload itself is irrelevant for the lookup.
        for (int d = 0; d < HALFCOUNT; d++) {
            ENTITY.deleteValue(velvet, new TestEnt("key" + d, -1.0f));
        }

        for (int d = 0; d < HALFCOUNT; d++) {
            Assert.assertNull(ENTITY.get(velvet, "key" + d));
        }
        for (int d = HALFCOUNT; d < COUNT; d++) {
            assertEnt(d, 0.001f);
        }

        deleteRange(HALFCOUNT, COUNT);
    }

    @Test
    public void testEntityAttrs() {
        Assert.assertEquals("testent", ENTITY.getKind());
        Assert.assertEquals(String.class, ENTITY.getKeyClass());
        Assert.assertEquals(TestEnt.class, ENTITY.getValueClass());
        Assert.assertEquals("k", ENTITY.keyOf(new TestEnt("k", 1.3f)));
    }

    @Test
    public void testContains() {
        putAll();

        for (int d = 0; d < COUNT; d++) {
            Assert.assertTrue(ENTITY.containsKey(velvet, "key" + d));
            Assert.assertFalse(ENTITY.containsKey(velvet, "nokey" + d));
        }
        Assert.assertEquals(COUNT, ENTITY.size(velvet));

        deleteRange(0, COUNT);
    }
}
| 1.296875 | 1 |
biemo-biz/modular-log/biz-support-log/src/main/java/com/biemo/cloud/biz/log/config/LogAutoConfiguration.java | biemo8/bbs-cloud | 74 | 353 | package com.biemo.cloud.biz.log.config;
import com.biemo.cloud.biz.log.core.db.CommonLogInitializer;
import com.biemo.cloud.biz.log.core.db.TraceLogInitializer;
import com.biemo.cloud.biz.log.core.listener.LogMessageListener;
import com.biemo.cloud.biz.log.modular.controller.LogController;
import com.biemo.cloud.biz.log.modular.provider.LogServiceProvider;
import com.biemo.cloud.biz.log.modular.service.CommonLogService;
import com.biemo.cloud.biz.log.modular.service.TraceLogService;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Auto-configuration for the log service: registers the database
 * initializers, message listener, controller and service beans.
 *
 *
 * @date 2018-07-30 3:44 PM
 */
@Configuration
public class LogAutoConfiguration {

    /**
     * Database initializers for the log stores.
     */
    @Bean
    public CommonLogInitializer commonLogInitializer() {
        return new CommonLogInitializer();
    }

    @Bean
    public TraceLogInitializer traceLogInitializer() {
        return new TraceLogInitializer();
    }

    /**
     * Message listener for log events.
     */
    @Bean
    public LogMessageListener logMessageListener() {
        return new LogMessageListener();
    }

    /**
     * Controller bean.
     */
    @Bean
    public LogController logController() {
        return new LogController();
    }

    /**
     * Service beans.
     */
    @Bean
    public CommonLogService commonLogService() {
        return new CommonLogService();
    }

    @Bean
    public TraceLogService traceLogService() {
        return new TraceLogService();
    }

    /**
     * Service provider bean.
     */
    @Bean
    public LogServiceProvider logServiceProvider() {
        return new LogServiceProvider();
    }
}
| 0.988281 | 1 |
noark-orm/src/main/java/xyz/noark/orm/accessor/sql/mysql/adaptor/BooleanAdaptor.java | 397786756/noark3 | 17 | 361 | /*
* Copyright © 2018 www.noark.xyz All Rights Reserved.
*
* 感谢您选择Noark框架,希望我们的努力能为您提供一个简单、易用、稳定的服务器端框架 !
* 除非符合Noark许可协议,否则不得使用该文件,您可以下载许可协议文件:
*
* http://www.noark.xyz/LICENSE
*
* 1.未经许可,任何公司及个人不得以任何方式或理由对本框架进行修改、使用和传播;
* 2.禁止在本项目或任何子项目的基础上发展任何派生版本、修改版本或第三方版本;
* 3.无论你对源代码做出任何修改和改进,版权都归Noark研发团队所有,我们保留所有权利;
* 4.凡侵犯Noark版权等知识产权的,必依法追究其法律责任,特此郑重法律声明!
*/
package xyz.noark.orm.accessor.sql.mysql.adaptor;
import xyz.noark.orm.FieldMapping;
import xyz.noark.orm.accessor.sql.PreparedStatementProxy;
import java.sql.ResultSet;
/**
 * Value adaptor for Boolean-typed entity properties.
 *
 * @author 小流氓[<EMAIL>]
 * @since 3.0
 */
class BooleanAdaptor extends AbstractValueAdaptor<Boolean> {

    // Binds a Boolean property value to the given JDBC statement parameter.
    @Override
    protected void toPreparedStatement(FieldMapping fm, PreparedStatementProxy pstmt, Boolean value, int parameterIndex) throws Exception {
        pstmt.setBoolean(parameterIndex, value);
    }

    // Reads the mapped column from the result set as a boolean.
    @Override
    protected Object toParameter(FieldMapping fm, ResultSet rs) throws Exception {
        return rs.getBoolean(fm.getColumnName());
    }
}
app/src/test/java/com/anysoftkeyboard/AnySoftKeyboardViewRelatedTest.java | up781212/AnySoftKeyboard-Python | 0 | 369 | package com.anysoftkeyboard;
import android.app.AlertDialog;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.FrameLayout;
import com.anysoftkeyboard.api.KeyCodes;
import com.anysoftkeyboard.ime.InputViewBinder;
import com.anysoftkeyboard.keyboards.views.AnyKeyboardView;
import com.anysoftkeyboard.keyboards.views.KeyboardViewContainerView;
import com.menny.android.anysoftkeyboard.R;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.robolectric.Shadows;
import org.robolectric.annotation.Config;
import org.robolectric.shadows.ShadowAlertDialog;
@RunWith(AnySoftKeyboardRobolectricTestRunner.class)
public class AnySoftKeyboardViewRelatedTest extends AnySoftKeyboardBaseTest {
@Test
public void testOnCreateInputView() throws Exception {
View mainKeyboardView = mAnySoftKeyboardUnderTest.getInputViewContainer();
Assert.assertNotNull(mainKeyboardView);
Assert.assertTrue(mainKeyboardView instanceof KeyboardViewContainerView);
KeyboardViewContainerView containerView = (KeyboardViewContainerView) mainKeyboardView;
Assert.assertEquals(1, containerView.getChildCount());
final View inputView = containerView.getChildAt(0);
Assert.assertNotNull(inputView);
Assert.assertTrue(inputView instanceof AnyKeyboardView);
Assert.assertSame(inputView, containerView.getStandardKeyboardView());
Mockito.verify(containerView.getStandardKeyboardView(), Mockito.atLeastOnce()).setWatermark(Mockito.anyList());
}
@Test
public void testSettingsBasic() throws Exception {
Assert.assertNull(ShadowAlertDialog.getLatestAlertDialog());
mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.SETTINGS);
final AlertDialog latestAlertDialog = ShadowAlertDialog.getLatestAlertDialog();
Assert.assertNotNull(latestAlertDialog);
final ShadowAlertDialog shadowAlertDialog = Shadows.shadowOf(latestAlertDialog);
Assert.assertEquals("AnySoftKeyboard", shadowAlertDialog.getTitle());
Assert.assertEquals(4, shadowAlertDialog.getItems().length);
}
    /**
     * Toggling incognito via the settings dialog must flip the incognito state
     * of the suggester and quick-key history, and show/hide the watermark.
     */
    @Test
    public void testSettingsIncognito() throws Exception {
        //initial watermark
        ViewTestUtils.assertCurrentWatermarkDoesNotHaveDrawable(mAnySoftKeyboardUnderTest.getInputView(), R.drawable.ic_watermark_incognito);
        // Reset so later watermark assertions see only the toggle's effect.
        Mockito.reset(mAnySoftKeyboardUnderTest.getInputView());

        Assert.assertNull(ShadowAlertDialog.getLatestAlertDialog());

        // Open the settings dialog and verify item 3 is the incognito toggle.
        mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.SETTINGS);
        final AlertDialog latestAlertDialog = ShadowAlertDialog.getLatestAlertDialog();
        final ShadowAlertDialog shadowAlertDialog = Shadows.shadowOf(latestAlertDialog);
        Assert.assertEquals("\uD83D\uDD75️ Incognito Mode", shadowAlertDialog.getItems()[3]);

        // Enable incognito: both collaborators flip on and the watermark appears.
        Assert.assertFalse(mAnySoftKeyboardUnderTest.getSpiedSuggest().isIncognitoMode());
        Assert.assertFalse(mAnySoftKeyboardUnderTest.getQuickKeyHistoryRecords().isIncognitoMode());

        shadowAlertDialog.clickOnItem(3);
        Assert.assertTrue(mAnySoftKeyboardUnderTest.getSpiedSuggest().isIncognitoMode());
        Assert.assertTrue(mAnySoftKeyboardUnderTest.getQuickKeyHistoryRecords().isIncognitoMode());
        ViewTestUtils.assertCurrentWatermarkHasDrawable(mAnySoftKeyboardUnderTest.getInputView(), R.drawable.ic_watermark_incognito);

        // Toggle again: everything returns to the non-incognito state.
        Mockito.reset(mAnySoftKeyboardUnderTest.getInputView());
        mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.SETTINGS);
        Shadows.shadowOf(ShadowAlertDialog.getLatestAlertDialog()).clickOnItem(3);

        Assert.assertFalse(mAnySoftKeyboardUnderTest.getSpiedSuggest().isIncognitoMode());
        Assert.assertFalse(mAnySoftKeyboardUnderTest.getQuickKeyHistoryRecords().isIncognitoMode());
        ViewTestUtils.assertCurrentWatermarkDoesNotHaveDrawable(mAnySoftKeyboardUnderTest.getInputView(), R.drawable.ic_watermark_incognito);
    }
@Test
public void testSettingsOverrideDictionary() throws Exception {
mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.SETTINGS);
final AlertDialog settingsAlertDialog = ShadowAlertDialog.getLatestAlertDialog();
final ShadowAlertDialog shadowSettingsAlertDialog = Shadows.shadowOf(settingsAlertDialog);
Assert.assertEquals("Override default dictionary", shadowSettingsAlertDialog.getItems()[1]);
shadowSettingsAlertDialog.clickOnItem(1);
final AlertDialog dictionaryAlertDialog = ShadowAlertDialog.getLatestAlertDialog();
Assert.assertNotSame(dictionaryAlertDialog, settingsAlertDialog);
final ShadowAlertDialog shadowDictionaryAlertDialog = Shadows.shadowOf(dictionaryAlertDialog);
Assert.assertEquals("Override English dictionary", shadowDictionaryAlertDialog.getTitle());
View.OnClickListener positiveListener = Shadows.shadowOf(dictionaryAlertDialog.getButton(AlertDialog.BUTTON_POSITIVE)).getOnClickListener();
View.OnClickListener negativeListener = Shadows.shadowOf(dictionaryAlertDialog.getButton(AlertDialog.BUTTON_NEGATIVE)).getOnClickListener();
View.OnClickListener clearListener = Shadows.shadowOf(dictionaryAlertDialog.getButton(AlertDialog.BUTTON_NEUTRAL)).getOnClickListener();
Assert.assertNotNull(positiveListener);
Assert.assertNotNull(negativeListener);
Assert.assertNotNull(clearListener);
}
@Test
public void testSetInputViewClippingIssues() throws Exception {
Assert.assertFalse(mAnySoftKeyboardUnderTest.isFullscreenMode());
final Window window = mAnySoftKeyboardUnderTest.getWindow().getWindow();
Assert.assertNotNull(window);
Assert.assertEquals(ViewGroup.LayoutParams.MATCH_PARENT, window.getAttributes().height);
final View inputArea = window.findViewById(android.R.id.inputArea);
Assert.assertNotNull(inputArea);
Assert.assertNotNull(inputArea.getParent());
final View parentView = (View) inputArea.getParent();
Assert.assertEquals(ViewGroup.LayoutParams.WRAP_CONTENT, parentView.getLayoutParams().height);
Assert.assertEquals(Gravity.BOTTOM, ((FrameLayout.LayoutParams) parentView.getLayoutParams()).gravity);
}
@Test
@Config(qualifiers = "land")
public void testSetInputViewClippingIssuesInLandscape() throws Exception {
Assert.assertTrue(mAnySoftKeyboardUnderTest.isFullscreenMode());
final Window window = mAnySoftKeyboardUnderTest.getWindow().getWindow();
Assert.assertNotNull(window);
Assert.assertEquals(ViewGroup.LayoutParams.MATCH_PARENT, window.getAttributes().height);
final View inputArea = window.findViewById(android.R.id.inputArea);
Assert.assertNotNull(inputArea);
Assert.assertNotNull(inputArea.getParent());
final View parentView = (View) inputArea.getParent();
Assert.assertEquals(ViewGroup.LayoutParams.MATCH_PARENT, parentView.getLayoutParams().height);
Assert.assertEquals(Gravity.BOTTOM, ((FrameLayout.LayoutParams) parentView.getLayoutParams()).gravity);
}
@Test
public void testResetViewOnAddOnChange() throws Exception {
final InputViewBinder inputView = mAnySoftKeyboardUnderTest.getInputView();
Assert.assertNotNull(inputView);
Mockito.reset(inputView);
mAnySoftKeyboardUnderTest.onAddOnsCriticalChange();
Assert.assertNotNull(mAnySoftKeyboardUnderTest.getInputView());
Assert.assertSame(inputView, mAnySoftKeyboardUnderTest.getInputView());
}
}
// ===== Begin file: opba-banking-protocol-facade/src/main/java/de/adorsys/opba/protocol/facade/util/logresolver/domain/context/ServiceContextLog.java =====
package de.adorsys.opba.protocol.facade.util.logresolver.domain.context;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import de.adorsys.opba.protocol.api.dto.NotSensitiveData;
import de.adorsys.opba.protocol.api.dto.context.Context;
import de.adorsys.opba.protocol.api.dto.request.FacadeServiceableGetter;
import de.adorsys.opba.protocol.facade.util.logresolver.domain.request.RequestLog;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import static de.adorsys.opba.protocol.api.Constants.NULL;
@Getter
@RequiredArgsConstructor
public class ServiceContextLog<REQUEST extends FacadeServiceableGetter> implements NotSensitiveData {

    // Shared, thread-safe mapper with java.time support; dates are serialized as ISO strings.
    private static final ObjectMapper JSON_MAPPER = new ObjectMapper()
            .registerModule(new JavaTimeModule())
            .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);

    private final Context<REQUEST> ctx;

    /**
     * Renders a log-safe view of the wrapped context, delegating the request
     * portion to {@link RequestLog} so sensitive payload data stays out.
     */
    @Override
    public String getNotSensitiveData() {
        if (null == ctx) {
            return NULL;
        }
        StringBuilder result = new StringBuilder("ServiceContextLog(");
        result.append("contextClass=").append(ctx.getClass());
        result.append(", serviceSessionId=").append(ctx.getServiceSessionId());
        result.append(", authContext=").append(ctx.getAuthContext());
        result.append(", ").append(ctx.loggableBankId());
        result.append(", ").append(requestNotSensitiveData());
        result.append(")");
        return result.toString();
    }

    /**
     * Full JSON dump of the context. Unlike {@link #getNotSensitiveData()} this may
     * contain sensitive data; use only where that is acceptable.
     */
    @SneakyThrows
    @Override
    public String toString() {
        String json = JSON_MAPPER.writeValueAsString(ctx);
        Object contextClass = (null != ctx) ? ctx.getClass() : NULL;
        return "ServiceContextLog{"
                + "contextClass=" + contextClass
                + ", context=" + json
                + '}';
    }

    // Builds the non-sensitive representation of the request carried by the context.
    private String requestNotSensitiveData() {
        return new RequestLog<REQUEST>(ctx.getRequest()).getNotSensitiveData();
    }
}
// ===== Begin file: plugins/hazelcast/gui/src/main/java/com/github/rmannibucau/sirona/plugin/hazelcast/gui/HazelcastEndpoints.java =====
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.rmannibucau.sirona.plugin.hazelcast.gui;
import com.github.rmannibucau.sirona.counters.Unit;
import com.github.rmannibucau.sirona.reporting.web.plugin.api.MapBuilder;
import com.github.rmannibucau.sirona.reporting.web.plugin.api.Regex;
import com.github.rmannibucau.sirona.reporting.web.plugin.api.Template;
import com.github.rmannibucau.sirona.repositories.Repositories;
import com.github.rmannibucau.sirona.repositories.Repository;
import static com.github.rmannibucau.sirona.reporting.web.plugin.api.graph.Graphs.generateReport;
public class HazelcastEndpoints {
    /** Landing page of the Hazelcast plugin. */
    @Regex
    public Template home() {
        return new Template("hazelcast/home.vm");
    }

    /** Gauge page for the cluster partition count. */
    @Regex("/partitions")
    public Template partitions() {
        return gaugeTemplate("Partition number", "partitions");
    }

    /** Gauge page for the cluster member count. */
    @Regex("/members")
    public Template members() {
        return gaugeTemplate("Members number", "members");
    }

    /** JSON data points for one gauge role over the given time window. */
    @Regex("/([^/]*)/([0-9]*)/([0-9]*)")
    public String jsonDetail(final String role, final long start, final long end) {
        return generateReport(role, Repository.INSTANCE.findGaugeRole(role), start, end);
    }

    // Builds the gauges view listing all counters whose name starts with "hazelcast-<name>-".
    private static Template gaugeTemplate(final String title, final String name) {
        final String prefix = "hazelcast-" + name + "-";
        final MapBuilder<String, Object> model = new MapBuilder<String, Object>()
                .set("title", title)
                .set("members", Repositories.names(Repositories.findByPrefixAndUnit(prefix, Unit.UNARY)));
        return new Template("hazelcast/gauges.vm", model.build());
    }
}
// ===== Begin file: critter/src/main/java/com/udacity/jdnd/course3/critter/service/CustomerService.java =====
package com.udacity.jdnd.course3.critter.service;
import com.udacity.jdnd.course3.critter.data.entity.Customer;
import com.udacity.jdnd.course3.critter.data.entity.Pet;
import com.udacity.jdnd.course3.critter.data.repository.CustomerRepository;
import com.udacity.jdnd.course3.critter.data.repository.PetRepository;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Optional;
@Service
@Transactional
public class CustomerService {
private CustomerRepository customerRepository;
private PetRepository petRepository;
public CustomerService(CustomerRepository repo, PetRepository petRepository) {
customerRepository = repo;
this.petRepository = petRepository;
}
public Customer save(Customer customer) {
return customerRepository.save(customer);
}
public Customer getCustomer(long id) {
Optional<Customer> optionalCustomer = customerRepository.findById(id);
if (optionalCustomer.isPresent()) {
return optionalCustomer.get();
} else {
throw new ObjectNotFoundException("Cannot find customer with id: " + id);
}
}
public List<Customer> getAllCustomers() {
return customerRepository.findAll();
}
public Customer getByPetId(long petId) {
Optional<Pet> optionalPet = petRepository.findById(petId);
if (!optionalPet.isPresent()) {
throw new ObjectNotFoundException("Cannot find pet with id: " + petId);
}
return optionalPet.get().getOwner();
}
}
// ===== Begin file: modules/core/src/test/java/org/apache/ignite/spi/communication/GridAbstractCommunicationSelfTest.java =====
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spi.communication;
import java.net.BindException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.managers.communication.GridIoMessageFactory;
import org.apache.ignite.internal.util.typedef.CO;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteRunnable;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.spi.IgniteSpiAdapter;
import org.apache.ignite.testframework.GridSpiTestContext;
import org.apache.ignite.testframework.GridTestNode;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.IgniteMock;
import org.apache.ignite.testframework.junits.IgniteTestResources;
import org.apache.ignite.testframework.junits.spi.GridSpiAbstractTest;
import static org.apache.ignite.internal.IgniteNodeAttributes.ATTR_MACS;
/**
* Super class for all communication self tests.
* @param <T> Type of communication SPI.
*/
@SuppressWarnings({"JUnitAbstractTestClassNamingConvention"})
public abstract class GridAbstractCommunicationSelfTest<T extends CommunicationSpi> extends GridSpiAbstractTest<T> {
    /** Monotonically increasing id embedded in every test message sent. */
    private static long msgId = 1;

    /** Per-node test resources; stopped in afterTestsStopped(). */
    private static final Collection<IgniteTestResources> spiRsrcs = new ArrayList<>();

    /** Sender node id -> set of recipient node ids still expected to confirm delivery. */
    private static final Map<UUID, Set<UUID>> msgDestMap = new HashMap<>();

    /** Started communication SPIs keyed by the id of the local node they serve. */
    protected static final Map<UUID, CommunicationSpi<Message>> spis = new HashMap<>();

    /** Test cluster nodes, one per SPI instance. */
    protected static final Collection<ClusterNode> nodes = new ArrayList<>();

    /** Monitor guarding msgDestMap; notified when the map drains. */
    private static final Object mux = new Object();

    /** Whether SPIs should be configured with SSL (set by subclasses before start). */
    protected boolean useSsl = false;

    /*
     * Register the custom test message type with the message factory once per JVM.
     */
    static {
        GridIoMessageFactory.registerCustom(GridTestMessage.DIRECT_TYPE, new CO<Message>() {
            @Override public Message apply() {
                return new GridTestMessage();
            }
        });
    }
    /**
     * Communication listener that tracks delivery of {@link GridTestMessage}s by removing
     * recipients from the shared {@code msgDestMap} and notifying {@code mux} once all
     * expected messages have arrived.
     */
    @SuppressWarnings({"deprecation"})
    private class MessageListener implements CommunicationListener<Message> {
        /** Id of the node this listener is attached to. */
        private final UUID locNodeId;

        /**
         * @param locNodeId Local node ID.
         */
        MessageListener(UUID locNodeId) {
            assert locNodeId != null;

            this.locNodeId = locNodeId;
        }

        /** {@inheritDoc} */
        @Override public void onMessage(UUID nodeId, Message msg, IgniteRunnable msgC) {
            info("Received message [locNodeId=" + locNodeId + ", nodeId=" + nodeId +
                ", msg=" + msg + ']');

            // Run the message-processed closure handed in by the SPI.
            msgC.run();

            if (msg instanceof GridTestMessage) {
                GridTestMessage testMsg = (GridTestMessage)msg;

                if (!testMsg.getSourceNodeId().equals(nodeId))
                    fail("Listener nodeId not equals to message nodeId.");

                synchronized (mux) {
                    // Get list of all recipients for the message.
                    Set<UUID> recipients = msgDestMap.get(testMsg.getSourceNodeId());

                    if (recipients != null) {
                        // Remove this node from a list of recipients.
                        if (!recipients.remove(locNodeId))
                            fail("Received unknown message [locNodeId=" + locNodeId + ", msg=" + testMsg + ']');

                        // If all recipients received their messages,
                        // remove source nodes from sent messages map.
                        if (recipients.isEmpty())
                            msgDestMap.remove(testMsg.getSourceNodeId());

                        // Wake the test thread blocked in mux.wait() once every entry is drained.
                        if (msgDestMap.isEmpty())
                            mux.notifyAll();
                    }
                    else
                        fail("Received unknown message [locNodeId=" + locNodeId + ", msg=" + testMsg + ']');
                }
            }
        }

        /** {@inheritDoc} */
        @Override public void onDisconnected(UUID nodeId) {
            // No-op.
        }
    }

    /** Creates the test without auto-starting a grid (handled by startSpis() instead). */
    protected GridAbstractCommunicationSelfTest() {
        super(false);
    }
    /**
     * Sends one message from every SPI to every node and waits (up to
     * {@link #getMaxTransmitMessagesTime()}) until every recipient reports delivery.
     *
     * @throws Exception If failed.
     */
    public void testSendToOneNode() throws Exception {
        info(">>> Starting send to one node test. <<<");

        msgDestMap.clear();

        for (Entry<UUID, CommunicationSpi<Message>> entry : spis.entrySet()) {
            for (ClusterNode node : nodes) {
                synchronized (mux) {
                    // Record the expected recipient before sending, so the listener
                    // cannot race ahead of the bookkeeping.
                    if (!msgDestMap.containsKey(entry.getKey()))
                        msgDestMap.put(entry.getKey(), new HashSet<UUID>());

                    msgDestMap.get(entry.getKey()).add(node.id());
                }

                entry.getValue().sendMessage(node, new GridTestMessage(entry.getKey(), msgId++, 0));
            }
        }

        long now = System.currentTimeMillis();
        long endTime = now + getMaxTransmitMessagesTime();

        synchronized (mux) {
            // MessageListener drains msgDestMap and notifies mux as messages arrive.
            while (now < endTime && !msgDestMap.isEmpty()) {
                mux.wait(endTime - now);

                now = System.currentTimeMillis();
            }

            if (!msgDestMap.isEmpty()) {
                for (Entry<UUID, Set<UUID>> entry : msgDestMap.entrySet()) {
                    error("Failed to receive all messages [sender=" + entry.getKey() +
                        ", dest=" + entry.getValue() + ']');
                }
            }

            assert msgDestMap.isEmpty() : "Some messages were not received.";
        }
    }

    /**
     * @throws Exception If failed.
     */
    @SuppressWarnings("WaitWithoutCorrespondingNotify")
    public void testSendToManyNodes() throws Exception {
        msgDestMap.clear();

        // Send message from each SPI to all SPI's, including itself.
        for (Entry<UUID, CommunicationSpi<Message>> entry : spis.entrySet()) {
            UUID sndId = entry.getKey();

            CommunicationSpi<Message> commSpi = entry.getValue();

            for (ClusterNode node : nodes) {
                synchronized (mux) {
                    if (!msgDestMap.containsKey(sndId))
                        msgDestMap.put(sndId, new HashSet<UUID>());

                    msgDestMap.get(sndId).add(node.id());
                }

                commSpi.sendMessage(node, new GridTestMessage(sndId, msgId++, 0));
            }
        }

        long now = System.currentTimeMillis();
        long endTime = now + getMaxTransmitMessagesTime();

        synchronized (mux) {
            // mux is notified by MessageListener.onMessage() when the map drains.
            while (now < endTime && !msgDestMap.isEmpty()) {
                mux.wait(endTime - now);

                now = System.currentTimeMillis();
            }

            if (!msgDestMap.isEmpty()) {
                for (Entry<UUID, Set<UUID>> entry : msgDestMap.entrySet()) {
                    error("Failed to receive all messages [sender=" + entry.getKey() +
                        ", dest=" + entry.getValue() + ']');
                }
            }

            assert msgDestMap.isEmpty() : "Some messages were not received.";
        }
    }
    /**
     * @param idx Node index.
     * @return Spi.
     */
    protected abstract CommunicationSpi<Message> getSpi(int idx);

    /**
     * @return Spi count.
     */
    protected int getSpiCount() {
        return 2;
    }

    /**
     * @return Max time for message delivery.
     */
    protected int getMaxTransmitMessagesTime() {
        return 20000;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        // Starting the SPIs can fail with a BindException when the port is still busy;
        // retry up to three times with a 30 second delay between attempts.
        for (int i = 0; i < 3; i++) {
            try {
                startSpis();

                break;
            }
            catch (IgniteCheckedException e) {
                if (e.hasCause(BindException.class)) {
                    if (i < 2) {
                        info("Failed to start SPIs because of BindException, will retry after delay.");

                        // Tear down any partially started SPIs before retrying.
                        afterTestsStopped();

                        U.sleep(30_000);
                    }
                    else
                        throw e;
                }
                else
                    throw e;
            }
        }
    }
    /**
     * Creates and starts {@code getSpiCount()} SPI instances, each with its own test node,
     * resources and message listener, then cross-registers every node as a remote node in
     * all other SPIs' contexts.
     *
     * @throws Exception If failed.
     */
    private void startSpis() throws Exception {
        spis.clear();
        nodes.clear();
        spiRsrcs.clear();

        Map<ClusterNode, GridSpiTestContext> ctxs = new HashMap<>();

        for (int i = 0; i < getSpiCount(); i++) {
            CommunicationSpi<Message> spi = getSpi(i);

            // Inject an instance name via reflection so the SPI can identify itself in logs.
            GridTestUtils.setFieldValue(spi, IgniteSpiAdapter.class, "igniteInstanceName", "grid-" + i);

            IgniteTestResources rsrcs = new IgniteTestResources();

            GridTestNode node = new GridTestNode(rsrcs.getNodeId());

            node.order(i);

            GridSpiTestContext ctx = initSpiContext();

            ctx.setLocalNode(node);

            info(">>> Initialized context: nodeId=" + ctx.localNode().id());

            spiRsrcs.add(rsrcs);

            rsrcs.inject(spi);

            if (useSsl) {
                // Swap in a configuration carrying an SSL context factory on the mock Ignite.
                IgniteMock ignite = GridTestUtils.getFieldValue(spi, IgniteSpiAdapter.class, "ignite");

                IgniteConfiguration cfg = ignite.configuration()
                    .setSslContextFactory(GridTestUtils.sslFactory());

                ignite.setStaticCfg(cfg);
            }

            spi.setListener(new MessageListener(rsrcs.getNodeId()));

            node.setAttributes(spi.getNodeAttributes());
            node.setAttribute(ATTR_MACS, F.concat(U.allLocalMACs(), ", "));

            nodes.add(node);

            spi.spiStart(getTestIgniteInstanceName() + (i + 1));

            spis.put(rsrcs.getNodeId(), spi);

            spi.onContextInitialized(ctx);

            ctxs.put(node, ctx);
        }

        // For each context set remote nodes.
        for (Entry<ClusterNode, GridSpiTestContext> e : ctxs.entrySet()) {
            for (ClusterNode n : nodes) {
                if (!n.equals(e.getKey()))
                    e.getValue().remoteNodes().add(n);
            }
        }
    }
/** {@inheritDoc} */
@Override protected void afterTestsStopped() throws Exception {
for (CommunicationSpi<Message> spi : spis.values()) {
spi.onContextDestroyed();
spi.setListener(null);
spi.spiStop();
}
for (IgniteTestResources rsrcs : spiRsrcs)
rsrcs.stopThreads();
}
}
// ===== Begin file: hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AbstractPeerProcedure.java =====
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.replication;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.master.procedure.PeerProcedureInterface;
import org.apache.hadoop.hbase.master.procedure.ProcedurePrepareLatch;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
/**
 * The base class for all replication peer related procedure.
 */
@InterfaceAudience.Private
public abstract class AbstractPeerProcedure<TState>
    extends AbstractPeerNoLockProcedure<TState> implements PeerProcedureInterface {

  // Kept to stay compatible with old clients that can only return after updateStorage.
  protected ProcedurePrepareLatch latch;

  protected AbstractPeerProcedure() {
  }

  protected AbstractPeerProcedure(String peerId) {
    super(peerId);
    latch = ProcedurePrepareLatch.createLatch(2, 1);
  }

  public ProcedurePrepareLatch getLatch() {
    return latch;
  }

  @Override
  protected LockState acquireLock(MasterProcedureEnv env) {
    // Either we have to wait for the exclusive peer lock, or we now hold it.
    return env.getProcedureScheduler().waitPeerExclusiveLock(this, peerId)
        ? LockState.LOCK_EVENT_WAIT
        : LockState.LOCK_ACQUIRED;
  }

  @Override
  protected void releaseLock(MasterProcedureEnv env) {
    env.getProcedureScheduler().wakePeerExclusiveLock(this, peerId);
  }

  @Override
  protected boolean holdLock(MasterProcedureEnv env) {
    return true;
  }

  /** Schedules a RefreshPeerProcedure of the given type on every online region server. */
  protected final void refreshPeer(MasterProcedureEnv env, PeerOperationType type) {
    RefreshPeerProcedure[] subProcedures =
        env.getMasterServices().getServerManager().getOnlineServersList().stream()
            .map(serverName -> new RefreshPeerProcedure(peerId, type, serverName))
            .toArray(RefreshPeerProcedure[]::new);
    addChildProcedure(subProcedures);
  }

  // Overridden in tests to simulate errors.
  @VisibleForTesting
  protected void enablePeer(MasterProcedureEnv env) throws ReplicationException {
    env.getReplicationPeerManager().enablePeer(peerId);
  }
}
// ===== Begin file: external/dx/src/main/java/____/com/android/dx/dex/file/StringDataItem.java =====
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ____.com.android.dx.dex.file;
import ____.com.android.dex.Leb128;
import ____.com.android.dx.rop.cst.CstString;
import ____.com.android.dx.util.AnnotatedOutput;
import ____.com.android.dx.util.ByteArray;
import ____.com.android.dx.util.Hex;
/**
 * Representation of string data for a particular string, in a Dalvik file.
 */
public final class StringDataItem extends OffsettedItem {
    /** {@code non-null;} the string value */
    private final CstString value;

    /**
     * Constructs an instance.
     *
     * @param value {@code non-null;} the string value
     */
    public StringDataItem(CstString value) {
        super(1, writeSize(value));
        this.value = value;
    }

    /**
     * Gets the write size for a given value.
     *
     * @param value {@code non-null;} the string value
     * @return {@code >= 2}; the write size, in bytes
     */
    private static int writeSize(CstString value) {
        int utf16Size = value.getUtf16Size();
        int lengthHeaderSize = Leb128.unsignedLeb128Size(utf16Size);

        // The +1 accounts for the '\0' termination byte.
        return lengthHeaderSize + value.getUtf8Size() + 1;
    }

    /** {@inheritDoc} */
    @Override
    public ItemType itemType() {
        return ItemType.TYPE_STRING_DATA_ITEM;
    }

    /** {@inheritDoc} */
    @Override
    public void addContents(DexFile file) {
        // Nothing to do here.
    }

    /** {@inheritDoc} */
    @Override
    public void writeTo0(DexFile file, AnnotatedOutput out) {
        ByteArray data = value.getBytes();
        int utf16Size = value.getUtf16Size();

        if (out.annotates()) {
            // Annotated output: describe the length header and the quoted payload.
            out.annotate(Leb128.unsignedLeb128Size(utf16Size), "utf16_size: " + Hex.u4(utf16Size));
            out.annotate(data.size() + 1, value.toQuoted());
        }

        out.writeUleb128(utf16Size);
        out.write(data);
        out.writeByte(0);
    }

    /** {@inheritDoc} */
    @Override
    public String toHuman() {
        return value.toQuoted();
    }

    /** {@inheritDoc} */
    @Override
    protected int compareTo0(OffsettedItem other) {
        return value.compareTo(((StringDataItem) other).value);
    }
}
// ===== Begin file: java/org/apache/tomcat/util/net/AbstractEndpoint.java =====
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.net;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import org.apache.juli.logging.Log;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.IntrospectionUtils;
import org.apache.tomcat.util.collections.SynchronizedStack;
import org.apache.tomcat.util.modeler.Registry;
import org.apache.tomcat.util.net.AbstractEndpoint.Acceptor.AcceptorState;
import org.apache.tomcat.util.res.StringManager;
import org.apache.tomcat.util.threads.LimitLatch;
import org.apache.tomcat.util.threads.ResizableExecutor;
import org.apache.tomcat.util.threads.TaskQueue;
import org.apache.tomcat.util.threads.TaskThreadFactory;
import org.apache.tomcat.util.threads.ThreadPoolExecutor;
/**
* @param <S> The type for the sockets managed by this endpoint.
*
* @author <NAME>
* @author <NAME>
*/
public abstract class AbstractEndpoint<S> {
    // -------------------------------------------------------------- Constants

    /** Localized string resources for log and exception messages. */
    protected static final StringManager sm = StringManager.getManager(AbstractEndpoint.class);

    /**
     * Callback interface through which the endpoint hands accepted sockets to
     * the protocol layer for processing.
     */
    public static interface Handler<S> {

        /**
         * Different types of socket states to react upon.
         */
        public enum SocketState {
            // TODO Add a new state to the AsyncStateMachine and remove
            // ASYNC_END (if possible)
            OPEN, CLOSED, LONG, ASYNC_END, SENDFILE, UPGRADING, UPGRADED, SUSPENDED
        }


        /**
         * Process the provided socket with the given current status.
         *
         * @param socket The socket to process
         * @param status The current socket status
         *
         * @return The state of the socket after processing
         */
        public SocketState process(SocketWrapperBase<S> socket,
                SocketEvent status);


        /**
         * Obtain the GlobalRequestProcessor associated with the handler.
         *
         * @return the GlobalRequestProcessor
         */
        public Object getGlobal();


        /**
         * Obtain the currently open sockets.
         *
         * @return The sockets for which the handler is tracking a currently
         *         open connection
         */
        public Set<S> getOpenSockets();

        /**
         * Release any resources associated with the given SocketWrapper.
         *
         * @param socketWrapper The socketWrapper to release resources for
         */
        public void release(SocketWrapperBase<S> socketWrapper);

        /**
         * Inform the handler that the endpoint has stopped accepting any new
         * connections. Typically, the endpoint will be stopped shortly
         * afterwards but it is possible that the endpoint will be resumed so
         * the handler should not assume that a stop will follow.
         */
        public void pause();

        /**
         * Recycle resources associated with the handler.
         */
        public void recycle();
    }

    /** State of the server socket binding relative to the init/start/stop lifecycle. */
    protected enum BindState {
        UNBOUND, BOUND_ON_INIT, BOUND_ON_START, SOCKET_CLOSED_ON_STOP
    }

    /**
     * Base class for the background thread(s) that accept incoming connections.
     */
    public abstract static class Acceptor implements Runnable {
        /** Lifecycle states reported by an acceptor thread. */
        public enum AcceptorState {
            NEW, RUNNING, PAUSED, ENDED
        }

        protected volatile AcceptorState state = AcceptorState.NEW;
        public final AcceptorState getState() {
            return state;
        }

        private String threadName;
        protected final void setThreadName(final String threadName) {
            this.threadName = threadName;
        }

        protected final String getThreadName() {
            return threadName;
        }
    }


    // Delay bounds in milliseconds; presumably used for exponential back-off after
    // repeated accept errors - NOTE(review): confirm against the accept loop (not in view).
    private static final int INITIAL_ERROR_DELAY = 50;
    private static final int MAX_ERROR_DELAY = 1600;
public static long toTimeout(long timeout) {
// Many calls can't do infinite timeout so use Long.MAX_VALUE if timeout is <= 0
return (timeout > 0) ? timeout : Long.MAX_VALUE;
}
    // ----------------------------------------------------------------- Fields

    /**
     * Running state of the endpoint.
     */
    protected volatile boolean running = false;


    /**
     * Will be set to true whenever the endpoint is paused.
     */
    protected volatile boolean paused = false;

    /**
     * Are we using an internal executor
     */
    protected volatile boolean internalExecutor = true;


    /**
     * counter for nr of connections handled by an endpoint
     */
    private volatile LimitLatch connectionLimitLatch = null;

    /**
     * Socket properties
     */
    protected SocketProperties socketProperties = new SocketProperties();
    public SocketProperties getSocketProperties() {
        return socketProperties;
    }

    /**
     * Threads used to accept new connections and pass them to worker threads.
     */
    protected Acceptor[] acceptors;

    /**
     * Cache for SocketProcessor objects
     */
    protected SynchronizedStack<SocketProcessorBase<S>> processorCache;

    /** JMX object name this endpoint is registered under, if any. */
    private ObjectName oname = null;


    // ----------------------------------------------------------------- Properties

    /** Host name of the default SSL configuration; always stored in lower case. */
    private String defaultSSLHostConfigName = SSLHostConfig.DEFAULT_SSL_HOST_NAME;

    /**
     * @return The host name for the default SSL configuration for this endpoint
     *         - always in lower case.
     */
    public String getDefaultSSLHostConfigName() {
        return defaultSSLHostConfigName;
    }

    public void setDefaultSSLHostConfigName(String defaultSSLHostConfigName) {
        // Lower-cased because host names are used as keys in the case-sensitive sslHostConfigs map.
        this.defaultSSLHostConfigName = defaultSSLHostConfigName.toLowerCase(Locale.ENGLISH);
    }


    /** SSL configurations keyed by lower-case host name. */
    protected ConcurrentMap<String,SSLHostConfig> sslHostConfigs = new ConcurrentHashMap<>();
    /**
     * Add the given SSL Host configuration.
     *
     * @param sslHostConfig The configuration to add
     *
     * @throws IllegalArgumentException If the host name is not valid or if a
     *                                  configuration has already been provided
     *                                  for that host
     */
    public void addSslHostConfig(SSLHostConfig sslHostConfig) throws IllegalArgumentException {
        addSslHostConfig(sslHostConfig, false);
    }


    /**
     * Add the given SSL Host configuration, optionally replacing the existing
     * configuration for the given host.
     *
     * @param sslHostConfig The configuration to add
     * @param replace       If {@code true} replacement of an existing
     *                      configuration is permitted, otherwise any such
     *                      attempted replacement will trigger an exception
     *
     * @throws IllegalArgumentException If the host name is not valid or if a
     *                                  configuration has already been provided
     *                                  for that host and replacement is not
     *                                  allowed
     */
    public void addSslHostConfig(SSLHostConfig sslHostConfig, boolean replace) throws IllegalArgumentException {
        String key = sslHostConfig.getHostName();
        if (key == null || key.length() == 0) {
            throw new IllegalArgumentException(sm.getString("endpoint.noSslHostName"));
        }
        // If the endpoint is already bound and TLS is enabled, build the SSLContext
        // eagerly so an invalid configuration is reported to the caller immediately.
        if (bindState != BindState.UNBOUND && bindState != BindState.SOCKET_CLOSED_ON_STOP &&
                isSSLEnabled()) {
            try {
                createSSLContext(sslHostConfig);
            } catch (Exception e) {
                throw new IllegalArgumentException(e);
            }
        }
        if (replace) {
            SSLHostConfig previous = sslHostConfigs.put(key, sslHostConfig);
            if (previous != null) {
                unregisterJmx(sslHostConfig);
            }
            registerJmx(sslHostConfig);

            // Do not release any SSLContexts associated with a replaced
            // SSLHostConfig. They may still be in used by existing connections
            // and releasing them would break the connection at best. Let GC
            // handle the clean up.
        } else {
            // putIfAbsent keeps the add atomic with respect to concurrent callers.
            SSLHostConfig duplicate = sslHostConfigs.putIfAbsent(key, sslHostConfig);
            if (duplicate != null) {
                // Undo the eager context creation above before rejecting the duplicate.
                releaseSSLContext(sslHostConfig);
                throw new IllegalArgumentException(sm.getString("endpoint.duplicateSslHostName", key));
            }
            registerJmx(sslHostConfig);
        }
    }
/**
 * Removes the SSL host configuration for the given host name, if such a
 * configuration exists.
 *
 * @param hostName The host name associated with the SSL host configuration
 *                 to remove
 *
 * @return The SSL host configuration that was removed, if any
 */
public SSLHostConfig removeSslHostConfig(String hostName) {
    if (hostName == null) {
        return null;
    }
    // Host names are case insensitive but stored/processed in lower case
    // internally because they are used as keys in a ConcurrentMap where
    // keys are compared in a case sensitive manner.
    String hostNameLower = hostName.toLowerCase(Locale.ENGLISH);
    if (hostNameLower.equals(getDefaultSSLHostConfigName())) {
        throw new IllegalArgumentException(
                sm.getString("endpoint.removeDefaultSslHostConfig", hostName));
    }
    SSLHostConfig sslHostConfig = sslHostConfigs.remove(hostNameLower);
    // The host name may never have been registered, in which case remove()
    // returns null. Guard so unregisterJmx() is not handed a null reference
    // (it dereferences its argument and would throw a NullPointerException).
    if (sslHostConfig != null) {
        unregisterJmx(sslHostConfig);
    }
    return sslHostConfig;
}
/**
 * Re-read the configuration files for the SSL host and replace the existing
 * SSL configuration with the updated settings. Note this replacement will
 * happen even if the settings remain unchanged.
 *
 * @param hostName The SSL host for which the configuration should be
 *                 reloaded. This must match a current SSL host
 */
public void reloadSslHostConfig(String hostName) {
    // Map keys are stored in lower case and compared case sensitively.
    // This method is reachable via several call paths, so normalise the
    // supplied host name here to guarantee the conversion always happens.
    String key = hostName.toLowerCase(Locale.ENGLISH);
    SSLHostConfig current = sslHostConfigs.get(key);
    if (current == null) {
        throw new IllegalArgumentException(
                sm.getString("endpoint.unknownSslHostName", hostName));
    }
    // Re-adding with replace=true rebuilds the SSLContext from the
    // (re-read) configuration files.
    addSslHostConfig(current, true);
}
/**
 * Re-read the configuration files for all SSL hosts and replace the
 * existing SSL configuration with the updated settings. Note this
 * replacement will happen even if the settings remain unchanged.
 */
public void reloadSslHostConfigs() {
    // Reload every registered host in turn.
    sslHostConfigs.keySet().forEach(this::reloadSslHostConfig);
}
/**
 * @return A snapshot of all SSL host configurations currently registered
 *         with this endpoint
 */
public SSLHostConfig[] findSslHostConfigs() {
    SSLHostConfig[] snapshot = sslHostConfigs.values().toArray(new SSLHostConfig[0]);
    return snapshot;
}
/**
 * Create the SSLContext for the given SSLHostConfig.
 *
 * @param sslHostConfig The SSLHostConfig for which the SSLContext should be
 *                      created
 * @throws Exception If the SSLContext cannot be created for the given
 *                   SSLHostConfig
 */
protected abstract void createSSLContext(SSLHostConfig sslHostConfig) throws Exception;
/**
 * Release the SSLContext for every registered SSL host. No-op when SSL is
 * not enabled for this endpoint.
 *
 * @throws Exception declared for subclass overrides; this implementation
 *                   does not throw
 */
protected void destroySsl() throws Exception {
    if (isSSLEnabled()) {
        for (SSLHostConfig sslHostConfig : sslHostConfigs.values()) {
            releaseSSLContext(sslHostConfig);
        }
    }
}
/**
 * Release the SSLContext, if any, associated with the SSLHostConfig.
 *
 * @param sslHostConfig The SSLHostConfig for which the SSLContext should be
 *                      released
 */
protected void releaseSSLContext(SSLHostConfig sslHostConfig) {
    for (SSLHostConfigCertificate certificate : sslHostConfig.getCertificates(true)) {
        // Fetch once and test once. The original code checked
        // getSslContext() for null and then re-fetched and re-tested the
        // same value - the inner check was redundant.
        SSLContext sslContext = certificate.getSslContext();
        if (sslContext != null) {
            sslContext.destroy();
        }
    }
}
/**
 * Look up the SSLHostConfig for the given host name. Lookup order is:
 * <ol>
 * <li>exact match</li>
 * <li>wild card match</li>
 * <li>default SSLHostConfig</li>
 * </ol>
 *
 * @param sniHostName Host name - must be in lower case
 *
 * @return The SSLHostConfig for the given host name.
 */
protected SSLHostConfig getSSLHostConfig(String sniHostName) {
    if (sniHostName != null) {
        // 1. Exact match on the supplied SNI host name.
        SSLHostConfig exact = sslHostConfigs.get(sniHostName);
        if (exact != null) {
            return exact;
        }
        // 2. Wildcard match: look for "*.domain" covering this host.
        int firstDot = sniHostName.indexOf('.');
        if (firstDot > -1) {
            SSLHostConfig wildcard = sslHostConfigs.get("*" + sniHostName.substring(firstDot));
            if (wildcard != null) {
                return wildcard;
            }
        }
    }
    // 3. Fall back to the default host configuration.
    SSLHostConfig fallback = sslHostConfigs.get(getDefaultSSLHostConfigName());
    if (fallback == null) {
        // Should never happen.
        throw new IllegalStateException();
    }
    return fallback;
}
/**
 * Has the user requested that send file be used where possible?
 */
private boolean useSendfile = true;
public boolean getUseSendfile() {
    return useSendfile;
}
public void setUseSendfile(boolean useSendfile) {
    this.useSendfile = useSendfile;
}
/**
 * Time to wait for the internal executor (if used) to terminate when the
 * endpoint is stopped in milliseconds. Defaults to 5000 (5 seconds).
 */
private long executorTerminationTimeoutMillis = 5000;
public long getExecutorTerminationTimeoutMillis() {
    return executorTerminationTimeoutMillis;
}
public void setExecutorTerminationTimeoutMillis(
        long executorTerminationTimeoutMillis) {
    this.executorTerminationTimeoutMillis = executorTerminationTimeoutMillis;
}
/**
 * Acceptor thread count.
 */
protected int acceptorThreadCount = 1;
// NOTE(review): changing the count does not resize a running acceptor array;
// it takes effect when the acceptor threads are (re)started - confirm.
public void setAcceptorThreadCount(int acceptorThreadCount) {
    this.acceptorThreadCount = acceptorThreadCount;
}
public int getAcceptorThreadCount() { return acceptorThreadCount; }
/**
 * Priority of the acceptor threads.
 */
protected int acceptorThreadPriority = Thread.NORM_PRIORITY;
public void setAcceptorThreadPriority(int acceptorThreadPriority) {
    this.acceptorThreadPriority = acceptorThreadPriority;
}
public int getAcceptorThreadPriority() { return acceptorThreadPriority; }
// Maximum number of concurrent connections; -1 means unlimited.
private int maxConnections = 10000;
public void setMaxConnections(int maxCon) {
    this.maxConnections = maxCon;
    LimitLatch latch = this.connectionLimitLatch;
    if (latch == null) {
        // Connection counting only starts once a positive limit is set.
        if (maxCon > 0) {
            initializeConnectionLatch();
        }
        return;
    }
    if (maxCon == -1) {
        // Unlimited: stop counting connections entirely.
        releaseConnectionLatch();
    } else {
        // Adjust the existing latch to the new limit.
        latch.setLimit(maxCon);
    }
}
public int getMaxConnections() { return this.maxConnections; }
/**
 * Return the current count of connections handled by this endpoint, if the
 * connections are counted (which happens when the maximum count of
 * connections is limited), or <code>-1</code> if they are not. This
 * property is added here so that this value can be inspected through JMX.
 * It is visible on "ThreadPool" MBean.
 *
 * <p>The count is incremented by the Acceptor before it tries to accept a
 * new connection. Until the limit is reached and thus the count cannot be
 * incremented, this value is more by 1 (the count of acceptors) than the
 * actual count of connections that are being served.
 *
 * @return The count
 */
public long getConnectionCount() {
    // Copy the volatile field once so the null check and the read agree.
    LimitLatch latch = connectionLimitLatch;
    if (latch != null) {
        return latch.getCount();
    }
    return -1;
}
/**
 * External Executor based thread pool.
 */
private Executor executor = null;
public void setExecutor(Executor executor) {
    this.executor = executor;
    // A null executor means this endpoint manages its own internal pool.
    this.internalExecutor = (executor == null);
}
public Executor getExecutor() { return executor; }
/**
 * Server socket port.
 */
private int port;
public int getPort() { return port; }
public void setPort(int port ) { this.port=port; }
/**
 * @return the port the server socket is actually bound to, or -1 if the
 *         socket is not bound or the address cannot be determined
 */
public final int getLocalPort() {
    try {
        InetSocketAddress boundTo = getLocalAddress();
        return (boundTo == null) ? -1 : boundTo.getPort();
    } catch (IOException ioe) {
        // Unable to determine the bound address - report "not bound".
        return -1;
    }
}
/**
 * Address for the server socket.
 */
private InetAddress address;
public InetAddress getAddress() { return address; }
public void setAddress(InetAddress address) { this.address = address; }
/**
 * Obtain the network address the server socket is bound to. This primarily
 * exists to enable the correct address to be used when unlocking the server
 * socket since it removes the guess-work involved if no address is
 * specifically set.
 *
 * @return The network address that the server socket is listening on or
 *         null if the server socket is not currently bound.
 *
 * @throws IOException If there is a problem determining the currently bound
 *                     socket
 */
protected abstract InetSocketAddress getLocalAddress() throws IOException;
/**
 * Allows the server developer to specify the acceptCount (backlog) that
 * should be used for server sockets. By default, this value
 * is 100.
 */
private int acceptCount = 100;
public void setAcceptCount(int acceptCount) {
    // Non-positive values are ignored; the current setting is retained.
    if (acceptCount > 0) {
        this.acceptCount = acceptCount;
    }
}
public int getAcceptCount() { return acceptCount; }
@Deprecated
public void setBacklog(int backlog) { setAcceptCount(backlog); }
@Deprecated
public int getBacklog() { return getAcceptCount(); }
/**
 * Controls when the Endpoint binds the port. <code>true</code>, the default
 * binds the port on {@link #init()} and unbinds it on {@link #destroy()}.
 * If set to <code>false</code> the port is bound on {@link #start()} and
 * unbound on {@link #stop()}.
 */
private boolean bindOnInit = true;
public boolean getBindOnInit() { return bindOnInit; }
public void setBindOnInit(boolean b) { this.bindOnInit = b; }
// Tracks whether/when the server socket was bound; drives bind/unbind
// decisions in start()/stop()/destroy() and closeServerSocketGraceful().
private volatile BindState bindState = BindState.UNBOUND;
/**
 * Keepalive timeout, if not set the soTimeout is used.
 */
private Integer keepAliveTimeout = null;
public int getKeepAliveTimeout() {
    Integer configured = keepAliveTimeout;
    // Fall back to the socket connection timeout when no explicit
    // keep-alive timeout has been configured.
    return (configured == null) ? getConnectionTimeout() : configured.intValue();
}
public void setKeepAliveTimeout(int keepAliveTimeout) {
    this.keepAliveTimeout = Integer.valueOf(keepAliveTimeout);
}
/**
 * Socket TCP no delay.
 *
 * @return The current TCP no delay setting for sockets created by this
 *         endpoint
 */
public boolean getTcpNoDelay() { return socketProperties.getTcpNoDelay();}
public void setTcpNoDelay(boolean tcpNoDelay) { socketProperties.setTcpNoDelay(tcpNoDelay); }
/**
 * Socket linger.
 *
 * @return The current socket linger time for sockets created by this
 *         endpoint
 */
public int getConnectionLinger() { return socketProperties.getSoLingerTime(); }
public void setConnectionLinger(int connectionLinger) {
    socketProperties.setSoLingerTime(connectionLinger);
    // A negative linger time disables linger entirely.
    socketProperties.setSoLingerOn(connectionLinger>=0);
}
@Deprecated
public int getSoLinger() { return getConnectionLinger(); }
@Deprecated
public void setSoLinger(int soLinger) { setConnectionLinger(soLinger);}
/**
 * Socket timeout.
 *
 * @return The current socket timeout for sockets created by this endpoint
 */
public int getConnectionTimeout() { return socketProperties.getSoTimeout(); }
public void setConnectionTimeout(int soTimeout) { socketProperties.setSoTimeout(soTimeout); }
@Deprecated
public int getSoTimeout() { return getConnectionTimeout(); }
@Deprecated
public void setSoTimeout(int soTimeout) { setConnectionTimeout(soTimeout); }
/**
 * SSL engine.
 */
private boolean SSLEnabled = false;
public boolean isSSLEnabled() { return SSLEnabled; }
public void setSSLEnabled(boolean SSLEnabled) { this.SSLEnabled = SSLEnabled; }
/**
 * Identifies if the endpoint supports ALPN. Note that a return value of
 * <code>true</code> implies that {@link #isSSLEnabled()} will also return
 * <code>true</code>.
 *
 * @return <code>true</code> if the endpoint supports ALPN in its current
 *         configuration, otherwise <code>false</code>.
 */
public abstract boolean isAlpnSupported();
// Core (minimum idle) worker thread count for the internal executor.
private int minSpareThreads = 10;
public void setMinSpareThreads(int minSpareThreads) {
    this.minSpareThreads = minSpareThreads;
    Executor executor = this.executor;
    if (internalExecutor && executor instanceof java.util.concurrent.ThreadPoolExecutor) {
        // The internal executor should always be an instance of
        // j.u.c.ThreadPoolExecutor but it may be null if the endpoint is
        // not running.
        // This check also avoids various threading issues.
        ((java.util.concurrent.ThreadPoolExecutor) executor).setCorePoolSize(minSpareThreads);
    }
}
public int getMinSpareThreads() {
    // Never report more spare threads than the maximum pool size.
    return Math.min(getMinSpareThreadsInternal(), getMaxThreads());
}
private int getMinSpareThreadsInternal() {
    // -1 signals "unknown" when an external executor is in use.
    if (internalExecutor) {
        return minSpareThreads;
    } else {
        return -1;
    }
}
/**
 * Maximum amount of worker threads.
 */
private int maxThreads = 200;
public void setMaxThreads(int maxThreads) {
    this.maxThreads = maxThreads;
    Executor executor = this.executor;
    if (internalExecutor && executor instanceof java.util.concurrent.ThreadPoolExecutor) {
        // The internal executor should always be an instance of
        // j.u.c.ThreadPoolExecutor but it may be null if the endpoint is
        // not running.
        // This check also avoids various threading issues.
        ((java.util.concurrent.ThreadPoolExecutor) executor).setMaximumPoolSize(maxThreads);
    }
}
public int getMaxThreads() {
    // -1 signals "unknown" when an external executor is in use.
    if (internalExecutor) {
        return maxThreads;
    } else {
        return -1;
    }
}
/**
 * Priority of the worker threads.
 */
protected int threadPriority = Thread.NORM_PRIORITY;
public void setThreadPriority(int threadPriority) {
    // Can't change this once the executor has started
    this.threadPriority = threadPriority;
}
public int getThreadPriority() {
    // -1 signals "unknown" when an external executor is in use.
    if (internalExecutor) {
        return threadPriority;
    } else {
        return -1;
    }
}
/**
 * Max keep alive requests
 */
private int maxKeepAliveRequests=100; // as in Apache HTTPD server
public int getMaxKeepAliveRequests() {
    return maxKeepAliveRequests;
}
public void setMaxKeepAliveRequests(int maxKeepAliveRequests) {
    this.maxKeepAliveRequests = maxKeepAliveRequests;
}
/**
 * The maximum number of headers in a request that are allowed.
 * 100 by default. A value of less than 0 means no limit.
 */
private int maxHeaderCount = 100; // as in Apache HTTPD server
public int getMaxHeaderCount() {
    return maxHeaderCount;
}
public void setMaxHeaderCount(int maxHeaderCount) {
    this.maxHeaderCount = maxHeaderCount;
}
/**
 * Name of the thread pool, which will be used for naming child threads.
 */
private String name = "TP";
public void setName(String name) { this.name = name; }
public String getName() { return name; }
/**
 * Name of domain to use for JMX registration.
 */
private String domain;
public void setDomain(String domain) { this.domain = domain; }
public String getDomain() { return domain; }
/**
 * The default is true - the created threads will be
 * in daemon mode. If set to false, the control thread
 * will not be daemon - and will keep the process alive.
 */
private boolean daemon = true;
public void setDaemon(boolean b) { daemon = b; }
public boolean getDaemon() { return daemon; }
/**
 * Expose asynchronous IO capability.
 */
private boolean useAsyncIO = true;
public void setUseAsyncIO(boolean useAsyncIO) { this.useAsyncIO = useAsyncIO; }
public boolean getUseAsyncIO() { return useAsyncIO; }
// Whether the OS-level deferred accept / accept filter feature is in use.
protected abstract boolean getDeferAccept();
// Protocols this endpoint may negotiate (e.g. via ALPN).
protected final List<String> negotiableProtocols = new ArrayList<>();
public void addNegotiatedProtocol(String negotiableProtocol) {
    negotiableProtocols.add(negotiableProtocol);
}
public boolean hasNegotiableProtocols() {
    return (negotiableProtocols.size() > 0);
}
/**
 * Handling of accepted sockets.
 */
private Handler<S> handler = null;
public void setHandler(Handler<S> handler ) { this.handler = handler; }
public Handler<S> getHandler() { return handler; }
/**
 * Attributes provide a way for configuration to be passed to sub-components
 * without the {@link org.apache.coyote.ProtocolHandler} being aware of the
 * properties available on those sub-components.
 */
protected HashMap<String, Object> attributes = new HashMap<>();
/**
 * Generic property setter called when a property for which a specific
 * setter already exists within the
 * {@link org.apache.coyote.ProtocolHandler} needs to be made available to
 * sub-components. The specific setter will call this method to populate the
 * attributes.
 *
 * @param name  Name of property to set
 * @param value The value to set the property to
 */
public void setAttribute(String name, Object value) {
    Log log = getLog();
    if (log.isTraceEnabled()) {
        log.trace(sm.getString("endpoint.setAttribute", name, value));
    }
    attributes.put(name, value);
}
/**
 * Used by sub-components to retrieve configuration information.
 *
 * @param key The name of the property for which the value should be
 *            retrieved
 *
 * @return The value of the specified property
 */
public Object getAttribute(String key) {
    Object value = attributes.get(key);
    Log log = getLog();
    if (log.isTraceEnabled()) {
        log.trace(sm.getString("endpoint.getAttribute", key, value));
    }
    return value;
}
/**
 * Generic property setter. Properties prefixed with {@code socket.} are
 * routed to the SocketProperties holder; everything else is set on this
 * endpoint via introspection. The raw name/value pair is always recorded
 * in the attributes map first.
 *
 * @param name  property name (optionally prefixed with {@code socket.})
 * @param value property value as a String
 * @return {@code true} if the property was applied
 */
public boolean setProperty(String name, String value) {
    setAttribute(name, value);
    final String socketName = "socket.";
    try {
        if (name.startsWith(socketName)) {
            String socketProp = name.substring(socketName.length());
            return IntrospectionUtils.setProperty(socketProperties, socketProp, value);
        }
        return IntrospectionUtils.setProperty(this, name, value, false);
    } catch (Exception x) {
        getLog().error("Unable to set attribute \""+name+"\" to \""+value+"\"",x);
        return false;
    }
}
/**
 * Generic property getter; checks the attributes map first, then falls
 * back to introspecting SocketProperties for {@code socket.}-prefixed
 * names.
 *
 * @param name property name
 * @return the property value, or {@code null} if unknown
 */
public String getProperty(String name) {
    String value = (String) getAttribute(name);
    final String socketName = "socket.";
    if (value == null && name.startsWith(socketName)) {
        Object result = IntrospectionUtils.getProperty(socketProperties,
                name.substring(socketName.length()));
        value = (result == null) ? null : result.toString();
    }
    return value;
}
/**
 * Return the amount of threads that are managed by the pool.
 *
 * @return the amount of threads that are managed by the pool
 */
public int getCurrentThreadCount() {
    Executor executor = this.executor;
    if (executor == null) {
        // No executor configured at all.
        return -2;
    }
    if (executor instanceof ThreadPoolExecutor) {
        return ((ThreadPoolExecutor) executor).getPoolSize();
    }
    if (executor instanceof ResizableExecutor) {
        return ((ResizableExecutor) executor).getPoolSize();
    }
    // Executor type does not expose a pool size.
    return -1;
}
/**
 * Return the amount of threads that are in use
 *
 * @return the amount of threads that are in use
 */
public int getCurrentThreadsBusy() {
    Executor executor = this.executor;
    if (executor == null) {
        // No executor configured at all.
        return -2;
    }
    if (executor instanceof ThreadPoolExecutor) {
        return ((ThreadPoolExecutor) executor).getActiveCount();
    }
    if (executor instanceof ResizableExecutor) {
        return ((ResizableExecutor) executor).getActiveCount();
    }
    // Executor type does not expose an active count.
    return -1;
}
// True while the endpoint is started (set elsewhere in this class).
public boolean isRunning() {
    return running;
}
// True while the endpoint is paused (not accepting new connections).
public boolean isPaused() {
    return paused;
}
/**
 * Create the internal worker thread pool, sized from the configured
 * min-spare / max thread counts, with a 60 second idle timeout.
 */
public void createExecutor() {
    internalExecutor = true;
    TaskQueue queue = new TaskQueue();
    String threadPrefix = getName() + "-exec-";
    TaskThreadFactory factory = new TaskThreadFactory(threadPrefix, daemon, getThreadPriority());
    executor = new ThreadPoolExecutor(getMinSpareThreads(), getMaxThreads(),
            60, TimeUnit.SECONDS, queue, factory);
    // Link the queue back to the pool so it can make dispatch decisions.
    queue.setParent((ThreadPoolExecutor) executor);
}
// Shut down the internal executor (if this endpoint owns one), waiting up
// to executorTerminationTimeoutMillis for in-flight tasks to finish.
// External executors supplied via setExecutor() are never touched.
public void shutdownExecutor() {
    Executor executor = this.executor;
    if (executor != null && internalExecutor) {
        this.executor = null;
        if (executor instanceof ThreadPoolExecutor) {
            //this is our internal one, so we need to shut it down
            ThreadPoolExecutor tpe = (ThreadPoolExecutor) executor;
            tpe.shutdownNow();
            long timeout = getExecutorTerminationTimeoutMillis();
            if (timeout > 0) {
                try {
                    tpe.awaitTermination(timeout, TimeUnit.MILLISECONDS);
                } catch (InterruptedException e) {
                    // Ignore
                }
                if (tpe.isTerminating()) {
                    getLog().warn(sm.getString("endpoint.warn.executorShutdown", getName()));
                }
            }
            // Break the queue->pool link so the queue can be GC'd cleanly.
            TaskQueue queue = (TaskQueue) tpe.getQueue();
            queue.setParent(null);
        }
    }
}
/**
 * Unlock the server socket accept using a bogus connection.
 */
protected void unlockAccept() {
    // Only try to unlock the acceptor if it is necessary
    int unlocksRequired = 0;
    for (Acceptor acceptor : acceptors) {
        if (acceptor.getState() == AcceptorState.RUNNING) {
            unlocksRequired++;
        }
    }
    if (unlocksRequired == 0) {
        return;
    }
    InetSocketAddress unlockAddress = null;
    InetSocketAddress localAddress = null;
    try {
        localAddress = getLocalAddress();
    } catch (IOException ioe) {
        getLog().debug(sm.getString("endpoint.debug.unlock.localFail", getName()), ioe);
    }
    if (localAddress == null) {
        getLog().warn(sm.getString("endpoint.debug.unlock.localNone", getName()));
        return;
    }
    try {
        unlockAddress = getUnlockAddress(localAddress);
        for (int i = 0; i < unlocksRequired; i++) {
            try (java.net.Socket s = new java.net.Socket()) {
                // Use at least 2s for the socket and connect timeouts but
                // honour larger configured values.
                int stmo = 2 * 1000;
                int utmo = 2 * 1000;
                if (getSocketProperties().getSoTimeout() > stmo) {
                    stmo = getSocketProperties().getSoTimeout();
                }
                if (getSocketProperties().getUnlockTimeout() > utmo) {
                    utmo = getSocketProperties().getUnlockTimeout();
                }
                s.setSoTimeout(stmo);
                s.setSoLinger(getSocketProperties().getSoLingerOn(),getSocketProperties().getSoLingerTime());
                if (getLog().isDebugEnabled()) {
                    getLog().debug("About to unlock socket for:" + unlockAddress);
                }
                s.connect(unlockAddress,utmo);
                if (getDeferAccept()) {
                    /*
                     * In the case of a deferred accept / accept filters we need to
                     * send data to wake up the accept. Send OPTIONS * to bypass
                     * even BSD accept filters. The Acceptor will discard it.
                     */
                    OutputStreamWriter sw;
                    sw = new OutputStreamWriter(s.getOutputStream(), "ISO-8859-1");
                    sw.write("OPTIONS * HTTP/1.0\r\n" +
                             "User-Agent: Tomcat wakeup connection\r\n\r\n");
                    sw.flush();
                }
                if (getLog().isDebugEnabled()) {
                    getLog().debug("Socket unlock completed for:" + unlockAddress);
                }
            }
        }
        // Wait for up to 1000ms acceptor threads to unlock. Particularly
        // for the unit tests, we want to exit this loop as quickly as
        // possible. However, we also don't want to trigger excessive CPU
        // usage if the unlock takes longer than expected. Therefore, we
        // initially wait for the unlock in a tight loop but if that takes
        // more than 1ms we start using short sleeps to reduce CPU usage.
        //
        // NOTE: an older, sleep(5)-based wait loop that performed exactly
        // this job used to precede this block. It was redundant (the two
        // loops waited on the same condition back to back) and could double
        // the effective wait, so it has been removed.
        long startTime = System.nanoTime();
        for (Acceptor acceptor : acceptors) {
            while (startTime + 1_000_000_000 > System.nanoTime() && acceptor.getState() == AcceptorState.RUNNING) {
                if (startTime + 1_000_000 < System.nanoTime()) {
                    Thread.sleep(1);
                }
            }
        }
    } catch(Throwable t) {
        ExceptionUtils.handleThrowable(t);
        if (getLog().isDebugEnabled()) {
            getLog().debug(sm.getString("endpoint.debug.unlock.fail", "" + getPort()), t);
        }
    }
}
// Choose an address suitable for the "wake up" connection used to unlock a
// blocking accept(). If the endpoint is bound to a wildcard address, scan
// the local interfaces for an address of the same IP family, preferring:
// routable address > loopback > link-local > literal "localhost".
private static InetSocketAddress getUnlockAddress(InetSocketAddress localAddress) throws SocketException {
    if (localAddress.getAddress().isAnyLocalAddress()) {
        // Need a local address of the same type (IPv4 or IPV6) as the
        // configured bind address since the connector may be configured
        // to not map between types.
        InetAddress loopbackUnlockAddress = null;
        InetAddress linkLocalUnlockAddress = null;
        Enumeration<NetworkInterface> networkInterfaces = NetworkInterface.getNetworkInterfaces();
        while (networkInterfaces.hasMoreElements()) {
            NetworkInterface networkInterface = networkInterfaces.nextElement();
            Enumeration<InetAddress> inetAddresses = networkInterface.getInetAddresses();
            while (inetAddresses.hasMoreElements()) {
                InetAddress inetAddress = inetAddresses.nextElement();
                // Same address family (Inet4Address vs Inet6Address) only.
                if (localAddress.getAddress().getClass().isAssignableFrom(inetAddress.getClass())) {
                    if (inetAddress.isLoopbackAddress()) {
                        if (loopbackUnlockAddress == null) {
                            loopbackUnlockAddress = inetAddress;
                        }
                    } else if (inetAddress.isLinkLocalAddress()) {
                        if (linkLocalUnlockAddress == null) {
                            linkLocalUnlockAddress = inetAddress;
                        }
                    } else {
                        // Use a non-link local, non-loop back address by default
                        return new InetSocketAddress(inetAddress, localAddress.getPort());
                    }
                }
            }
        }
        // Prefer loop back over link local since on some platforms (e.g.
        // OSX) some link local addresses are not included when listening on
        // all local addresses.
        if (loopbackUnlockAddress != null) {
            return new InetSocketAddress(loopbackUnlockAddress, localAddress.getPort());
        }
        if (linkLocalUnlockAddress != null) {
            return new InetSocketAddress(linkLocalUnlockAddress, localAddress.getPort());
        }
        // Fallback
        return new InetSocketAddress("localhost", localAddress.getPort());
    } else {
        // Bound to a specific address - connect straight to it.
        return localAddress;
    }
}
// ---------------------------------------------- Request processing methods
/**
 * Process the given SocketWrapper with the given status. Used to trigger
 * processing as if the Poller (for those endpoints that have one)
 * selected the socket.
 *
 * @param socketWrapper The socket wrapper to process
 * @param event         The socket event to be processed
 * @param dispatch      Should the processing be performed on a new
 *                      container thread
 *
 * @return if processing was triggered successfully
 */
public boolean processSocket(SocketWrapperBase<S> socketWrapper,
        SocketEvent event, boolean dispatch) {
    if (socketWrapper == null) {
        return false;
    }
    try {
        // Reuse a cached processor when available, otherwise create one.
        SocketProcessorBase<S> processor = processorCache.pop();
        if (processor == null) {
            processor = createSocketProcessor(socketWrapper, event);
        } else {
            processor.reset(socketWrapper, event);
        }
        Executor executor = getExecutor();
        if (dispatch && executor != null) {
            executor.execute(processor);
        } else {
            // Run inline on the current thread.
            processor.run();
        }
        return true;
    } catch (RejectedExecutionException ree) {
        getLog().warn(sm.getString("endpoint.executor.fail", socketWrapper) , ree);
    } catch (Throwable t) {
        ExceptionUtils.handleThrowable(t);
        // This means we got an OOM or similar creating a thread, or that
        // the pool and its queue are full
        getLog().error(sm.getString("endpoint.process.fail"), t);
    }
    return false;
}
protected abstract SocketProcessorBase<S> createSocketProcessor(
        SocketWrapperBase<S> socketWrapper, SocketEvent event);
// ------------------------------------------------------- Lifecycle methods
/*
 * NOTE: There is no maintenance of state or checking for valid transitions
 * within this class other than ensuring that bind/unbind are called in the
 * right place. It is expected that the calling code will maintain state and
 * prevent invalid state transitions.
 */
public abstract void bind() throws Exception;
public abstract void unbind() throws Exception;
public abstract void startInternal() throws Exception;
public abstract void stopInternal() throws Exception;
// Bind the server socket (when bindOnInit is set) and register this
// endpoint, its SocketProperties and all SSL host configs with JMX.
public void init() throws Exception {
    if (bindOnInit) {
        bind();
        bindState = BindState.BOUND_ON_INIT;
    }
    if (this.domain != null) {
        // Register endpoint (as ThreadPool - historical name)
        oname = new ObjectName(domain + ":type=ThreadPool,name=\"" + getName() + "\"");
        Registry.getRegistry(null, null).registerComponent(this, oname, null);
        ObjectName socketPropertiesOname = new ObjectName(domain +
                ":type=SocketProperties,name=\"" + getName() + "\"");
        socketProperties.setObjectName(socketPropertiesOname);
        Registry.getRegistry(null, null).registerComponent(socketProperties, socketPropertiesOname, null);
        // SSL host configs added before init() could not be registered
        // (domain was null); register them now.
        for (SSLHostConfig sslHostConfig : findSslHostConfigs()) {
            registerJmx(sslHostConfig);
        }
    }
}
// Register an SSLHostConfig and each of its certificates with JMX.
// Registration failures are logged, never propagated, so a JMX problem
// cannot break SSL configuration.
private void registerJmx(SSLHostConfig sslHostConfig) {
    if (domain == null) {
        // Before init the domain is null
        return;
    }
    ObjectName sslOname = null;
    try {
        // Quote the host name - it may contain characters (e.g. '*') that
        // are not valid in an unquoted ObjectName value.
        sslOname = new ObjectName(domain + ":type=SSLHostConfig,ThreadPool=\"" +
                getName() + "\",name=" + ObjectName.quote(sslHostConfig.getHostName()));
        sslHostConfig.setObjectName(sslOname);
        try {
            Registry.getRegistry(null, null).registerComponent(sslHostConfig, sslOname, null);
        } catch (Exception e) {
            getLog().warn(sm.getString("endpoint.jmxRegistrationFailed", sslOname), e);
        }
    } catch (MalformedObjectNameException e) {
        getLog().warn(sm.getString("endpoint.invalidJmxNameSslHost",
                sslHostConfig.getHostName()), e);
    }
    for (SSLHostConfigCertificate sslHostConfigCert : sslHostConfig.getCertificates()) {
        ObjectName sslCertOname = null;
        try {
            sslCertOname = new ObjectName(domain +
                    ":type=SSLHostConfigCertificate,ThreadPool=\"" + getName() +
                    "\",Host=" + ObjectName.quote(sslHostConfig.getHostName()) +
                    ",name=" + sslHostConfigCert.getType());
            sslHostConfigCert.setObjectName(sslCertOname);
            try {
                Registry.getRegistry(null, null).registerComponent(
                        sslHostConfigCert, sslCertOname, null);
            } catch (Exception e) {
                getLog().warn(sm.getString("endpoint.jmxRegistrationFailed", sslCertOname), e);
            }
        } catch (MalformedObjectNameException e) {
            getLog().warn(sm.getString("endpoint.invalidJmxNameSslHostCert",
                    sslHostConfig.getHostName(), sslHostConfigCert.getType()), e);
        }
    }
}
// Unregister an SSLHostConfig and each of its certificates from JMX.
private void unregisterJmx(SSLHostConfig sslHostConfig) {
    // Callers such as removeSslHostConfig() may resolve a host name that
    // was never registered; guard against the resulting null to avoid a
    // NullPointerException on sslHostConfig.getObjectName().
    if (sslHostConfig == null) {
        return;
    }
    Registry registry = Registry.getRegistry(null, null);
    registry.unregisterComponent(sslHostConfig.getObjectName());
    for (SSLHostConfigCertificate sslHostConfigCert : sslHostConfig.getCertificates()) {
        registry.unregisterComponent(sslHostConfigCert.getObjectName());
    }
}
/**
 * Bind the server socket if it has not been bound already (bindOnInit was
 * false) and then start the endpoint implementation.
 */
public final void start() throws Exception {
    if (bindState == BindState.UNBOUND) {
        bind();
        bindState = BindState.BOUND_ON_START;
    }
    startInternal();
}
/**
 * Create and start the configured number of acceptor threads.
 */
protected final void startAcceptorThreads() {
    int count = getAcceptorThreadCount();
    acceptors = new Acceptor[count];
    for (int i = 0; i < count; i++) {
        Acceptor acceptor = createAcceptor();
        String threadName = getName() + "-Acceptor-" + i;
        acceptor.setThreadName(threadName);
        acceptors[i] = acceptor;
        Thread thread = new Thread(acceptor, threadName);
        thread.setPriority(getAcceptorThreadPriority());
        thread.setDaemon(getDaemon());
        thread.start();
    }
}
/**
 * Hook to allow Endpoints to provide a specific Acceptor implementation.
 * @return the acceptor
 */
protected abstract Acceptor createAcceptor();
/**
 * Pause the endpoint, which will stop it accepting new connections.
 */
public void pause() {
    if (running && !paused) {
        paused = true;
        // Wake any acceptor blocked in accept() so it observes the pause.
        unlockAccept();
        getHandler().pause();
    }
}
/**
 * Resume the endpoint, which will make it start accepting new connections
 * again.
 */
public void resume() {
    if (running) {
        paused = false;
    }
}
// Stop the endpoint and, if the socket was bound on start() (or already
// closed gracefully), unbind it.
public final void stop() throws Exception {
    stopInternal();
    if (bindState == BindState.BOUND_ON_START || bindState == BindState.SOCKET_CLOSED_ON_STOP) {
        unbind();
        bindState = BindState.UNBOUND;
    }
}
// Unbind a socket bound on init() and remove all JMX registrations made
// in init()/registerJmx().
public final void destroy() throws Exception {
    if (bindState == BindState.BOUND_ON_INIT) {
        unbind();
        bindState = BindState.UNBOUND;
    }
    Registry registry = Registry.getRegistry(null, null);
    registry.unregisterComponent(oname);
    registry.unregisterComponent(socketProperties.getObjectName());
    for (SSLHostConfig sslHostConfig : findSslHostConfigs()) {
        unregisterJmx(sslHostConfig);
    }
}
// Logger supplied by the concrete endpoint implementation.
protected abstract Log getLog();
/**
 * Lazily create the latch that enforces the maxConnections limit.
 *
 * @return the latch, or {@code null} when connections are unlimited
 */
protected LimitLatch initializeConnectionLatch() {
    if (maxConnections == -1) {
        // Unlimited connections: nothing to count.
        return null;
    }
    if (connectionLimitLatch == null) {
        connectionLimitLatch = new LimitLatch(getMaxConnections());
    }
    return connectionLimitLatch;
}
/**
 * Release all threads waiting on the connection latch and discard it.
 */
protected void releaseConnectionLatch() {
    LimitLatch latch = connectionLimitLatch;
    if (latch != null) {
        latch.releaseAll();
    }
    connectionLimitLatch = null;
}
/**
 * Increment the connection count, blocking until a slot is free when the
 * maxConnections limit has been reached. No-op when unlimited.
 */
protected void countUpOrAwaitConnection() throws InterruptedException {
    if (maxConnections == -1) {
        return;
    }
    LimitLatch latch = connectionLimitLatch;
    if (latch != null) {
        latch.countUpOrAwait();
    }
}
/**
 * Decrement the connection count.
 *
 * @return the new count, or -1 when connections are not being counted
 */
protected long countDownConnection() {
    if (maxConnections == -1) {
        return -1;
    }
    LimitLatch latch = connectionLimitLatch;
    if (latch == null) {
        return -1;
    }
    long remaining = latch.countDown();
    if (remaining < 0) {
        // More count-downs than count-ups indicates a bookkeeping bug.
        getLog().warn(sm.getString("endpoint.warn.incorrectConnectionCount"));
    }
    return remaining;
}
/**
 * Provides a common approach for sub-classes to handle exceptions where a
 * delay is required to prevent a Thread from entering a tight loop which
 * will consume CPU and may also trigger large amounts of logging. For
 * example, this can happen with the Acceptor thread if the ulimit for open
 * files is reached.
 *
 * @param currentErrorDelay The current delay being applied on failure
 * @return The delay to apply on the next failure
 */
protected int handleExceptionWithDelay(int currentErrorDelay) {
    // Don't delay on first exception
    if (currentErrorDelay > 0) {
        try {
            Thread.sleep(currentErrorDelay);
        } catch (InterruptedException e) {
            // Ignore
        }
    }
    // On subsequent exceptions, start the delay at 50ms, doubling the delay
    // on every subsequent exception until the delay reaches 1.6 seconds.
    if (currentErrorDelay == 0) {
        return INITIAL_ERROR_DELAY;
    }
    // Clamp the doubled value so an off-sequence input can never push the
    // returned delay beyond the documented maximum (the previous code
    // returned currentErrorDelay * 2 unclamped whenever the input was
    // below MAX_ERROR_DELAY).
    return Math.min(currentErrorDelay * 2, MAX_ERROR_DELAY);
}
/**
 * Close the server socket (to prevent further connections) if the server
 * socket was originally bound on {@link #start()} (rather than on
 * {@link #init()}).
 *
 * @see #getBindOnInit()
 */
public final void closeServerSocketGraceful() {
    if (bindState == BindState.BOUND_ON_START) {
        // Flip the state first so a later stop() still performs the full unbind.
        bindState = BindState.SOCKET_CLOSED_ON_STOP;
        try {
            doCloseServerSocket();
        } catch (IOException ioe) {
            // Log and continue: a close failure here must not abort shutdown.
            getLog().warn(sm.getString("endpoint.serverSocket.closeFailed", getName()), ioe);
        }
    }
}
/**
 * Actually close the server socket but don't perform any other clean-up.
 * Implementations close only the listening socket; the remaining clean-up
 * is performed by {@code stop()} / {@code destroy()}.
 *
 * @throws IOException If an error occurs closing the socket
 */
protected abstract void doCloseServerSocket() throws IOException;
}
| 1.210938 | 1 |
psychometrics-ctt/src/main/java/com/itemanalysis/psychometrics/reliability/ScoreReliability.java | Learnosity/lib-psychometrics | 2 | 433 | /*
* Copyright 2012 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.itemanalysis.psychometrics.reliability;
import com.itemanalysis.psychometrics.data.VariableAttributes;
import java.util.ArrayList;
/**
 * Contract for internal-consistency score reliability estimates (such as
 * coefficient alpha) computed from an item covariance matrix.
 *
 * @author <NAME> <meyerjp at itemanalysis.com>
 */
public interface ScoreReliability {
    /**
     * An array of reliability estimates without the item indexed by the position in the array.
     * For example, rel[0] is the reliability estimate without the item at position 0.
     * Similarly, rel[1] is the reliability estimate without the item at position 1.
     *
     * @return per-item "reliability if item deleted" estimates
     */
    public double[] itemDeletedReliability();
    /**
     * Estimates reliability.
     *
     * @return estimate of reliability
     */
    public double value();
    /**
     * A String representation of all item deleted reliability estimates.
     *
     * @param var variable attributes that provide the variable names
     * @return formatted summary of the item-deleted estimates
     */
    public String printItemDeletedSummary(ArrayList<VariableAttributes> var);
    /**
     * Type of reliability estimate
     *
     * @return type of reliability estimate.
     */
    public ScoreReliabilityType getType();
    /**
     * Confidence interval for the reliability estimate computed using the F-distribution.
     * This computation is only correct for Coefficient Alpha because the sampling
     * distribution for other reliability estimates is unknown. As such, this method
     * returns an approximation, at best, of the reliability estimate for all reliability
     * estimates other than coefficient alpha. Note that the confidence interval is computed
     * using the largest sample size in the covariance matrix. This value would be the sample
     * size for pairwise deletion.
     *
     * @param numberOfExaminees sample size used to compute the interval
     * @return a confidence interval for the reliability estimate: [0] = lower bound, [1] = upper bound
     */
    public double[] confidenceInterval(double numberOfExaminees);
    /**
     * Creates a String representation of the confidence interval. It is used for
     * displaying results.
     *
     * @param confidenceInterval an array with the lower [0] and upper [1] bounds of the confidence interval
     * @return a String representation of the confidence interval.
     */
    public String confidenceIntervalToString(double[] confidenceInterval);
//    /**
//     * Set the unbiased flag.
//     *
//     * @param unbiased true if variance calculations should use N-1 in the denominator, and false otherwise.
//     */
//    public void isUnbiased(boolean unbiased);
    /**
     * Total observed score variance. It is the sum of all values in the covariance matrix.
     * @return sum of every element of the item covariance matrix
     */
    public double totalVariance();
}
| 2.390625 | 2 |
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java | panfeiyy/ambari | 5 | 441 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller.internal;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
import org.apache.ambari.server.orm.dao.GroupDAO;
import org.apache.ambari.server.orm.dao.MemberDAO;
import org.apache.ambari.server.orm.dao.PermissionDAO;
import org.apache.ambari.server.orm.dao.PrincipalDAO;
import org.apache.ambari.server.orm.dao.PrivilegeDAO;
import org.apache.ambari.server.orm.dao.ResourceDAO;
import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
import org.apache.ambari.server.orm.dao.UserDAO;
import org.apache.ambari.server.orm.dao.ViewDAO;
import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
import org.apache.ambari.server.orm.entities.PermissionEntity;
import org.apache.ambari.server.orm.entities.PrincipalEntity;
import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
import org.apache.ambari.server.orm.entities.PrivilegeEntity;
import org.apache.ambari.server.orm.entities.ResourceEntity;
import org.apache.ambari.server.orm.entities.UserEntity;
import org.apache.ambari.server.orm.entities.ViewEntity;
import org.apache.ambari.server.orm.entities.ViewEntityTest;
import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
import org.apache.ambari.server.orm.entities.ViewInstanceEntityTest;
import org.apache.ambari.server.security.SecurityHelper;
import org.apache.ambari.server.security.TestAuthenticationFactory;
import org.apache.ambari.server.view.ViewInstanceHandlerList;
import org.apache.ambari.server.view.ViewRegistry;
import org.apache.ambari.server.view.ViewRegistryTest;
import org.apache.ambari.view.ViewDefinition;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.springframework.security.core.context.SecurityContextHolder;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
/**
* ViewPrivilegeResourceProvider tests.
*/
public class ViewPrivilegeResourceProviderTest {
    // Strict mocks: every expected call must be declared, extra calls fail the test.
    private final static PrivilegeDAO privilegeDAO = createStrictMock(PrivilegeDAO.class);
    private final static UserDAO userDAO = createStrictMock(UserDAO.class);
    private final static GroupDAO groupDAO = createStrictMock(GroupDAO.class);
    private final static PrincipalDAO principalDAO = createStrictMock(PrincipalDAO.class);
    private final static PermissionDAO permissionDAO = createStrictMock(PermissionDAO.class);
    private final static ResourceDAO resourceDAO = createStrictMock(ResourceDAO.class);
    // Nice mocks: unspecified calls return defaults instead of failing.
    private static final ViewDAO viewDAO = createMock(ViewDAO.class);
    private static final ViewInstanceDAO viewInstanceDAO = createNiceMock(ViewInstanceDAO.class);
    private static final MemberDAO memberDAO = createNiceMock(MemberDAO.class);
    private static final ResourceTypeDAO resourceTypeDAO = createNiceMock(ResourceTypeDAO.class);
    private static final SecurityHelper securityHelper = createNiceMock(SecurityHelper.class);
    private static final ViewInstanceHandlerList handlerList = createNiceMock(ViewInstanceHandlerList.class);
    @BeforeClass
    public static void initClass() {
        // Wire the static DAO references used by every PrivilegeResourceProvider.
        PrivilegeResourceProvider.init(privilegeDAO, userDAO, groupDAO, principalDAO, permissionDAO, resourceDAO);
    }
    @Before
    public void resetGlobalMocks() {
        // Re-create the global ViewRegistry and reset mock expectations so
        // each test starts from a clean slate.
        ViewRegistry.initInstance(ViewRegistryTest.getRegistry(viewDAO, viewInstanceDAO, userDAO,
            memberDAO, privilegeDAO, permissionDAO, resourceDAO, resourceTypeDAO, securityHelper, handlerList, null, null, null));
        reset(privilegeDAO, userDAO, groupDAO, principalDAO, permissionDAO, resourceDAO, handlerList);
    }
    @Test
    public void testGetResources() throws Exception {
        // Register a deployed view instance so the provider has something to resolve.
        ViewEntity viewDefinition = ViewEntityTest.getViewEntity();
        ViewInstanceEntity viewInstanceDefinition = ViewInstanceEntityTest.getViewInstanceEntity();
        viewDefinition.addInstanceDefinition(viewInstanceDefinition);
        viewInstanceDefinition.setViewEntity(viewDefinition);
        viewDefinition.setStatus(ViewDefinition.ViewStatus.DEPLOYED);
        ViewRegistry registry = ViewRegistry.getInstance();
        registry.addDefinition(viewDefinition);
        registry.addInstanceDefinition(viewDefinition, viewInstanceDefinition);
        // Build one privilege: user principal "joe" holding VIEW.USER on resource 20.
        List<PrivilegeEntity> privilegeEntities = new LinkedList<PrivilegeEntity>();
        PrivilegeEntity privilegeEntity = createNiceMock(PrivilegeEntity.class);
        ResourceEntity resourceEntity = createNiceMock(ResourceEntity.class);
        UserEntity userEntity = createNiceMock(UserEntity.class);
        PrincipalEntity principalEntity = createNiceMock(PrincipalEntity.class);
        PrincipalTypeEntity principalTypeEntity = createNiceMock(PrincipalTypeEntity.class);
        PermissionEntity permissionEntity = createNiceMock(PermissionEntity.class);
        List<PrincipalEntity> principalEntities = new LinkedList<PrincipalEntity>();
        principalEntities.add(principalEntity);
        List<UserEntity> userEntities = new LinkedList<UserEntity>();
        userEntities.add(userEntity);
        privilegeEntities.add(privilegeEntity);
        // Expectations for the entity graph the provider walks.
        expect(privilegeDAO.findAll()).andReturn(privilegeEntities);
        expect(privilegeEntity.getResource()).andReturn(resourceEntity).anyTimes();
        expect(privilegeEntity.getPrincipal()).andReturn(principalEntity).anyTimes();
        expect(privilegeEntity.getPermission()).andReturn(permissionEntity).anyTimes();
        expect(resourceEntity.getId()).andReturn(20L).anyTimes();
        expect(principalEntity.getId()).andReturn(20L).anyTimes();
        expect(userEntity.getPrincipal()).andReturn(principalEntity).anyTimes();
        expect(userEntity.getUserName()).andReturn("joe").anyTimes();
        expect(permissionEntity.getPermissionName()).andReturn("VIEW.USER").anyTimes();
        expect(permissionEntity.getPermissionLabel()).andReturn("View User").anyTimes();
        expect(principalEntity.getPrincipalType()).andReturn(principalTypeEntity).anyTimes();
        expect(principalTypeEntity.getName()).andReturn("USER").anyTimes();
        expect(permissionDAO.findById(PermissionEntity.VIEW_USER_PERMISSION)).andReturn(permissionEntity);
        expect(userDAO.findUsersByPrincipal(principalEntities)).andReturn(userEntities);
        replay(privilegeDAO, userDAO, groupDAO, principalDAO, permissionDAO, resourceDAO, privilegeEntity, resourceEntity,
            userEntity, principalEntity, permissionEntity, principalTypeEntity);
        // Run the query as an administrator and check the mapped properties.
        SecurityContextHolder.getContext().setAuthentication(TestAuthenticationFactory.createAdministrator("admin"));
        PrivilegeResourceProvider provider = new ViewPrivilegeResourceProvider();
        Set<Resource> resources = provider.getResources(PropertyHelper.getReadRequest(), null);
        Assert.assertEquals(1, resources.size());
        Resource resource = resources.iterator().next();
        Assert.assertEquals("VIEW.USER", resource.getPropertyValue(AmbariPrivilegeResourceProvider.PERMISSION_NAME_PROPERTY_ID));
        Assert.assertEquals("View User", resource.getPropertyValue(AmbariPrivilegeResourceProvider.PERMISSION_LABEL_PROPERTY_ID));
        Assert.assertEquals("joe", resource.getPropertyValue(AmbariPrivilegeResourceProvider.PRINCIPAL_NAME_PROPERTY_ID));
        Assert.assertEquals("USER", resource.getPropertyValue(AmbariPrivilegeResourceProvider.PRINCIPAL_TYPE_PROPERTY_ID));
        // Verify every strict-mock expectation was satisfied.
        verify(privilegeDAO, userDAO, groupDAO, principalDAO, permissionDAO, resourceDAO, privilegeEntity, resourceEntity,
            userEntity, principalEntity, permissionEntity, principalTypeEntity);
    }
}
| 1.125 | 1 |
abstracto-application/abstracto-modules/moderation/moderation-impl/src/main/java/dev/sheldan/abstracto/moderation/command/UnBan.java | Sheldan/abstracto | 5 | 449 | package dev.sheldan.abstracto.moderation.command;
import dev.sheldan.abstracto.core.command.condition.AbstractConditionableCommand;
import dev.sheldan.abstracto.core.command.condition.CommandCondition;
import dev.sheldan.abstracto.core.command.config.CommandConfiguration;
import dev.sheldan.abstracto.core.command.config.HelpInfo;
import dev.sheldan.abstracto.core.command.config.Parameter;
import dev.sheldan.abstracto.core.command.execution.CommandContext;
import dev.sheldan.abstracto.core.command.execution.CommandResult;
import dev.sheldan.abstracto.core.config.FeatureDefinition;
import dev.sheldan.abstracto.core.templating.service.TemplateService;
import dev.sheldan.abstracto.moderation.config.ModerationModuleDefinition;
import dev.sheldan.abstracto.moderation.config.feature.ModerationFeatureDefinition;
import dev.sheldan.abstracto.moderation.service.BanService;
import lombok.extern.slf4j.Slf4j;
import net.dv8tion.jda.api.entities.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
/**
 * Chat command that lifts an existing ban from a user and notifies the
 * moderators via {@link BanService#unBanUserWithNotification}.
 */
@Component
@Slf4j
public class UnBan extends AbstractConditionableCommand {

    @Autowired
    private BanService banService;

    // Injected alongside the other services; not referenced by this command's
    // visible logic — presumably kept for template rendering. TODO confirm.
    @Autowired
    private TemplateService templateService;

    /**
     * Executes the command: the first parameter is the user whose ban is
     * removed; the command author is reported as the responsible moderator.
     */
    @Override
    public CompletableFuture<CommandResult> executeAsync(CommandContext commandContext) {
        User bannedUser = (User) commandContext.getParameters().getParameters().get(0);
        return banService.unBanUserWithNotification(bannedUser, commandContext.getAuthor())
                .thenApply(unused -> CommandResult.fromSuccess());
    }

    /**
     * Declares the command metadata: name, module, its single templated
     * {@code user} parameter and the flags controlling execution.
     */
    @Override
    public CommandConfiguration getConfiguration() {
        Parameter userParameter = Parameter.builder()
                .name("user")
                .templated(true)
                .type(User.class)
                .build();
        List<Parameter> parameters = new ArrayList<>();
        parameters.add(userParameter);
        HelpInfo helpInfo = HelpInfo.builder().templated(true).build();
        return CommandConfiguration.builder()
                .name("unBan")
                .module(ModerationModuleDefinition.MODERATION)
                .templated(true)
                .async(true)
                .supportsEmbedException(true)
                .causesReaction(true)
                .parameters(parameters)
                .help(helpInfo)
                .build();
    }

    @Override
    public FeatureDefinition getFeature() {
        return ModerationFeatureDefinition.MODERATION;
    }

    /**
     * Adds the immune-user guard on top of the inherited conditions so that
     * protected users cannot be targeted.
     */
    @Override
    public List<CommandCondition> getConditions() {
        List<CommandCondition> conditions = super.getConditions();
        conditions.add(immuneUserCondition);
        return conditions;
    }
}
| 1.28125 | 1 |
nepxion-swing/src/com/nepxion/swing/textcomponent/TextPopupMenuAdapter.java | Nepxion/Marvel | 10 | 457 | package com.nepxion.swing.textcomponent;
/**
* <p>Title: Nepxion Swing</p>
* <p>Description: Nepxion Swing Repository</p>
* <p>Copyright: Copyright (c) 2010</p>
* <p>Company: Nepxion</p>
* @author Neptune
* @email <EMAIL>
* @version 1.0
*/
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.io.IOException;
import javax.swing.text.JTextComponent;
import com.nepxion.swing.icon.IconFactory;
import com.nepxion.swing.keystroke.KeyStrokeManager;
import com.nepxion.swing.locale.SwingLocale;
import com.nepxion.swing.menuitem.JBasicMenuItem;
import com.nepxion.swing.popupmenu.JBasicPopupMenu;
public class TextPopupMenuAdapter
    implements MouseListener
{
    /**
     * The text component this popup menu operates on.
     */
    private JTextComponent textComponent;

    /**
     * The copy menu item (Ctrl+C).
     */
    private JBasicMenuItem copyMenuItem;

    /**
     * The paste menu item (Ctrl+V).
     */
    private JBasicMenuItem pasteMenuItem;

    /**
     * The cut menu item (Ctrl+X).
     */
    private JBasicMenuItem cutMenuItem;

    /**
     * The clear menu item (Ctrl+R); empties the whole text component.
     */
    private JBasicMenuItem clearMenuItem;

    /**
     * The select all menu item (Ctrl+A).
     */
    private JBasicMenuItem selectAllMenuItem;

    /**
     * The popup menu shown on right-click.
     */
    private JBasicPopupMenu popupMenu;

    /**
     * Constructs with the specified initial text component. Installs this
     * adapter as a mouse listener on the component and builds the popup menu
     * with copy/paste/cut/clear/select-all items plus their key bindings.
     * @param textComponent the instance of JTextComponent
     */
    public TextPopupMenuAdapter(final JTextComponent textComponent)
    {
        this.textComponent = textComponent;
        this.textComponent.addMouseListener(this);

        popupMenu = new JBasicPopupMenu();

        copyMenuItem = new JBasicMenuItem(SwingLocale.getString("copy"), IconFactory.getSwingIcon("copy.png"), SwingLocale.getString("copy"));
        KeyStrokeManager.registerButton(copyMenuItem, KeyEvent.VK_C, KeyEvent.CTRL_MASK, 'C');
        copyMenuItem.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent evt)
            {
                textComponent.copy();
            }
        }
        );
        popupMenu.add(copyMenuItem);

        pasteMenuItem = new JBasicMenuItem(SwingLocale.getString("paste"), IconFactory.getSwingIcon("paste.png"), SwingLocale.getString("paste"));
        KeyStrokeManager.registerButton(pasteMenuItem, KeyEvent.VK_V, KeyEvent.CTRL_MASK, 'V');
        pasteMenuItem.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent evt)
            {
                textComponent.paste();
            }
        }
        );
        popupMenu.add(pasteMenuItem);

        cutMenuItem = new JBasicMenuItem(SwingLocale.getString("cut"), IconFactory.getSwingIcon("cut.png"), SwingLocale.getString("cut"));
        KeyStrokeManager.registerButton(cutMenuItem, KeyEvent.VK_X, KeyEvent.CTRL_MASK, 'X');
        cutMenuItem.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent evt)
            {
                textComponent.cut();
            }
        }
        );
        popupMenu.add(cutMenuItem);

        // NOTE(review): the label uses locale key "clean" while the tooltip and
        // icon use "clear" — looks inconsistent with the other items; confirm
        // against the SwingLocale resource bundle.
        clearMenuItem = new JBasicMenuItem(SwingLocale.getString("clean"), IconFactory.getSwingIcon("clear.png"), SwingLocale.getString("clear"));
        // The clear action is also registered directly on the component so that
        // Ctrl+R works even while the popup menu is not showing.
        ActionListener clearActionListener = new ActionListener()
        {
            public void actionPerformed(ActionEvent e)
            {
                textComponent.setText("");
            }
        };
        clearMenuItem.addActionListener(clearActionListener);
        KeyStrokeManager.registerComponent(textComponent, clearActionListener, KeyEvent.VK_R, KeyEvent.CTRL_MASK);
        KeyStrokeManager.registerButton(clearMenuItem, KeyEvent.VK_R, KeyEvent.CTRL_MASK, 'R');
        popupMenu.add(clearMenuItem);

        selectAllMenuItem = new JBasicMenuItem(SwingLocale.getString("select_all"), IconFactory.getSwingIcon("select_all.png"), SwingLocale.getString("select_all"));
        KeyStrokeManager.registerButton(selectAllMenuItem, KeyEvent.VK_A, KeyEvent.CTRL_MASK, 'A');
        selectAllMenuItem.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent evt)
            {
                // Focus is required for the selection highlight to be visible.
                textComponent.requestFocusInWindow();
                textComponent.selectAll();
            }
        }
        );
        popupMenu.add(selectAllMenuItem);
    }

    /**
     * Returns true if the system clip board currently holds plain text.
     * @return true if the clip board has content
     */
    private boolean hasContent()
    {
        boolean hasContent = false;

        Clipboard clipboard = textComponent.getToolkit().getSystemClipboard();
        Transferable content = clipboard.getContents(textComponent);
        try
        {
            hasContent = content.getTransferData(DataFlavor.stringFlavor) instanceof String;
        }
        catch (UnsupportedFlavorException e)
        {
            // Best effort: a non-text clipboard simply means "nothing to paste".
        }
        catch (IOException e)
        {
            // Best effort: clipboard I/O failure is treated as "nothing to paste".
        }
        return hasContent;
    }

    /**
     * Returns true if it can copy, i.e. a non-empty selection exists.
     * @return true if it can copy
     */
    private boolean canCopy()
    {
        return textComponent.getSelectionStart() != textComponent.getSelectionEnd();
    }

    /**
     * Returns true if it can delete, i.e. the component is not empty.
     * @return true if it can delete
     */
    private boolean canDelete()
    {
        return !textComponent.getText().equals("");
    }

    /**
     * Gets the popup menu.
     * @return the instance of JBasicPopupMenu
     */
    public JBasicPopupMenu getPopupMenu()
    {
        return popupMenu;
    }

    /**
     * Gets the copy menu item.
     * @return the instance of JBasicMenuItem
     */
    public JBasicMenuItem getCopyMenuItem()
    {
        return copyMenuItem;
    }

    /**
     * Gets the paste menu item.
     * @return the instance of JBasicMenuItem
     */
    public JBasicMenuItem getPasteMenuItem()
    {
        return pasteMenuItem;
    }

    /**
     * Gets the cut menu item.
     * @return the instance of JBasicMenuItem
     */
    public JBasicMenuItem getCutMenuItem()
    {
        return cutMenuItem;
    }

    /**
     * Gets the clear menu item.
     * @return the instance of JBasicMenuItem
     */
    public JBasicMenuItem getClearMenuItem()
    {
        return clearMenuItem;
    }

    /**
     * Gets the select all item.
     * @return the instance of JBasicMenuItem
     */
    public JBasicMenuItem getSelectAllMenuItem()
    {
        return selectAllMenuItem;
    }

    /**
     * Invoked when the mouse button has been clicked (pressed and released) on a component.
     * @param e the instance of MouseEvent
     */
    public void mouseClicked(MouseEvent e)
    {
    }

    /**
     * Invoked when the mouse button has been pressed on a component.
     * @param e the instance of MouseEvent
     */
    public void mousePressed(MouseEvent e)
    {
    }

    /**
     * Invoked when the mouse button has been released on a component. Shows
     * the popup menu on the platform's popup trigger, enabling each item
     * according to the current selection, clipboard and editability state.
     * @param e the instance of MouseEvent
     */
    public void mouseReleased(MouseEvent e)
    {
        if (!e.isPopupTrigger())
        {
            return;
        }
        if (!textComponent.isEnabled())
        {
            return;
        }
        if (textComponent.isEditable())
        {
            copyMenuItem.setEnabled(canCopy());
            pasteMenuItem.setEnabled(hasContent());
            cutMenuItem.setEnabled(canCopy());
            clearMenuItem.setEnabled(canDelete());
        }
        else
        {
            // Read-only component: only copying is meaningful.
            copyMenuItem.setEnabled(canCopy());
            pasteMenuItem.setEnabled(false);
            cutMenuItem.setEnabled(false);
            clearMenuItem.setEnabled(false);
        }
        popupMenu.show(e.getComponent(), e.getX(), e.getY());
    }

    /**
     * Invoked when the mouse enters a component.
     * @param e the instance of MouseEvent
     */
    public void mouseEntered(MouseEvent e)
    {
    }

    /**
     * Invoked when the mouse exits a component.
     * @param e the instance of MouseEvent
     */
    public void mouseExited(MouseEvent e)
    {
    }
}
junit5/src/test/java/org/jboss/weld/junit5/explicitInjection/ExplicitParameterInjectionViaClassAnnotationTest.java | philippkunz/weld-junit | 0 | 465 | /*
* JBoss, Home of Professional Open Source
* Copyright 2017, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.junit5.explicitInjection;
import javax.enterprise.inject.Default;
import org.jboss.weld.junit5.ExplicitParamInjection;
import org.jboss.weld.junit5.WeldJunit5Extension;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
/**
 * Verifies that when {@code @ExplicitParamInjection} is placed on the test
 * class, Weld only resolves test-method parameters that carry an explicit
 * qualifier, leaving unannotated parameters to other JUnit 5 extensions.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 */
@ExtendWith(WeldJunit5Extension.class)
@ExplicitParamInjection
public class ExplicitParameterInjectionViaClassAnnotationTest {
    @Test
    @ExtendWith(CustomExtension.class)
    public void testParametersNeedExtraAnnotation(@Default Foo foo, Bar bar, @MyQualifier BeanWithQualifier bean) {
        // Bar should be resolved by another extension (CustomExtension), not Weld.
        Assertions.assertNotNull(bar);
        Assertions.assertEquals(CustomExtension.class.getSimpleName(), bar.ping());
        // Foo carries @Default, so Weld should resolve it as usual.
        Assertions.assertNotNull(foo);
        Assertions.assertEquals(Foo.class.getSimpleName(), foo.ping());
        // BeanWithQualifier carries a custom qualifier and should be resolved by Weld.
        Assertions.assertNotNull(bean);
        Assertions.assertEquals(BeanWithQualifier.class.getSimpleName(), bean.ping());
    }
}
| 1.242188 | 1 |
biz.aQute.launcher/src/aQute/launcher/constants/LauncherConstants.java | dhumeniuk/bnd | 0 | 473 | package aQute.launcher.constants;
import java.io.*;
import java.util.*;
/**
 * Carrier for the configuration the bnd launcher exchanges with its host
 * process through a {@link Properties} object. The field values can be
 * serialized with {@link #getProperties(Properties)} and restored with the
 * {@link #LauncherConstants(Properties)} constructor.
 */
public class LauncherConstants {

    public final static String LAUNCHER_PROPERTIES = "launcher.properties";
    public final static String DEFAULT_LAUNCHER_PROPERTIES = "launcher.properties";
    public final static String LAUNCHER_ARGUMENTS = "launcher.arguments";
    public final static String LAUNCHER_READY = "launcher.ready";

    // MUST BE ALIGNED WITH ProjectLauncher! Donot want to create coupling
    // so cannot refer.
    public final static int OK = 0;
    public final static int ERROR = -2;
    public final static int WARNING = -1;
    public final static int TIMEDOUT = -3;
    public final static int UPDATE_NEEDED = -4;
    public final static int CANCELED = -5;
    public final static int DUPLICATE_BUNDLE = -6;
    public final static int RESOLVE_ERROR = -7;
    public final static int ACTIVATOR_ERROR = -8;
    public static final int STOPPED = -9;

    // Start custom errors from here
    public final static int CUSTOM_LAUNCHER = -128;

    // Property keys used by getProperties()/the Properties constructor.
    final static String LAUNCH_SERVICES = "launch.services";
    final static String LAUNCH_STORAGE_DIR = "launch.storage.dir";
    final static String LAUNCH_KEEP = "launch.keep";
    final static String LAUNCH_RUNBUNDLES = "launch.bundles";
    final static String LAUNCH_SYSTEMPACKAGES = "launch.system.packages";
    final static String LAUNCH_SYSTEMCAPABILITIES = "launch.system.capabilities";
    final static String LAUNCH_TRACE = "launch.trace";
    final static String LAUNCH_TIMEOUT = "launch.timeout";
    final static String LAUNCH_ACTIVATORS = "launch.activators";
    final static String LAUNCH_EMBEDDED = "launch.embedded";
    final static String LAUNCH_NAME = "launch.name";
    final static String LAUNCH_NOREFERENCES = "launch.noreferences";
    final static String LAUNCH_NOTIFICATION_PORT = "launch.notificationPort";

    /**
     * The command line arguments of the launcher. Launcher are not supposed to
     * eat any arguments, they should use -D VM arguments so that applications
     * can leverage the command line. The launcher must register itself as a
     * service under its impl. class with this property set to a String[].
     */
    public boolean services;
    public boolean noreferences;
    public File storageDir;
    public boolean keep;
    public final List<String> runbundles = new ArrayList<String>();
    public String systemPackages;
    public String systemCapabilities;
    public boolean trace;
    public long timeout;
    public final List<String> activators = new ArrayList<String>();
    public Map<String,String> runProperties = new HashMap<String,String>();
    public boolean embedded = false;
    public String name;
    public int notificationPort = -1;

    /**
     * Translate a constants to properties.
     *
     * @param p the properties object to fill
     * @return the same properties instance, for chaining
     */
    public Properties getProperties(Properties p) {
        p.setProperty(LAUNCH_NOREFERENCES, String.valueOf(noreferences));
        p.setProperty(LAUNCH_SERVICES, String.valueOf(services));
        if (storageDir != null)
            p.setProperty(LAUNCH_STORAGE_DIR, storageDir.getAbsolutePath());
        p.setProperty(LAUNCH_KEEP, String.valueOf(keep));
        p.setProperty(LAUNCH_RUNBUNDLES, join(runbundles, ","));
        if (systemPackages != null)
            p.setProperty(LAUNCH_SYSTEMPACKAGES, systemPackages);
        if (systemCapabilities != null)
            p.setProperty(LAUNCH_SYSTEMCAPABILITIES, systemCapabilities);
        p.setProperty(LAUNCH_TRACE, String.valueOf(trace));
        p.setProperty(LAUNCH_TIMEOUT, String.valueOf(timeout));
        p.setProperty(LAUNCH_ACTIVATORS, join(activators, ","));
        p.setProperty(LAUNCH_EMBEDDED, String.valueOf(embedded));
        if (name != null)
            p.setProperty(LAUNCH_NAME, name);
        p.setProperty(LAUNCH_NOTIFICATION_PORT, String.valueOf(notificationPort));
        // Run properties are applied last so they can override the values
        // above; a null value acts as a deletion marker for its key.
        for (Map.Entry<String,String> entry : runProperties.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            if (value != null) {
                p.put(key, value);
            } else if (key != null) {
                p.remove(key);
            }
        }
        return p;
    }

    /**
     * Empty constructor for the plugin
     */
    public LauncherConstants() {}

    /**
     * Create a constants from properties.
     *
     * @param p the properties to read; the timeout, run bundle and activator
     *            keys must be present
     */
    public LauncherConstants(Properties p) {
        services = Boolean.parseBoolean(p.getProperty(LAUNCH_SERVICES));
        String dir = p.getProperty(LAUNCH_STORAGE_DIR);
        if (dir != null)
            storageDir = new File(dir);
        noreferences = Boolean.parseBoolean(p.getProperty(LAUNCH_NOREFERENCES));
        keep = Boolean.parseBoolean(p.getProperty(LAUNCH_KEEP));
        runbundles.addAll(split(p.getProperty(LAUNCH_RUNBUNDLES), ","));
        systemPackages = p.getProperty(LAUNCH_SYSTEMPACKAGES);
        systemCapabilities = p.getProperty(LAUNCH_SYSTEMCAPABILITIES);
        trace = Boolean.parseBoolean(p.getProperty(LAUNCH_TRACE));
        timeout = Long.parseLong(p.getProperty(LAUNCH_TIMEOUT));
        activators.addAll(split(p.getProperty(LAUNCH_ACTIVATORS), " ,"));
        embedded = Boolean.parseBoolean(p.getProperty(LAUNCH_EMBEDDED));
        name = p.getProperty(LAUNCH_NAME);
        notificationPort = Integer.parseInt(p.getProperty(LAUNCH_NOTIFICATION_PORT, "-1"));
        // Keep a copy of every property so getProperties() can round-trip
        // values this class does not model explicitly.
        @SuppressWarnings("unchecked")
        Map<String,String> map = (Map) p;
        runProperties.putAll(map);
    }

    /**
     * Splits a delimiter-separated value, skipping empty tokens (the
     * StringTokenizer semantics the launcher has always used).
     */
    private List<String> split(String value, String delimiters) {
        List<String> tokens = new ArrayList<String>();
        for (StringTokenizer st = new StringTokenizer(value, delimiters); st.hasMoreTokens();) {
            tokens.add(st.nextToken());
        }
        return tokens;
    }

    /**
     * Joins the string form of each element with the given separator.
     */
    private static String join(List< ? > items, String separator) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < items.size(); i++) {
            if (i > 0)
                sb.append(separator);
            sb.append(items.get(i));
        }
        return sb.toString();
    }
}
| 1.09375 | 1 |
plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageHttpStatsCollector.java | dial-workable/elasticsearch | 0 | 481 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.gcs;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpResponse;
import com.google.api.client.http.HttpResponseInterceptor;
import org.elasticsearch.common.collect.List;
import java.util.Locale;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static java.lang.String.format;
final class GoogleCloudStorageHttpStatsCollector implements HttpResponseInterceptor {
// The specification for the current API (v1) endpoints can be found at:
// https://cloud.google.com/storage/docs/json_api/v1
// Factories deferred until the tracked bucket name is known: each builds a
// GET tracker for one bucket-specific URI pattern (object download, object
// get, and object listing respectively; see the v1 JSON API spec above).
private static final java.util.List<Function<String, HttpRequestTracker>> trackerFactories =
    List.of(
        (bucket) ->
            HttpRequestTracker.get(format(Locale.ROOT, "/download/storage/v1/b/%s/o/.+", bucket),
                GoogleCloudStorageOperationsStats::trackGetOperation),
        (bucket) ->
            HttpRequestTracker.get(format(Locale.ROOT, "/storage/v1/b/%s/o/.+", bucket),
                GoogleCloudStorageOperationsStats::trackGetOperation),
        (bucket) ->
            HttpRequestTracker.get(format(Locale.ROOT, "/storage/v1/b/%s/o", bucket),
                GoogleCloudStorageOperationsStats::trackListOperation)
    );
// Stats sink shared with the repository; updated on every tracked response.
private final GoogleCloudStorageOperationsStats gcsOperationStats;
// Concrete trackers instantiated for this collector's bucket.
private final java.util.List<HttpRequestTracker> trackers;
/**
 * Creates a collector that attributes successful GCS HTTP responses to the
 * bucket tracked by the given stats object.
 */
GoogleCloudStorageHttpStatsCollector(final GoogleCloudStorageOperationsStats gcsOperationStats) {
    this.gcsOperationStats = gcsOperationStats;
    // Materialize one tracker per factory, bound to the tracked bucket name.
    this.trackers = trackerFactories.stream()
        .map(trackerFactory -> trackerFactory.apply(gcsOperationStats.getTrackedBucket()))
        .collect(Collectors.toList());
}
@Override
public void interceptResponse(final HttpResponse response) {
// TODO keep track of unsuccessful requests in different entries
if (!response.isSuccessStatusCode())
return;
final HttpRequest request = response.getRequest();
for (HttpRequestTracker tracker : trackers) {
if (tracker.track(request, gcsOperationStats)) {
return;
}
}
}
/**
* Http request tracker that allows to track certain HTTP requests based on the following criteria:
* <ul>
* <li>The HTTP request method</li>
* <li>An URI path regex expression</li>
* </ul>
*
* The requests that match the previous criteria are tracked using the {@code statsTracker} function.
*/
private static final class HttpRequestTracker {
private final String method;
private final Pattern pathPattern;
private final Consumer<GoogleCloudStorageOperationsStats> statsTracker;
private HttpRequestTracker(final String method,
final String pathPattern,
final Consumer<GoogleCloudStorageOperationsStats> statsTracker) {
this.method = method;
this.pathPattern = Pattern.compile(pathPattern);
this.statsTracker = statsTracker;
}
private static HttpRequestTracker get(final String pathPattern,
final Consumer<GoogleCloudStorageOperationsStats> statsConsumer) {
return new HttpRequestTracker("GET", pathPattern, statsConsumer);
}
/**
* Tracks the provided http request if it matches the criteria defined by this tracker.
*
* @param httpRequest the http request to be tracked
* @param stats the operation tracker
*
* @return {@code true} if the http request was tracked, {@code false} otherwise.
*/
private boolean track(final HttpRequest httpRequest, final GoogleCloudStorageOperationsStats stats) {
if (matchesCriteria(httpRequest) == false)
return false;
statsTracker.accept(stats);
return true;
}
private boolean matchesCriteria(final HttpRequest httpRequest) {
return method.equalsIgnoreCase(httpRequest.getRequestMethod()) &&
pathMatches(httpRequest.getUrl());
}
private boolean pathMatches(final GenericUrl url) {
return pathPattern.matcher(url.getRawPath()).matches();
}
}
}
| 1.25 | 1 |
modules/web-widgets/src/com/haulmont/cuba/web/widgets/CubaCssActionsLayout.java | mitring/cuba | 1,337 | 489 | /*
* Copyright (c) 2008-2017 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.haulmont.cuba.web.widgets;
import com.haulmont.cuba.web.widgets.client.cssactionslayout.CubaCssActionsLayoutState;
import com.vaadin.event.Action;
import com.vaadin.event.ActionManager;
import com.vaadin.event.ShortcutListener;
import com.vaadin.server.PaintException;
import com.vaadin.server.PaintTarget;
import com.vaadin.shared.Registration;
import com.vaadin.shared.ui.MarginInfo;
import com.vaadin.ui.CssLayout;
import com.vaadin.ui.Layout;
import com.vaadin.ui.LegacyComponent;
import java.util.Map;
/**
* CssLayout with separate action manager for shortcuts
*/
public class CubaCssActionsLayout extends CssLayout implements Action.Container, LegacyComponent,
Layout.MarginHandler, Layout.SpacingHandler {
protected ActionManager actionManager;
@Override
protected CubaCssActionsLayoutState getState() {
return (CubaCssActionsLayoutState) super.getState();
}
@Override
public void addActionHandler(Action.Handler actionHandler) {
getActionManager().addActionHandler(actionHandler);
markAsDirty();
}
@Override
public void removeActionHandler(Action.Handler actionHandler) {
if (actionManager != null) {
actionManager.removeActionHandler(actionHandler);
markAsDirty();
}
}
@Override
public Registration addShortcutListener(ShortcutListener listener) {
getActionManager().addAction(listener);
return () -> getActionManager().removeAction(listener);
}
@Override
public void removeShortcutListener(ShortcutListener listener) {
getActionManager().removeAction(listener);
}
@Override
protected ActionManager getActionManager() {
if (actionManager == null) {
actionManager = new ActionManager(this);
}
return actionManager;
}
@Override
public void paintContent(PaintTarget target) throws PaintException {
if (actionManager != null) {
actionManager.paintActions(null, target);
}
}
@Override
public void changeVariables(Object source, Map<String, Object> variables) {
if (actionManager != null) {
actionManager.handleActions(variables, this);
}
}
@Override
public void setMargin(boolean enabled) {
setMargin(new MarginInfo(enabled));
}
@Override
public void setMargin(MarginInfo marginInfo) {
getState().marginsBitmask = marginInfo.getBitMask();
}
@Override
public MarginInfo getMargin() {
return new MarginInfo(getState().marginsBitmask);
}
@Override
public void setSpacing(boolean enabled) {
getState().spacing = enabled;
}
@Override
public boolean isSpacing() {
return getState().spacing;
}
} | 1.421875 | 1 |
src/main/java/kaptainwutax/seedcrackerX/finder/BlockUpdateQueue.java | Crsuh2er0/SeedcrackerX | 0 | 497 | package kaptainwutax.seedcrackerX.finder;
import net.minecraft.client.MinecraftClient;
import net.minecraft.network.packet.c2s.play.PlayerActionC2SPacket;
import net.minecraft.util.Pair;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Direction;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Queue;
/**
 * Queues batches of block positions whose state should be re-requested from
 * the server, sending a few requests per tick and starting a caller-supplied
 * thread once a batch is fully drained.
 */
public class BlockUpdateQueue {

    /** Pending batches: the completion thread plus the remaining positions of each batch. */
    private final Queue<Pair<Thread, ArrayList<BlockPos>>> blocksAndAction = new LinkedList<>();
    /** Origin positions already queued once; prevents queuing the same request twice. */
    private final HashSet<BlockPos> alreadyChecked = new HashSet<>();

    /**
     * Queues a batch of positions for block-update requests.
     *
     * @param blockPoses     positions to request (consumed destructively by {@link #tick()})
     * @param originPos      deduplication key for this batch
     * @param operationAtEnd thread started once every position of the batch has been sent
     * @return {@code true} if the batch was queued, {@code false} if originPos was seen before
     */
    public boolean add(ArrayList<BlockPos> blockPoses, BlockPos originPos, Thread operationAtEnd) {
        if (alreadyChecked.add(originPos)) {
            blocksAndAction.add(new Pair<>(operationAtEnd, blockPoses));
            return true;
        }
        return false;
    }

    /**
     * Sends up to 5 block-update requests per call, advancing through queued
     * batches and starting each batch's completion thread when it is exhausted.
     */
    public void tick() {
        if (blocksAndAction.isEmpty()) return;
        Pair<Thread, ArrayList<BlockPos>> current = blocksAndAction.peek();
        ArrayList<BlockPos> currentBlocks = current.getRight();
        for (int i = 0; i < 5; i++) {
            // Drain every exhausted batch before sending. This must be a 'while'
            // (the original used 'if'): a batch queued with an EMPTY list would
            // otherwise fall through to remove(0) on an empty ArrayList and
            // throw IndexOutOfBoundsException.
            while (currentBlocks.isEmpty()) {
                current.getLeft().start();
                blocksAndAction.remove();
                if (blocksAndAction.isEmpty()) {
                    return;
                }
                current = blocksAndAction.peek();
                currentBlocks = current.getRight();
            }
            if (MinecraftClient.getInstance().getNetworkHandler() == null) {
                // Not connected: drop all pending work.
                blocksAndAction.clear();
                return;
            }
            // NOTE(review): ABORT_DESTROY_BLOCK presumably makes the server resend
            // the block state at this position without side effects — confirm
            // against the protocol handling.
            PlayerActionC2SPacket p = new PlayerActionC2SPacket(PlayerActionC2SPacket.Action.ABORT_DESTROY_BLOCK, currentBlocks.remove(0),
                    Direction.DOWN);
            MinecraftClient.getInstance().getNetworkHandler().sendPacket(p);
        }
    }
}
| 1.710938 | 2 |
src/roadgraph/DijkstraGrader.java | MaximDzhezhelo/UCSDGraphs | 73 | 505 | package roadgraph;
import geography.GeographicPoint;
import util.GraphLoader;
import java.util.List;
/**
 * @author UCSD MOOC Development Team
 * Grader for Module 4, Part 1.
 */
public class DijkstraGrader implements Runnable {
    /** Human-readable feedback accumulated across all tests. */
    public String feedback;

    /** Number of tests passed so far. */
    public int correct;

    private static final int TESTS = 4;

    /** Format readable feedback */
    public static String printOutput(double score, String feedback) {
        return "Score: " + score + "\nFeedback: " + feedback;
    }

    /** Format test number and description */
    public static String appendFeedback(int num, String test) {
        return "\n** Test #" + num + ": " + test + "...";
    }

    public static void main(String[] args) {
        DijkstraGrader grader = new DijkstraGrader();

        // Infinite loop detection: run the grader on its own thread and watch it.
        Thread thread = new Thread(grader);
        thread.start();

        long endTime = System.currentTimeMillis() + 10000;
        boolean infinite = false;
        // Busy-wait watchdog polling the grader thread until it finishes or times out.
        while (thread.isAlive()) {
            // Stop thread after 10 seconds
            if (System.currentTimeMillis() > endTime) {
                // Thread.stop is deprecated and unsafe in general; tolerated here
                // because the grader JVM exits immediately afterwards.
                thread.stop();
                infinite = true;
                break;
            }
        }
        if (infinite) {
            System.out.println(printOutput((double)grader.correct / TESTS, grader.feedback + "\nYour program entered an infinite loop."));
        }
    }

    /** Run a test case on an adjacency list and adjacency matrix.
     * @param i The graph number
     * @param file The file to read from
     * @param desc A description of the graph
     * @param start The point to start from
     * @param end The point to end at
     */
    public void runTest(int i, String file, String desc, GeographicPoint start, GeographicPoint end) {
        MapGraph graph = new MapGraph();
        feedback += "\n\n" + desc;
        GraphLoader.loadRoadMap("data/graders/mod3/" + file, graph);
        CorrectAnswer corr = new CorrectAnswer("data/graders/mod3/" + file + ".answer", false);
        judge(i, graph, corr, start, end);
    }

    /** Compare the user's result with the right answer.
     * @param i The graph number
     * @param result The user's graph
     * @param corr The correct answer
     * @param start The point to start from
     * @param end The point to end at
     */
    public void judge(int i, MapGraph result, CorrectAnswer corr, GeographicPoint start, GeographicPoint end) {
        // Correct if paths are same length and have the same elements
        feedback += appendFeedback(i, "Running Dijkstra's algorithm from (" + start.getX() + ", " + start.getY() + ") to (" + end.getX() + ", " + end.getY() + ")");
        List<GeographicPoint> path = result.dijkstra(start, end);
        if (path == null) {
            if (corr.path == null) {
                feedback += "PASSED.";
                correct++;
            } else {
                feedback += "FAILED. Your implementation returned null; expected \n" + printPath(corr.path) + ".";
            }
        } else if (path.size() != corr.path.size() || !corr.path.containsAll(path)) {
            feedback += "FAILED. Expected: \n" + printPath(corr.path) + "Got: \n" + printPath(path);
            if (path.size() != corr.path.size()) {
                feedback += "Your result has size " + path.size() + "; expected " + corr.path.size() + ".";
            } else {
                feedback += "Correct size, but incorrect path.";
            }
        } else {
            feedback += "PASSED.";
            correct++;
        }
    }

    /** Print a search path in readable form */
    public String printPath(List<GeographicPoint> path) {
        // StringBuilder avoids the O(n^2) cost of repeated String concatenation.
        StringBuilder ret = new StringBuilder();
        for (GeographicPoint point : path) {
            ret.append(point).append("\n");
        }
        return ret.toString();
    }

    /** Run the grader */
    public void run() {
        feedback = "";
        correct = 0;
        try {
            runTest(1, "map1.txt", "MAP: Straight line (-3 <- -2 <- -1 <- 0 -> 1 -> 2-> 3 ->...)", new GeographicPoint(0, 0), new GeographicPoint(6, 6));
            runTest(2, "map2.txt", "MAP: Example map from the writeup", new GeographicPoint(7, 3), new GeographicPoint(4, -1));
            runTest(3, "map3.txt", "MAP: Right triangle (with a little detour)", new GeographicPoint(0, 0), new GeographicPoint(0, 4));
            runTest(4, "ucsd.map", "UCSD MAP: Intersections around UCSD", new GeographicPoint(32.8709815, -117.2434254), new GeographicPoint(32.8742087, -117.2381344));
            if (correct == TESTS)
                feedback = "All tests passed. Great job!" + feedback;
            else
                feedback = "Some tests failed. Check your code for errors, then try again:" + feedback;
        } catch (Exception e) {
            feedback += "\nError during runtime: " + e;
            e.printStackTrace();
        }
        System.out.println(printOutput((double)correct / TESTS, feedback));
    }
}
| 2.296875 | 2 |
0.4.2.1/JFP-Framework-Core/src/main/java/org/isotope/jfp/framework/beans/user/LoginerBean.java | qq744788292/cnsoft | 0 | 513 | package org.isotope.jfp.framework.beans.user;
/**
 * User login information.
 *
 * @author Spook
 * @version 2.0.1 2015/07/07
 * @version 1.1.0 2014/12/15
 * @version 0.1.0 2014/05/30
 * @since 0.1.0 2014/5/30
 */
public class LoginerBean extends TokenBean {
    /**
     * Account name used to log in.
     */
    private String account;
    /**
     * Password.
     */
    private String passWord;
    /**
     * Security (verification) code.
     */
    private String securityCode;
    /**
     * Login client type (0 = app, 1 = web, 2 = mobile, 3 = WeChat, 4 = Alipay).
     */
    private String clientType;
    /**
     * URL of the login page that was accessed.
     */
    private String loginUrl;
    /**
     * Callback URL invoked after login.
     */
    private String callBackUrl;
    /**
     * Third-party (OAuth) login identifier.
     *
     * @return
     */
    private String openId;
    /**
     * Account category (0 = guest, 1 = regular user, 2 = VIP user).
     */
    private String accountType;
    /**
     * Login result:<br>
     * 0 = success, 1 = wrong password, 2 = user does not exist,
     * 3 = duplicate login, 8 = unknown user type, 9 = user locked (abnormal activity).
     */
    private String loginStatus;
    public String getAccount() {
        return account;
    }
    public void setAccount(String account) {
        this.account = account;
    }
    public String getPassWord() {
        return passWord;
    }
    public void setPassWord(String passWord) {
        this.passWord = passWord;
    }
    public String getSecurityCode() {
        return securityCode;
    }
    public void setSecurityCode(String securityCode) {
        this.securityCode = securityCode;
    }
    public String getClientType() {
        return clientType;
    }
    public void setClientType(String clientType) {
        this.clientType = clientType;
    }
    public String getLoginUrl() {
        return loginUrl;
    }
    public void setLoginUrl(String loginUrl) {
        this.loginUrl = loginUrl;
    }
    public String getCallBackUrl() {
        return callBackUrl;
    }
    public void setCallBackUrl(String callBackUrl) {
        this.callBackUrl = callBackUrl;
    }
    public String getOpenId() {
        return openId;
    }
    public void setOpenId(String openId) {
        this.openId = openId;
    }
    public String getAccountType() {
        return accountType;
    }
    public void setAccountType(String accountType) {
        this.accountType = accountType;
    }
    public String getLoginStatus() {
        return loginStatus;
    }
    public void setLoginStatus(String loginStatus) {
        this.loginStatus = loginStatus;
    }
}
| 1.367188 | 1 |
oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/LocalDiffCacheTest.java | rafiyasirin/jackrabbit-oak | 288 | 521 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.document;
import java.util.HashMap;
import java.util.Map;
import com.google.common.collect.Maps;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.plugins.document.LocalDiffCache.Diff;
import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class LocalDiffCacheTest {

    DocumentNodeStore store;

    @After
    public void dispose() {
        // Dispose the store created by a test so each test starts clean.
        if (store != null) {
            store.dispose();
            store = null;
        }
    }

    /**
     * Merges changes through the store and verifies that subsequent diffs are
     * answered entirely from the diff cache (hits, but no misses).
     */
    @Test
    public void simpleDiff() throws Exception{
        TestNodeObserver o = new TestNodeObserver("/");
        store = createMK().getNodeStore();
        store.addObserver(o);
        o.reset();

        DiffCache cache = store.getDiffCache();
        Iterable<CacheStats> stats = cache.getStats();

        NodeBuilder builder = store.getRoot().builder();
        builder.child("a").child("a2").setProperty("foo", "bar");
        builder.child("b");
        merge(store, builder);

        assertTrue(getHitCount(stats) > 0);
        assertEquals(0, getMissCount(stats));
        assertEquals(3, o.added.size());

        builder = store.getRoot().builder();
        builder.child("a").child("a2").removeProperty("foo");
        o.reset();
        resetStats(stats);
        merge(store, builder);

        assertTrue(getHitCount(stats) > 0);
        assertEquals(0, getMissCount(stats));
        assertEquals(1, o.changed.size());
    }

    /**
     * A Diff must survive a round trip through its string representation.
     */
    @Test
    public void diffFromAsString() {
        // Consistency: use new HashMap<>() like emptyDiff() below instead of
        // the Guava Maps.newHashMap() factory used previously.
        Map<Path, String> changes = new HashMap<>();
        changes.put(Path.ROOT, "+\"foo\":{}^\"bar\":{}-\"baz\"");
        changes.put(Path.fromString("/foo"), "");
        changes.put(Path.fromString("/bar"), "+\"qux\"");
        changes.put(Path.fromString("/bar/qux"), "");
        Diff diff = new Diff(changes, 0);
        assertEquals(changes, Diff.fromString(diff.asString()).getChanges());
    }

    /**
     * An empty Diff must also round-trip through its string form.
     */
    @Test
    public void emptyDiff() throws Exception{
        Map<Path, String> changes = new HashMap<>();
        Diff diff = new Diff(changes, 100);
        String asString = diff.asString();
        Diff diff2 = Diff.fromString(asString);
        assertEquals(diff, diff2);
    }

    /** Merges the builder's changes into the store with no commit hooks. */
    private static DocumentNodeState merge(NodeStore store, NodeBuilder builder)
            throws CommitFailedException {
        return (DocumentNodeState) store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    }

    private static DocumentMK createMK(){
        return create(new MemoryDocumentStore(), 0);
    }

    private static DocumentMK create(DocumentStore ds, int clusterId){
        return new DocumentMK.Builder()
                .setAsyncDelay(0)
                .setDocumentStore(ds)
                .setClusterId(clusterId)
                .setPersistentCache("target/persistentCache,time")
                .open();
    }

    /** Sum of hits across all cache stats. */
    private static long getHitCount(Iterable<CacheStats> stats) {
        long hitCount = 0;
        for (CacheStats cs : stats) {
            hitCount += cs.getHitCount();
        }
        return hitCount;
    }

    /** Sum of misses across all cache stats. */
    private static long getMissCount(Iterable<CacheStats> stats) {
        long missCount = 0;
        for (CacheStats cs : stats) {
            missCount += cs.getMissCount();
        }
        return missCount;
    }

    private static void resetStats(Iterable<CacheStats> stats) {
        for (CacheStats cs : stats) {
            cs.resetStats();
        }
    }
}
| 1.226563 | 1 |
src/io/samsungsami/android/SamiStack.java | ianychoi/sami-android-demo | 0 | 529 | package io.samsungsami.android;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
/**
 * Holds the endpoint configuration of a SAMI stack (Accounts, Connectors,
 * Gaia, Chronos, websocket and Echo URLs) and derives the login/logout URLs
 * for a given application.
 */
public class SamiStack {
    /**
     * Default stack configuration (PROD endpoints)
     */
    private String clientId = "";
    private String redirectUri = "";
    private String connectorsUrl = "https://api.samsungsami.io/v1.1";
    private String accountsUrl = "https://accounts.samsungsami.io";
    private String gaiaUrl= "https://api.samsungsami.io/v1.1";
    private String chronosUrl= "https://api.samsungsami.io/v1.1";
    private String websocketUrl = "wss://api.samsungsami.io/v1.1";
    private String echoUrl = "wss://api.samsungsami.io/v1.1";
    // ---------------------------------------------------------------

    // Derived query strings, computed once in init() from clientId/redirectUri.
    private String ACCOUNTS_QUERY_STRING_WEB_LOGIN = "";
    private String ACCOUNTS_QUERY_STRING_HIDDEN_LOGOUT= "";

    /**
     * Creates an object that represents the SAMI stack.
     * This constructor is PROD-only.
     *
     * @param clientId unique app id for SAMI Accounts service
     * @param redirectUri URL redirection after login for this application
     */
    public SamiStack(String clientId, String redirectUri){
        this.clientId = clientId;
        this.redirectUri = redirectUri;
        init();
    }

    /**
     * Creates an object that represents the SAMI stack.
     * You need to use this constructor on LOCALHOST because Connectors,
     * Chronos, Echo and Gaia run on different ports.
     * @param clientId unique app id for SAMI Accounts service
     * @param redirectUri URL redirection after login for this application, as specified in SAMI server
     * @param connectorsUrl the URL to the API, example https://someserver/v1.1
     * @param accountsUrl the URL to access Accounts web service
     * @param gaiaUrl the base URL to access the internal API.
     * @param chronosUrl the base URL to access the Chronos API.
     * @param websocketUrl the Connectors base URL for websocket connections
     * @param echoUrl the Echo base URL for websocket connections.
     */
    public SamiStack(String clientId,
            String redirectUri,
            String connectorsUrl,
            String accountsUrl,
            String gaiaUrl,
            String chronosUrl,
            String websocketUrl,
            String echoUrl){
        this.clientId = clientId;
        this.redirectUri = redirectUri;
        this.connectorsUrl = connectorsUrl;
        this.accountsUrl = accountsUrl;
        this.websocketUrl = websocketUrl;
        this.gaiaUrl = gaiaUrl;
        this.chronosUrl = chronosUrl;
        this.echoUrl = echoUrl;
        init();
    }

    /**
     * Computes the login and logout query strings from the configured
     * client id and redirect URI.
     */
    void init(){
        ACCOUNTS_QUERY_STRING_WEB_LOGIN = "/authorize?response_type=token&client_id="+clientId+"&client=mobile";
        try {
            ACCOUNTS_QUERY_STRING_HIDDEN_LOGOUT = "/logout?redirect_uri="+URLEncoder.encode(redirectUri, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 support is mandated by the Java platform, so this is unreachable.
            // Fail fast instead of silently leaving the logout URL empty (the old
            // behavior only printed the stack trace and carried on).
            throw new AssertionError("UTF-8 charset unexpectedly unsupported", e);
        }
    }

    /** @return the login query string (token response type, mobile client). */
    public String getLoginUrl() {
        return ACCOUNTS_QUERY_STRING_WEB_LOGIN;
    }

    /** @return the logout query string with the URL-encoded redirect URI. */
    public String getLogoutUrl(){
        return ACCOUNTS_QUERY_STRING_HIDDEN_LOGOUT;
    }

    public String getClientId() {
        return clientId;
    }

    public String getRedirectUri() {
        return redirectUri;
    }

    public String getConnectorsUrl() {
        return connectorsUrl;
    }

    /**
     * Use this getter if you don't need to connect to localhost
     * @return the Gaia base URL
     */
    public String getUrl() {
        return gaiaUrl;
    }

    public String getGaiaUrl() {
        return gaiaUrl;
    }

    public String getAccountsUrl() {
        return accountsUrl;
    }

    public String getWebsocketUrl() {
        return websocketUrl;
    }

    public String getChronosUrl() {
        return chronosUrl;
    }

    /** @return the Echo websocket base URL. */
    public String getLiveUrl() {
        return echoUrl;
    }
}
| 1.210938 | 1 |
src/test/java/org/oddjob/arooa/convert/convertlets/FloatConvertletsTest.java | robjg/arooa | 0 | 537 | /*
* (c) <NAME> 2006
*/
package org.oddjob.arooa.convert.convertlets;
import org.junit.Test;
import java.math.BigDecimal;
import org.junit.Assert;
import org.oddjob.arooa.convert.ConversionFailedException;
import org.oddjob.arooa.convert.ConversionPath;
import org.oddjob.arooa.convert.DefaultConversionRegistry;
public class FloatConvertletsTest extends Assert {

    @Test
    public void testNumberToFloat() throws ConversionFailedException {
        // Register the float conversions into a fresh registry.
        DefaultConversionRegistry conversions = new DefaultConversionRegistry();
        new FloatConvertlets().registerWith(conversions);

        ConversionPath<Number, Float> numberToFloat =
                conversions.findConversion(Number.class, Float.class);

        // A BigDecimal source should narrow to the equivalent float value.
        Float converted = numberToFloat.convert(new BigDecimal(42.24), null);
        assertEquals(42.24, converted, 0.001);
    }

    @Test
    public void testStringToFloat() throws ConversionFailedException {
        DefaultConversionRegistry conversions = new DefaultConversionRegistry();
        new FloatConvertlets().registerWith(conversions);

        ConversionPath<String, Float> stringToFloat =
                conversions.findConversion(String.class, Float.class);

        // Textual input is parsed before being converted to Float.
        Float converted = stringToFloat.convert("42.24", null);
        assertEquals(42.24, converted, 0.001);
    }
}
| 1.226563 | 1 |
src/main/java/fr/voltariuss/diagonia/model/entity/RankChallengeProgression.java | Voltariuss/diagonia-plugin | 0 | 545 | /*
* Copyright (c) 2022 - <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.voltariuss.diagonia.model.entity;
import fr.voltariuss.diagonia.model.entity.converter.UUIDConverter;
import java.util.UUID;
import javax.persistence.Column;
import javax.persistence.Convert;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.ToString;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.bukkit.Material;
/**
 * JPA entity recording a player's progression on a single rank-up challenge:
 * how many items of a given material have been given so far for a given rank.
 */
@Entity
@Table(name = "diagonia_rankup_challenge_progression")
@ToString
@Getter
@NoArgsConstructor(access = AccessLevel.PROTECTED)
@RequiredArgsConstructor
public class RankChallengeProgression {

  // Auto-generated surrogate primary key; never updated after insert.
  @Id
  @GeneratedValue(strategy = GenerationType.AUTO)
  @Column(name = "rankup_challenge_id", nullable = false, updatable = false)
  private long id;

  // UUID of the player owning this progression; persisted through UUIDConverter.
  @Column(name = "rankup_player_uuid", nullable = false, updatable = false)
  @Convert(converter = UUIDConverter.class)
  @Setter
  @NonNull
  private UUID playerUuid;

  // Identifier of the rank this challenge belongs to.
  @Column(name = "rankup_rank_id", nullable = false, updatable = false)
  @Setter
  @NonNull
  private String rankId;

  // Material required by the challenge; stored by enum name.
  @Column(name = "rankup_challenge_material", nullable = false, updatable = false)
  @Enumerated(EnumType.STRING)
  @Setter
  @NonNull
  private Material challengeMaterial;

  // Amount of the material the player has handed in so far.
  @Column(name = "rankup_challenge_amount_given", nullable = false)
  @Setter
  private int challengeAmountGiven;

  // Equality is based on the business fields (player, rank, material, amount)
  // and deliberately excludes the generated id — NOTE(review): presumably so
  // unsaved and persisted instances compare equal; confirm intended semantics.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof RankChallengeProgression that)) {
      return false;
    }
    return new EqualsBuilder()
        .append(playerUuid, that.playerUuid)
        .append(rankId, that.rankId)
        .append(challengeMaterial, that.challengeMaterial)
        .append(challengeAmountGiven, that.challengeAmountGiven)
        .isEquals();
  }

  // Uses the same field set as equals(), keeping the equals/hashCode contract.
  @Override
  public int hashCode() {
    return new HashCodeBuilder(17, 37)
        .append(playerUuid)
        .append(rankId)
        .append(challengeMaterial)
        .append(challengeAmountGiven)
        .toHashCode();
  }
}
| 1.242188 | 1 |
src/main/java/net/dzikoysk/netkit/NetkitPage.java | dzikoysk/NetView | 4 | 553 | package net.dzikoysk.netkit;
import net.dzikoysk.netkit.listener.LoadListener;
import org.apache.commons.lang.StringEscapeUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicLong;
public class NetkitPage {

    // Process-wide counter handing out a unique id per page instance.
    private static final AtomicLong idAssigner = new AtomicLong();

    private final long id;
    private final Netkit netkit;
    // Listeners for load events; exposed to subclasses via getLoadListeners().
    private final Collection<LoadListener> loadListeners;

    public NetkitPage(Netkit netkit) {
        this.id = idAssigner.getAndIncrement();
        this.netkit = netkit;
        this.loadListeners = new ArrayList<>(1);
    }

    /** Navigates this page to the given URL via the Netkit JS runtime. */
    public void loadURL(String url) {
        // NOTE(review): 'url' is spliced into a JS string literal without escaping;
        // a quote or backslash in the URL would break out of (or inject into) the
        // generated script — confirm whether callers guarantee sanitized URLs.
        netkit.executeScript("Netkit.loadURL(" + id + ",'" + url + "');");
    }

    /** Loads the given markup as this page's content. */
    public void loadContent(String content) {
        // NOTE(review): escapeHtml does not escape single quotes or backslashes,
        // so content containing ' can still terminate the JS string literal —
        // presumably JavaScript escaping is also needed; verify against the JS side.
        netkit.executeScript("Netkit.loadContent(" + id + ", '" + StringEscapeUtils.escapeHtml(content) + "');");
    }

    /** Exposes a Java object to this page's JavaScript under interfaceName. */
    public void addJavascriptInterface(String interfaceName, Object gateway) {
        // The gateway is registered globally under "<pageId>-<interfaceName>".
        String temp = getId() + "-" + interfaceName;
        netkit.addJavascriptInterface(temp, gateway);
        // NOTE(review): unlike loadURL above, 'temp' is emitted unquoted, so a value
        // like "5-foo" appears as a bare JS expression — verify the expected
        // argument form of Netkit.setPageJavascriptInterface.
        netkit.executeScript("Netkit.setPageJavascriptInterface(" + temp + ", '" + interfaceName + "');");
    }

    /** Runs an arbitrary script in the page's JS context. */
    public void executeScript(String script) {
        netkit.executeScript(script);
    }

    public void addLoadListener(LoadListener listener) {
        this.loadListeners.add(listener);
    }

    protected Collection<LoadListener> getLoadListeners() {
        return loadListeners;
    }

    public long getId() {
        return id;
    }
}
| 1.265625 | 1 |
src/test/java/org/jbpm/migration/scenarios/SingleTransitionWithActionTest.java | kiegroup/jbpmmigration | 3 | 561 | package org.jbpm.migration.scenarios;
import static org.jbpm.migration.tools.listeners.TrackingListenerAssert.assertProcessCompleted;
import static org.jbpm.migration.tools.listeners.TrackingListenerAssert.assertProcessStarted;
import static org.jbpm.migration.tools.listeners.TrackingListenerAssert.assertTriggeredAndLeft;
import org.jbpm.migration.JbpmMigrationRuntimeTest;
import org.jbpm.migration.tools.bpmn2.JavaNodeHandler;
import org.jbpm.migration.tools.jpdl.JpdlAssert;
import org.jbpm.migration.tools.jpdl.handlers.DefaultActionHandler;
import org.jbpm.migration.tools.jpdl.listeners.TrackingActionListener;
import org.jbpm.migration.tools.listeners.TrackingProcessEventListener;
import org.jbpm.graph.exe.ProcessInstance;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Tests migration of a node with a transition defining an action. The
 * transition should be migrated into a sequenceFlow -> scriptTask ->
 * sequenceFlow, and the action executed.
 */
public class SingleTransitionWithActionTest extends JbpmMigrationRuntimeTest {
    public static final String definition =
            "org/jbpm/migration/scenarios/singleTransitionWithAction/processdefinition.xml";
    public static final String processId = "SingleTransitionWithAction_Process";

    @BeforeClass
    public static void getTestReady() {
        prepareProcess(definition);
    }

    // Baseline: run the original jPDL definition and verify the action fires.
    @Test
    public void testJpdl() {
        ProcessInstance pi = processDef.createProcessInstance();

        TrackingActionListener listener = new TrackingActionListener();
        DefaultActionHandler.setTrackingListener(listener);
        DefaultActionHandler.setSignalization(false);
        pi.signal();

        JpdlAssert.assertProcessStarted(pi);
        // NOTE(review): the result of wasCalledOnNode is discarded, so this line
        // can never fail the test — presumably it should be asserted (e.g. wrapped
        // in assertTrue); confirm against TrackingActionListener's API.
        listener.wasCalledOnNode("node");
        JpdlAssert.assertProcessCompleted(pi);
    }

    // Migrated BPMN2 version: the same scenario must start, traverse the node
    // (now a scriptTask), and complete.
    @Test
    public void testBpmn() {
        ksession = kbase.newKieSession();
        TrackingProcessEventListener listener = new TrackingProcessEventListener();
        ksession.addEventListener(listener);
        JavaNodeHandler javaNodeHandler = new JavaNodeHandler();
        ksession.getWorkItemManager().registerWorkItemHandler("JavaNode", javaNodeHandler);
        ksession.startProcess(processId);

        assertProcessStarted(listener, processId);
        assertTriggeredAndLeft(listener, "node");
        assertProcessCompleted(listener, processId);
    }
}
| 1.242188 | 1 |
application/src/test/java/org/mifos/accounts/business/AccountStateMachineIntegrationTest.java | sureshkrishnamoorthy/suresh-mifos | 7 | 569 | /*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.accounts.business;
import java.util.List;
import junit.framework.Assert;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mifos.accounts.business.service.AccountBusinessService;
import org.mifos.accounts.util.helpers.AccountState;
import org.mifos.accounts.util.helpers.AccountStateFlag;
import org.mifos.accounts.util.helpers.AccountTypes;
import org.mifos.framework.MifosIntegrationTestCase;
import org.mifos.framework.business.service.ServiceFactory;
import org.mifos.framework.exceptions.ServiceUnavailableException;
import org.mifos.framework.exceptions.StatesInitializationException;
import org.mifos.framework.hibernate.helper.StaticHibernateUtil;
public class AccountStateMachineIntegrationTest extends MifosIntegrationTestCase {

    // Service under test; a fresh instance is created before each test method.
    private AccountBusinessService service;

    @Before
    public void setUp() throws Exception {
        this.service = new AccountBusinessService();
    }

    @After
    public void tearDown() throws Exception {
        // Clear the thread-local Hibernate session so cached entities do not
        // leak into the next test.
        StaticHibernateUtil.getSessionTL().clear();
    }

    @Test
    public void testGetStatusList() throws Exception {
        AccountStateMachines.getInstance().initialize(AccountTypes.LOAN_ACCOUNT, null);
        AccountStateEntity activeState =
                new AccountStateEntity(AccountState.LOAN_ACTIVE_IN_GOOD_STANDING);
        List<AccountStateEntity> nextStates =
                service.getStatusList(activeState, AccountTypes.LOAN_ACCOUNT, Short.valueOf("1"));
        Assert.assertEquals(2, nextStates.size());
    }

    @Test
    public void testGetStatusName() throws Exception {
        AccountStateMachines.getInstance().initialize(AccountTypes.LOAN_ACCOUNT, null);
        Assert.assertNotNull(service.getStatusName(AccountState.LOAN_CLOSED_RESCHEDULED, AccountTypes.LOAN_ACCOUNT));
    }

    @Test
    public void testGetFlagName() throws Exception {
        AccountStateMachines.getInstance().initialize(AccountTypes.LOAN_ACCOUNT, null);
        Assert.assertNotNull(service.getFlagName(AccountStateFlag.LOAN_WITHDRAW, AccountTypes.LOAN_ACCOUNT));
    }

    @Test
    @Ignore("Convert to unit test")
    public void testStatesInitializationException() throws Exception {
        try {
            AccountStateMachines.getInstance().initialize(AccountTypes.LOAN_ACCOUNT, null);
            Assert.fail();
        } catch (StatesInitializationException expected) {
            // expected: initialization is supposed to fail in this scenario
        } finally {
            StaticHibernateUtil.flushSession();
        }
    }

    @Test
    public void testServiceUnavailableException() throws Exception {
        try {
            service = (AccountBusinessService) ServiceFactory.getInstance().getBusinessService(null);
            Assert.fail();
        } catch (ServiceUnavailableException expected) {
            // expected: a null business service type cannot be resolved
        }
    }

    @Test
    public void testFlagForLoanCancelState() throws Exception {
        AccountStateMachines.getInstance().initialize(AccountTypes.LOAN_ACCOUNT, null);
        AccountStateEntity partialApplication =
                new AccountStateEntity(AccountState.LOAN_PARTIAL_APPLICATION);
        List<AccountStateEntity> nextStates =
                service.getStatusList(partialApplication, AccountTypes.LOAN_ACCOUNT, Short.valueOf("1"));
        for (AccountStateEntity nextState : nextStates) {
            if (!nextState.getId().equals(AccountState.LOAN_CANCELLED.getValue())) {
                continue;
            }
            // The cancelled state must carry exactly three flags, and the
            // reversal flag must not be among them.
            Assert.assertEquals(3, nextState.getFlagSet().size());
            for (AccountStateFlagEntity flag : nextState.getFlagSet()) {
                if (flag.getId().equals(AccountStateFlag.LOAN_REVERSAL.getValue())) {
                    Assert.fail();
                }
            }
        }
    }
}
| 1.15625 | 1 |
eclipse/RipplePower/src/org/ripple/power/txns/btc/MemoryLogHandler.java | cping/RipplePower | 40 | 577 | package org.ripple.power.txns.btc;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogManager;
import java.util.logging.LogRecord;
/**
 * A {@link Handler} that keeps the most recent log records in an in-memory
 * ring buffer so they can be retrieved later as formatted strings.
 *
 * <p>Configured via {@link LogManager} properties:
 * <ul>
 *   <li>{@code <class>.size} — ring buffer capacity (minimum 10, default 100)</li>
 *   <li>{@code <class>.level} — minimum level to store (default {@code ALL})</li>
 * </ul>
 */
public class MemoryLogHandler extends Handler {

    /** Default ring buffer size used when no ".size" property is configured */
    private static final int DEFAULT_SIZE = 100;

    /** Level OFF value, used to short-circuit publishing after close() */
    private static final int OFF_VALUE = Level.OFF.intValue();

    /** Ring buffer holding the most recent records; guarded by itself */
    private final LogRecord[] buffer;

    /** Index of the oldest buffered record; guarded by {@code buffer} */
    private int start = 0;

    /** Number of buffer entries in use; guarded by {@code buffer} */
    private int count = 0;

    /** Publish level; volatile because publish() reads it without a lock while close() writes it */
    private volatile Level level;

    /**
     * Create a MemoryLogHandler and configure it based on LogManager properties
     */
    public MemoryLogHandler() {
        LogManager manager = LogManager.getLogManager();
        String cname = getClass().getName();
        String value;
        //
        // Allocate the ring buffer (clamped to a minimum of 10 entries)
        //
        int bufferSize;
        try {
            value = manager.getProperty(cname + ".size");
            if (value != null)
                bufferSize = Math.max(Integer.parseInt(value.trim()), 10);
            else
                bufferSize = DEFAULT_SIZE;
        } catch (NumberFormatException exc) {
            bufferSize = DEFAULT_SIZE;
        }
        buffer = new LogRecord[bufferSize];
        //
        // Get publish level; fall back to ALL on a missing or malformed value
        //
        try {
            value = manager.getProperty(cname + ".level");
            if (value != null) {
                level = Level.parse(value.trim());
            } else {
                level = Level.ALL;
            }
        } catch (IllegalArgumentException exc) {
            level = Level.ALL;
        }
    }

    /**
     * Store a LogRecord in the ring buffer. When the buffer is full, the
     * oldest record is overwritten.
     *
     * @param record
     *            Description of the log event. A null record is silently
     *            ignored and is not published
     */
    @Override
    public void publish(LogRecord record) {
        if (record != null && record.getLevel().intValue() >= level.intValue() && level.intValue() != OFF_VALUE) {
            synchronized (buffer) {
                int ix = (start + count) % buffer.length;
                buffer[ix] = record;
                if (count < buffer.length) {
                    count++;
                } else {
                    // buffer full: overwrite and advance the oldest slot
                    start++;
                    start %= buffer.length;
                }
            }
        }
    }

    /**
     * Return the formatted log messages from the ring buffer, oldest first.
     *
     * @return List of log messages
     */
    public List<String> getMessages() {
        List<String> rtnList = new ArrayList<>(buffer.length);
        Formatter formatter = getFormatter();
        if (formatter == null) {
            // Handler.getFormatter() returns null when no formatter has been
            // configured; fall back to the JDK default instead of throwing a
            // NullPointerException on the first retrieval.
            formatter = new java.util.logging.SimpleFormatter();
        }
        synchronized (buffer) {
            int pos = start;
            for (int i = 0; i < count; i++) {
                rtnList.add(formatter.format(buffer[pos++]));
                if (pos == buffer.length)
                    pos = 0;
            }
        }
        return rtnList;
    }

    /**
     * Flush the ring buffer (discards all buffered records)
     */
    @Override
    public void flush() {
        synchronized (buffer) {
            start = 0;
            count = 0;
        }
    }

    /**
     * Close the handler; subsequent publish() calls are ignored
     */
    @Override
    public void close() {
        level = Level.OFF;
    }
}
| 1.492188 | 1 |
src/main/java/com/equalize/converter/core/util/ConversionExcelOutput.java | engswee/equalize-cpi-converter | 10 | 585 | package com.equalize.converter.core.util;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellType;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
/**
 * Writes a simple two-level row/cell structure of {@link Field}s into an
 * Excel workbook and returns the serialized bytes.
 *
 * <p>NOTE(review): the workbook is a per-instance field and is never closed,
 * and sheet names must be unique, so each instance is effectively single-use
 * — TODO confirm with callers before closing it in generateOutput().
 */
public class ConversionExcelOutput {
	private final Workbook workbook;

	/**
	 * Creates the POI workbook implementation for the requested format.
	 *
	 * @param excelFormat "xls" selects the legacy binary format (HSSF);
	 *                    any other value selects the OOXML format (XSSF)
	 */
	public ConversionExcelOutput(String excelFormat) {
		if (excelFormat.equals("xls"))
			this.workbook = new HSSFWorkbook();
		else
			this.workbook = new XSSFWorkbook();
	}

	/**
	 * Serializes {@code contents} into a single sheet and returns the
	 * workbook bytes.
	 *
	 * @param sheetName name of the sheet to create
	 * @param contents  one Field per row; each row's fieldContent is a list
	 *                  of Fields whose fieldContent is the cell's String value
	 * @return the serialized workbook
	 * @throws IOException if writing the workbook fails
	 */
	public byte[] generateOutput(String sheetName, List<Field> contents) throws IOException {
		Sheet sheet = this.workbook.createSheet(sheetName);
		// Generate rows and cells - for simple structure only
		for (int i = 0; i < contents.size(); i++) {
			@SuppressWarnings("unchecked")
			List<Field> rowContents = (List<Field>) contents.get(i).fieldContent;
			Row sheetRow = sheet.createRow(i);
			for (int j = 0; j < rowContents.size(); j++) {
				Cell cell = sheetRow.createCell(j);
				// setCellValue(String) implicitly makes this a string cell;
				// the explicit setCellType(CellType.STRING) call it replaces
				// is deprecated and was removed in POI 5.x.
				cell.setCellValue((String) rowContents.get(j).fieldContent);
			}
		}
		try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
			this.workbook.write(baos);
			return baos.toByteArray();
		}
	}
}
| 1.4375 | 1 |
guice/core/test/com/google/inject/BindingTest.java | fcopardo/roboguice | 1,919 | 593 | /*
* Copyright (C) 2007 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.inject;
import static com.google.inject.Asserts.assertContains;
import static com.google.inject.name.Names.named;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.Runnables;
import com.google.inject.matcher.Matchers;
import com.google.inject.spi.InjectionPoint;
import com.google.inject.spi.TypeEncounter;
import com.google.inject.spi.TypeListener;
import junit.framework.TestCase;
/*if[AOP]*/
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
/*end[AOP]*/
import java.lang.reflect.Constructor;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Logger;
/**
* @author <EMAIL> (<NAME>)
*/
public class BindingTest extends TestCase {

  // Fixture with field, constructor, and method injection points.
  static class Dependent {
    @Inject A a;
    @Inject Dependent(A a, B b) {}
    @Inject void injectBob(Bob bob) {}
  }

  // A and B depend on each other via field injection; explicit bindings must
  // still resolve the cycle.
  public void testExplicitCyclicDependency() {
    Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(A.class);
        bind(B.class);
      }
    }).getInstance(A.class);
  }

  static class A { @Inject B b; }
  static class B { @Inject A a; }

  static class Bob {}

  // Exercises every core binding flavor: linked, instance, provider instance,
  // provider class, untargeted class, and constant.
  static class MyModule extends AbstractModule {
    protected void configure() {
      // Linked.
      bind(Object.class).to(Runnable.class).in(Scopes.SINGLETON);

      // Instance.
      bind(Runnable.class).toInstance(Runnables.doNothing());

      // Provider instance.
      bind(Foo.class).toProvider(new Provider<Foo>() {
        public Foo get() {
          return new Foo();
        }
      }).in(Scopes.SINGLETON);

      // Provider.
      bind(Foo.class)
          .annotatedWith(named("provider"))
          .toProvider(FooProvider.class);

      // Class.
      bind(Bar.class).in(Scopes.SINGLETON);

      // Constant.
      bindConstant().annotatedWith(named("name")).to("Bob");
    }
  }

  static class Foo {}

  public static class FooProvider implements Provider<Foo> {
    public Foo get() {
      throw new UnsupportedOperationException();
    }
  }

  public static class Bar {}

  // Linking to an unbound, non-instantiable type must fail at injector
  // creation time with a clear message.
  public void testBindToUnboundLinkedBinding() {
    try {
      Guice.createInjector(new AbstractModule() {
        protected void configure() {
          bind(Collection.class).to(List.class);
        }
      });
      fail();
    } catch (CreationException expected) {
      assertContains(expected.getMessage(), "No implementation for java.util.List was bound.");
    }
  }

  /**
   * This test ensures that the asEagerSingleton() scoping applies to the key,
   * not to what the key is linked to.
   */
  public void testScopeIsAppliedToKeyNotTarget() {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(Integer.class).toProvider(Counter.class).asEagerSingleton();
        bind(Number.class).toProvider(Counter.class).asEagerSingleton();
      }
    });

    // Each key got its own singleton Counter value, so they differ.
    assertNotSame(injector.getInstance(Integer.class), injector.getInstance(Number.class));
  }

  static class Counter implements Provider<Integer> {
    static AtomicInteger next = new AtomicInteger(1);
    public Integer get() {
      return next.getAndIncrement();
    }
  }

  // With @Inject, a no-arg constructor of any visibility is injectable.
  public void testAnnotatedNoArgConstructor() {
    assertBindingSucceeds(PublicNoArgAnnotated.class);
    assertBindingSucceeds(ProtectedNoArgAnnotated.class);
    assertBindingSucceeds(PackagePrivateNoArgAnnotated.class);
    assertBindingSucceeds(PrivateNoArgAnnotated.class);
  }

  static class PublicNoArgAnnotated {
    @Inject public PublicNoArgAnnotated() { }
  }

  static class ProtectedNoArgAnnotated {
    @Inject protected ProtectedNoArgAnnotated() { }
  }

  static class PackagePrivateNoArgAnnotated {
    @Inject PackagePrivateNoArgAnnotated() { }
  }

  static class PrivateNoArgAnnotated {
    @Inject private PrivateNoArgAnnotated() { }
  }

  // Without @Inject, a private no-arg constructor is only usable when the
  // enclosing class itself is private.
  public void testUnannotatedNoArgConstructor() throws Exception{
    assertBindingSucceeds(PublicNoArg.class);
    assertBindingSucceeds(ProtectedNoArg.class);
    assertBindingSucceeds(PackagePrivateNoArg.class);
    assertBindingSucceeds(PrivateNoArgInPrivateClass.class);
    assertBindingFails(PrivateNoArg.class);
  }

  static class PublicNoArg {
    public PublicNoArg() { }
  }

  static class ProtectedNoArg {
    protected ProtectedNoArg() { }
  }

  static class PackagePrivateNoArg {
    PackagePrivateNoArg() { }
  }

  private static class PrivateNoArgInPrivateClass {
    PrivateNoArgInPrivateClass() { }
  }

  static class PrivateNoArg {
    private PrivateNoArg() { }
  }

  private void assertBindingSucceeds(final Class<?> clazz) {
    assertNotNull(Guice.createInjector().getInstance(clazz));
  }

  private void assertBindingFails(final Class<?> clazz) throws NoSuchMethodException {
    try {
      Guice.createInjector().getInstance(clazz);
      fail();
    } catch (ConfigurationException expected) {
      assertContains(expected.getMessage(),
          "Could not find a suitable constructor in " + PrivateNoArg.class.getName(),
          "at " + PrivateNoArg.class.getName() + ".class(BindingTest.java:");
    }
  }

  // More than one @Inject constructor is a configuration error.
  public void testTooManyConstructors() {
    try {
      Guice.createInjector().getInstance(TooManyConstructors.class);
      fail();
    } catch (ConfigurationException expected) {
      assertContains(expected.getMessage(),
          TooManyConstructors.class.getName() + " has more than one constructor annotated with "
              + "@Inject. Classes must have either one (and only one) constructor",
          "at " + TooManyConstructors.class.getName() + ".class(BindingTest.java:");
    }
  }

  static class TooManyConstructors {
    @Inject TooManyConstructors(Injector i) {}
    @Inject TooManyConstructors() {}
  }

  // toConstructor() binds a key directly to a specific constructor.
  public void testToConstructorBinding() throws NoSuchMethodException {
    final Constructor<D> constructor = D.class.getConstructor(Stage.class);

    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(Object.class).toConstructor(constructor);
      }
    });

    D d = (D) injector.getInstance(Object.class);
    assertEquals(Stage.DEVELOPMENT, d.stage);
  }

  // The same constructor may back multiple keys when a TypeLiteral resolves
  // its type parameters.
  public void testToConstructorBindingsOnParameterizedTypes() throws NoSuchMethodException {
    final Constructor<C> constructor = C.class.getConstructor(Stage.class, Object.class);
    final Key<Object> s = new Key<Object>(named("s")) {};
    final Key<Object> i = new Key<Object>(named("i")) {};

    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(s).toConstructor(constructor, new TypeLiteral<C<Stage>>() {});
        bind(i).toConstructor(constructor, new TypeLiteral<C<Injector>>() {});
      }
    });

    C<Stage> one = (C<Stage>) injector.getInstance(s);
    assertEquals(Stage.DEVELOPMENT, one.stage);
    assertEquals(Stage.DEVELOPMENT, one.t);
    assertEquals(Stage.DEVELOPMENT, one.anotherT);

    C<Injector> two = (C<Injector>) injector.getInstance(i);
    assertEquals(Stage.DEVELOPMENT, two.stage);
    assertEquals(injector, two.t);
    assertEquals(injector, two.anotherT);
  }

  // Without a TypeLiteral the raw type parameter T cannot be resolved.
  public void testToConstructorBindingsFailsOnRawTypes() throws NoSuchMethodException {
    final Constructor constructor = C.class.getConstructor(Stage.class, Object.class);

    try {
      Guice.createInjector(new AbstractModule() {
        protected void configure() {
          bind(Object.class).toConstructor(constructor);
        }
      });
      fail();
    } catch (CreationException expected) {
      assertContains(expected.getMessage(),
          "1) T cannot be used as a key; It is not fully specified.",
          "at " + C.class.getName() + ".<init>(BindingTest.java:",
          "2) T cannot be used as a key; It is not fully specified.",
          "at " + C.class.getName() + ".anotherT(BindingTest.java:");
    }
  }

  /*if[AOP]*/
  // Method interception must also apply to instances created via
  // toConstructor() bindings.
  public void testToConstructorAndMethodInterceptors() throws NoSuchMethodException {
    final Constructor<D> constructor = D.class.getConstructor(Stage.class);
    final AtomicInteger count = new AtomicInteger();
    final MethodInterceptor countingInterceptor = new MethodInterceptor() {
      public Object invoke(MethodInvocation methodInvocation) throws Throwable {
        count.incrementAndGet();
        return methodInvocation.proceed();
      }
    };

    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(Object.class).toConstructor(constructor);
        bindInterceptor(Matchers.any(), Matchers.any(), countingInterceptor);
      }
    });

    D d = (D) injector.getInstance(Object.class);
    d.hashCode();
    d.hashCode();
    assertEquals(2, count.get());
  }
  /*end[AOP]*/

  // toConstructor() works even for private constructors.
  public void testInaccessibleConstructor() throws NoSuchMethodException {
    final Constructor<E> constructor = E.class.getDeclaredConstructor(Stage.class);

    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(E.class).toConstructor(constructor);
      }
    });

    E e = injector.getInstance(E.class);
    assertEquals(Stage.DEVELOPMENT, e.stage);
  }

  // Scoping applies per key even when several keys share the constructor.
  public void testToConstructorAndScopes() throws NoSuchMethodException {
    final Constructor<F> constructor = F.class.getConstructor(Stage.class);

    final Key<Object> d = Key.get(Object.class, named("D")); // default scoping
    final Key<Object> s = Key.get(Object.class, named("S")); // singleton
    final Key<Object> n = Key.get(Object.class, named("N")); // "N" instances
    final Key<Object> r = Key.get(Object.class, named("R")); // a regular binding

    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(d).toConstructor(constructor);
        bind(s).toConstructor(constructor).in(Singleton.class);
        bind(n).toConstructor(constructor).in(Scopes.NO_SCOPE);
        bind(r).to(F.class);
      }
    });

    assertDistinct(injector, 1, d, d, d, d);
    assertDistinct(injector, 1, s, s, s, s);
    assertDistinct(injector, 4, n, n, n, n);
    assertDistinct(injector, 1, r, r, r, r);
    assertDistinct(injector, 4, d, d, r, r, s, s, n);
  }

  // Asserts that resolving the given keys produces exactly expectedCount
  // distinct instances.
  public void assertDistinct(Injector injector, int expectedCount, Key<?>... keys) {
    ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
    for (Key<?> k : keys) {
      builder.add(injector.getInstance(k));
    }
    assertEquals(expectedCount, builder.build().size());
  }

  // A type bound via toConstructor() is reported to TypeListeners exactly
  // once, even if several keys use the same constructor.
  public void testToConstructorSpiData() throws NoSuchMethodException {
    final Set<TypeLiteral<?>> heardTypes = Sets.newHashSet();

    final Constructor<D> constructor = D.class.getConstructor(Stage.class);
    final TypeListener listener = new TypeListener() {
      public <I> void hear(TypeLiteral<I> type, TypeEncounter<I> encounter) {
        if (!heardTypes.add(type)) {
          fail("Heard " + type + " multiple times!");
        }
      }
    };

    Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(Object.class).toConstructor(constructor);
        bind(D.class).toConstructor(constructor);
        bindListener(Matchers.any(), listener);
      }
    });

    assertEquals(ImmutableSet.of(TypeLiteral.get(D.class)), heardTypes);
  }

  public void testInterfaceToImplementationConstructor() throws NoSuchMethodException {
    final Constructor<CFoo> constructor = CFoo.class.getDeclaredConstructor();

    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(IFoo.class).toConstructor(constructor);
      }
    });

    injector.getInstance(IFoo.class);
  }

  public static interface IFoo {}
  public static class CFoo implements IFoo {}

  // getAllBindings() returns a snapshot: later JIT bindings appear in new
  // snapshots but must not mutate previously returned maps.
  public void testGetAllBindings() {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(D.class).toInstance(new D(Stage.PRODUCTION));
        bind(Object.class).to(D.class);
        getProvider(new Key<C<Stage>>() {});
      }
    });

    Map<Key<?>,Binding<?>> bindings = injector.getAllBindings();
    assertEquals(ImmutableSet.of(Key.get(Injector.class), Key.get(Stage.class), Key.get(D.class),
        Key.get(Logger.class), Key.get(Object.class), new Key<C<Stage>>() {}),
        bindings.keySet());

    // add a JIT binding
    injector.getInstance(F.class);

    Map<Key<?>,Binding<?>> bindings2 = injector.getAllBindings();
    assertEquals(ImmutableSet.of(Key.get(Injector.class), Key.get(Stage.class), Key.get(D.class),
        Key.get(Logger.class), Key.get(Object.class), new Key<C<Stage>>() {}, Key.get(F.class)),
        bindings2.keySet());

    // the original map shouldn't have changed
    assertEquals(ImmutableSet.of(Key.get(Injector.class), Key.get(Stage.class), Key.get(D.class),
        Key.get(Logger.class), Key.get(Object.class), new Key<C<Stage>>() {}),
        bindings.keySet());

    // check the bindings' values
    assertEquals(injector, bindings.get(Key.get(Injector.class)).getProvider().get());
  }

  public void testGetAllServletBindings() throws Exception {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(F.class); // an explicit binding that uses a JIT binding for a constructor
      }
    });

    injector.getAllBindings();
  }

  public static class C<T> {
    private Stage stage;
    private T t;
    @Inject T anotherT;

    public C(Stage stage, T t) {
      this.stage = stage;
      this.t = t;
    }

    @Inject C() {}
  }

  public static class D {
    Stage stage;
    public D(Stage stage) {
      this.stage = stage;
    }
  }

  private static class E {
    Stage stage;
    private E(Stage stage) {
      this.stage = stage;
    }
  }

  @Singleton
  public static class F {
    Stage stage;
    @Inject public F(Stage stage) {
      this.stage = stage;
    }
  }

  // A toConstructor() binding bypasses the untargeted binding for the same
  // type, so the "cooked" key gets plain Bacon, not the linked UncookedBacon.
  public void testTurkeyBaconProblemUsingToConstuctor() {
    Injector injector = Guice.createInjector(new AbstractModule() {
      @SuppressWarnings("unchecked")
      @Override
      public void configure() {
        bind(Bacon.class).to(UncookedBacon.class);
        bind(Bacon.class).annotatedWith(named("Turkey")).to(TurkeyBacon.class);
        bind(Bacon.class).annotatedWith(named("Cooked")).toConstructor(
            (Constructor)InjectionPoint.forConstructorOf(Bacon.class).getMember());
      }
    });
    Bacon bacon = injector.getInstance(Bacon.class);
    assertEquals(Food.PORK, bacon.getMaterial());
    assertFalse(bacon.isCooked());

    Bacon turkeyBacon = injector.getInstance(Key.get(Bacon.class, named("Turkey")));
    assertEquals(Food.TURKEY, turkeyBacon.getMaterial());
    assertTrue(turkeyBacon.isCooked());

    Bacon cookedBacon = injector.getInstance(Key.get(Bacon.class, named("Cooked")));
    assertEquals(Food.PORK, cookedBacon.getMaterial());
    assertTrue(cookedBacon.isCooked());
  }

  enum Food { TURKEY, PORK }

  private static class Bacon {
    public Food getMaterial() { return Food.PORK; }
    public boolean isCooked() { return true; }
  }

  private static class TurkeyBacon extends Bacon {
    public Food getMaterial() { return Food.TURKEY; }
  }

  private static class UncookedBacon extends Bacon {
    public boolean isCooked() { return false; }
  }
}
| 1.328125 | 1 |
Scheduler/src/main/java/com/kxj/SchedulerConfig.java | kong0827/SpringBoot-Learning | 5 | 601 | package com.kxj;
import com.kxj.dao.SpringScheduledCronRepository;
import com.kxj.entity.SpringScheduledCron;
import com.kxj.utils.SpringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.SchedulingConfigurer;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.scheduling.config.ScheduledTaskRegistrar;
import org.springframework.scheduling.support.CronTrigger;
import java.util.List;
/**
* @author xiangjin.kong
* @date 2020/10/22 14:12
*/
@Configuration
public class SchedulerConfig implements SchedulingConfigurer {
/*
此处成员变量应该使用@Value从配置中读取
*/
private int corePoolSize = 10;
@Autowired
SpringScheduledCronRepository springScheduledCronRepository;
@Autowired
ApplicationContext context;
@Override
public void configureTasks(ScheduledTaskRegistrar taskRegistrar) {
ThreadPoolTaskScheduler executor = getThreadPoolTaskScheduler();
// 设置线程池
taskRegistrar.setScheduler(executor);
List<SpringScheduledCron> springScheduledCrons = springScheduledCronRepository.findAll();
for (SpringScheduledCron springScheduledCron : springScheduledCrons) {
Class<?> clazz = null;
Object task = null;
try {
clazz = Class.forName(springScheduledCron.getCronKey());
task = context.getBean(clazz);
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
taskRegistrar.addTriggerTask(((Runnable) task), triggerContext -> {
String cronExpression = springScheduledCronRepository.findByCronKey(springScheduledCron.getCronKey()).getCronExpression();
return new CronTrigger(cronExpression).nextExecutionTime(triggerContext);
});
}
}
private ThreadPoolTaskScheduler getThreadPoolTaskScheduler() {
ThreadPoolTaskScheduler executor = new ThreadPoolTaskScheduler();
executor.setPoolSize(corePoolSize);
executor.setThreadNamePrefix("scheduled-task-pool-");
executor.initialize();
return executor;
}
}
| 1.398438 | 1 |
core/src/main/java/com/netflix/iceberg/MergingSnapshotUpdate.java | rdsr/incubator-iceberg | 0 | 609 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.netflix.iceberg;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.netflix.iceberg.ManifestEntry.Status;
import com.netflix.iceberg.exceptions.RuntimeIOException;
import com.netflix.iceberg.exceptions.ValidationException;
import com.netflix.iceberg.expressions.Evaluator;
import com.netflix.iceberg.expressions.Expression;
import com.netflix.iceberg.expressions.Expressions;
import com.netflix.iceberg.expressions.Projections;
import com.netflix.iceberg.expressions.StrictMetricsEvaluator;
import com.netflix.iceberg.io.OutputFile;
import com.netflix.iceberg.util.BinPacking.ListPacker;
import com.netflix.iceberg.util.CharSequenceWrapper;
import com.netflix.iceberg.util.StructLikeWrapper;
import com.netflix.iceberg.util.Tasks;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
import static com.netflix.iceberg.TableProperties.MANIFEST_MIN_MERGE_COUNT;
import static com.netflix.iceberg.TableProperties.MANIFEST_MIN_MERGE_COUNT_DEFAULT;
import static com.netflix.iceberg.TableProperties.MANIFEST_TARGET_SIZE_BYTES;
import static com.netflix.iceberg.TableProperties.MANIFEST_TARGET_SIZE_BYTES_DEFAULT;
import static com.netflix.iceberg.util.ThreadPools.getWorkerPool;
abstract class MergingSnapshotUpdate extends SnapshotUpdate {
private final Logger LOG = LoggerFactory.getLogger(getClass());
private static final Joiner COMMA = Joiner.on(",");
  /**
   * Signals that an update would delete a data file that still contains rows
   * not matched by the delete criteria. Thrown from the filtering phase when
   * fail-on-any-delete mode is enabled.
   */
  protected static class DeleteException extends ValidationException {
    // partition path of the file whose deletion was rejected
    private final String partition;

    private DeleteException(String partition) {
      super("Operation would delete existing data");
      this.partition = partition;
    }

    /** Returns the partition path of the file whose deletion was rejected. */
    public String partition() {
      return partition;
    }
  }
  private final TableOperations ops;
  private final PartitionSpec spec;
  // merge tuning, read from table properties in the constructor
  private final long manifestTargetSizeBytes;
  private final int minManifestsCountToMerge;

  // update data
  private final AtomicInteger manifestCount = new AtomicInteger(0); // numbers new manifest files
  private final List<DataFile> newFiles = Lists.newArrayList();
  private final Set<CharSequenceWrapper> deletePaths = Sets.newHashSet();
  private final Set<StructLikeWrapper> dropPartitions = Sets.newHashSet();
  private Expression deleteExpression = Expressions.alwaysFalse();
  private boolean failAnyDelete = false;
  private boolean failMissingDeletePaths = false;

  // cache the new manifest once it is written
  private ManifestFile newManifest = null;
  private boolean hasNewFiles = false;

  // cache merge results to reuse when retrying
  private final Map<List<ManifestFile>, ManifestFile> mergeManifests = Maps.newConcurrentMap();

  // cache filtered manifests to avoid extra work when commits fail.
  private final Map<ManifestFile, ManifestFile> filteredManifests = Maps.newConcurrentMap();

  // tracking where files were deleted to validate retries quickly
  private final Map<ManifestFile, Set<CharSequenceWrapper>> filteredManifestToDeletedFiles =
      Maps.newConcurrentMap();

  private boolean filterUpdated = false; // used to clear caches of filtered and merged manifests
  /**
   * Creates a merging snapshot update bound to the table's current partition
   * spec, reading merge tuning knobs from table properties (with library
   * defaults when unset).
   */
  MergingSnapshotUpdate(TableOperations ops) {
    super(ops);
    this.ops = ops;
    this.spec = ops.current().spec();
    this.manifestTargetSizeBytes = ops.current()
        .propertyAsLong(MANIFEST_TARGET_SIZE_BYTES, MANIFEST_TARGET_SIZE_BYTES_DEFAULT);
    this.minManifestsCountToMerge = ops.current()
        .propertyAsInt(MANIFEST_MIN_MERGE_COUNT, MANIFEST_MIN_MERGE_COUNT_DEFAULT);
  }
  /** Returns the partition spec used for new files. */
  protected PartitionSpec writeSpec() {
    // the spec is set when the write is started
    return spec;
  }

  /** Returns the accumulated row-filter expression that matches files to delete. */
  protected Expression rowFilter() {
    return deleteExpression;
  }

  /** Returns the data files staged for addition in the pending snapshot. */
  protected List<DataFile> addedFiles() {
    return newFiles;
  }

  /** After this call, any matched deletion aborts the update with a {@link DeleteException}. */
  protected void failAnyDelete() {
    this.failAnyDelete = true;
  }

  /** After this call, apply() validates that every explicitly deleted path was found. */
  protected void failMissingDeletePaths() {
    this.failMissingDeletePaths = true;
  }
  /**
   * Add a filter to match files to delete. A file will be deleted if all of the rows it contains
   * match this or any other filter passed to this method.
   *
   * @param expr an expression to match rows.
   */
  protected void deleteByRowFilter(Expression expr) {
    Preconditions.checkNotNull(expr, "Cannot delete files using filter: null");
    // any change to the delete criteria invalidates cached filter/merge results
    this.filterUpdated = true;
    this.deleteExpression = Expressions.or(deleteExpression, expr);
  }

  /**
   * Add a partition tuple to drop from the table during the delete phase.
   */
  protected void dropPartition(StructLike partition) {
    dropPartitions.add(StructLikeWrapper.wrap(partition));
  }

  /**
   * Add a specific path to be deleted in the new snapshot.
   */
  protected void delete(CharSequence path) {
    Preconditions.checkNotNull(path, "Cannot delete file path: null");
    // path deletes also invalidate cached filter/merge results
    this.filterUpdated = true;
    deletePaths.add(CharSequenceWrapper.wrap(path));
  }

  /**
   * Add a file to the new snapshot.
   */
  protected void add(DataFile file) {
    hasNewFiles = true;
    newFiles.add(file);
  }
  /**
   * Produces the manifest list for the pending snapshot: writes new files to a
   * manifest, filters existing manifests against the accumulated deletes, and
   * merges manifests that share a partition spec.
   *
   * @param base current table metadata to apply the update to
   * @return the manifests for the new snapshot
   */
  @Override
  public List<ManifestFile> apply(TableMetadata base) {
    // deletes changed since the last apply: drop cached filter results
    if (filterUpdated) {
      cleanUncommittedFilters(SnapshotUpdate.EMPTY_SET);
      this.filterUpdated = false;
    }

    Snapshot current = base.currentSnapshot();
    // keyed by partition spec id, iterated newest spec first
    Map<Integer, List<ManifestFile>> groups = Maps.newTreeMap(Comparator.<Integer>reverseOrder());

    // use a common metrics evaluator for all manifests because it is bound to the table schema
    StrictMetricsEvaluator metricsEvaluator = new StrictMetricsEvaluator(
        ops.current().schema(), deleteExpression);

    // add the current spec as the first group. files are added to the beginning.
    try {
      if (newFiles.size() > 0) {
        ManifestFile newManifest = newFilesAsManifest();
        List<ManifestFile> manifestGroup = Lists.newArrayList();
        manifestGroup.add(newManifest);
        groups.put(newManifest.partitionSpecId(), manifestGroup);
      }

      Set<CharSequenceWrapper> deletedFiles = Sets.newHashSet();

      // group manifests by compatible partition specs to be merged
      if (current != null) {
        List<ManifestFile> manifests = current.manifests();
        ManifestFile[] filtered = new ManifestFile[manifests.size()];
        // open all of the manifest files in parallel, use index to avoid reordering
        Tasks.range(filtered.length)
            .stopOnFailure().throwFailureWhenFinished()
            .executeWith(getWorkerPool())
            .run(index -> {
              ManifestFile manifest = filterManifest(
                  deleteExpression, metricsEvaluator,
                  manifests.get(index));
              filtered[index] = manifest;
            }, IOException.class);

        for (ManifestFile manifest : filtered) {
          // collect the paths deleted by filtering, for validation below
          Set<CharSequenceWrapper> manifestDeletes = filteredManifestToDeletedFiles.get(manifest);
          if (manifestDeletes != null) {
            deletedFiles.addAll(manifestDeletes);
          }
          List<ManifestFile> group = groups.get(manifest.partitionSpecId());
          if (group != null) {
            group.add(manifest);
          } else {
            group = Lists.newArrayList();
            group.add(manifest);
            groups.put(manifest.partitionSpecId(), group);
          }
        }
      }

      // merge each spec's group into the final manifest list
      List<ManifestFile> manifests = Lists.newArrayList();
      for (Map.Entry<Integer, List<ManifestFile>> entry : groups.entrySet()) {
        for (ManifestFile manifest : mergeGroup(entry.getKey(), entry.getValue())) {
          manifests.add(manifest);
        }
      }

      // when requested, fail if any explicitly deleted path was never found
      ValidationException.check(!failMissingDeletePaths || deletedFiles.containsAll(deletePaths),
          "Missing required files to delete: %s",
          COMMA.join(transform(filter(deletePaths,
              path -> !deletedFiles.contains(path)),
              CharSequenceWrapper::get)));

      return manifests;

    } catch (IOException e) {
      throw new RuntimeIOException(e, "Failed to create snapshot manifest list");
    }
  }
private void cleanUncommittedMerges(Set<ManifestFile> committed) {
// iterate over a copy of entries to avoid concurrent modification
List<Map.Entry<List<ManifestFile>, ManifestFile>> entries =
Lists.newArrayList(mergeManifests.entrySet());
for (Map.Entry<List<ManifestFile>, ManifestFile> entry : entries) {
// delete any new merged manifests that aren't in the committed list
ManifestFile merged = entry.getValue();
if (!committed.contains(merged)) {
deleteFile(merged.path());
// remove the deleted file from the cache
mergeManifests.remove(entry.getKey());
}
}
}
  /**
   * Deletes filtered-manifest copies that were written but did not make it
   * into the committed set, and drops them from the filter cache. Entries
   * where filtering left the original manifest unchanged have no copy to
   * delete.
   */
  private void cleanUncommittedFilters(Set<ManifestFile> committed) {
    // iterate over a copy of entries to avoid concurrent modification
    List<Map.Entry<ManifestFile, ManifestFile>> filterEntries =
        Lists.newArrayList(filteredManifests.entrySet());

    for (Map.Entry<ManifestFile, ManifestFile> entry : filterEntries) {
      // remove any new filtered manifests that aren't in the committed list
      ManifestFile manifest = entry.getKey();
      ManifestFile filtered = entry.getValue();
      if (!committed.contains(filtered)) {
        // only delete if the filtered copy was created
        if (!manifest.equals(filtered)) {
          deleteFile(filtered.path());
        }

        // remove the entry from the cache
        filteredManifests.remove(manifest);
      }
    }
  }
@Override
protected void cleanUncommitted(Set<ManifestFile> committed) {
if (!committed.contains(newManifest)) {
deleteFile(newManifest.path());
this.newManifest = null;
}
cleanUncommittedMerges(committed);
cleanUncommittedFilters(committed);
}
private boolean nothingToFilter() {
return (deleteExpression == null || deleteExpression == Expressions.alwaysFalse()) &&
deletePaths.isEmpty() && dropPartitions.isEmpty();
}
/**
 * Produces a filtered version of the input manifest with all entries that match the
 * pending deletes removed.
 *
 * @param deleteExpression row-filter expression identifying files to delete
 * @param metricsEvaluator strict metrics evaluator used to prove that every row in a
 *                         file matches the delete expression
 * @param manifest the manifest to filter
 * @return a ManifestFile that is a filtered version of the input manifest; the input
 *         manifest itself when there is nothing to remove
 * @throws IOException if the manifest cannot be read or the filtered copy written
 */
private ManifestFile filterManifest(Expression deleteExpression,
    StrictMetricsEvaluator metricsEvaluator,
    ManifestFile manifest) throws IOException {
  // reuse a previously computed result for this manifest, if any
  ManifestFile cached = filteredManifests.get(manifest);
  if (cached != null) {
    return cached;
  }

  // no deletes are pending: the manifest passes through unchanged
  if (nothingToFilter()) {
    filteredManifests.put(manifest, manifest);
    return manifest;
  }

  try (ManifestReader reader = ManifestReader.read(ops.newInputFile(manifest.path()))) {
    // inclusive projection: true when a file's partition MAY contain matching rows
    Expression inclusiveExpr = Projections
        .inclusive(reader.spec())
        .project(deleteExpression);
    Evaluator inclusive = new Evaluator(reader.spec().partitionType(), inclusiveExpr);

    // strict projection: true when every row in the file's partition MUST match
    Expression strictExpr = Projections
        .strict(reader.spec())
        .project(deleteExpression);
    Evaluator strict = new Evaluator(reader.spec().partitionType(), strictExpr);

    // this is reused to compare file paths with the delete set
    CharSequenceWrapper pathWrapper = CharSequenceWrapper.wrap("");

    // reused to compare file partitions with the drop set
    StructLikeWrapper partitionWrapper = StructLikeWrapper.wrap(null);

    // this assumes that the manifest doesn't have files to remove and streams through the
    // manifest without copying data. if a manifest does have a file to remove, this will break
    // out of the loop and move on to filtering the manifest.
    boolean hasDeletedFiles = false;
    for (ManifestEntry entry : reader.entries()) {
      DataFile file = entry.file();
      boolean fileDelete = (deletePaths.contains(pathWrapper.set(file.path())) ||
          dropPartitions.contains(partitionWrapper.set(file.partition())));
      if (fileDelete || inclusive.eval(file.partition())) {
        // a file may only be deleted when it is named explicitly, its partition is
        // dropped, or metrics prove that ALL of its rows match the filter
        ValidationException.check(
            fileDelete || strict.eval(file.partition()) || metricsEvaluator.eval(file),
            "Cannot delete file where some, but not all, rows match filter %s: %s",
            this.deleteExpression, file.path());

        hasDeletedFiles = true;
        if (failAnyDelete) {
          throw new DeleteException(writeSpec().partitionToPath(file.partition()));
        }
        break; // as soon as a deleted file is detected, stop scanning
      }
    }

    // nothing matched: cache and return the manifest unchanged
    if (!hasDeletedFiles) {
      filteredManifests.put(manifest, manifest);
      return manifest;
    }

    // when this point is reached, there is at least one file that will be deleted in the
    // manifest. produce a copy of the manifest with all deleted files removed.
    Set<CharSequenceWrapper> deletedPaths = Sets.newHashSet();
    OutputFile filteredCopy = manifestPath(manifestCount.getAndIncrement());
    ManifestWriter writer = new ManifestWriter(reader.spec(), filteredCopy, snapshotId());
    try {
      // second pass: write deletes for matching entries, carry the rest forward as existing
      for (ManifestEntry entry : reader.entries()) {
        DataFile file = entry.file();
        boolean fileDelete = (deletePaths.contains(pathWrapper.set(file.path())) ||
            dropPartitions.contains(partitionWrapper.set(file.partition())));
        if (entry.status() != Status.DELETED) {
          if (fileDelete || inclusive.eval(file.partition())) {
            ValidationException.check(
                fileDelete || strict.eval(file.partition()) || metricsEvaluator.eval(file),
                "Cannot delete file where some, but not all, rows match filter %s: %s",
                this.deleteExpression, file.path());

            writer.delete(entry);

            // track deleted paths so duplicates can be detected and logged
            CharSequenceWrapper wrapper = CharSequenceWrapper.wrap(entry.file().path());
            if (deletedPaths.contains(wrapper)) {
              LOG.warn("Deleting a duplicate path from manifest {}: {}",
                  manifest.path(), wrapper.get());
            }
            deletedPaths.add(wrapper);

          } else {
            writer.addExisting(entry);
          }
        }
      }
    } finally {
      writer.close();
    }

    // produce the ManifestFile for the filtered copy
    ManifestFile filtered = writer.toManifestFile();

    // update caches
    filteredManifests.put(manifest, filtered);
    filteredManifestToDeletedFiles.put(filtered, deletedPaths);

    return filtered;
  }
}
/**
 * Merges a group of manifests (all written with the same partition spec) into fewer,
 * larger manifests while preserving the original manifest order.
 *
 * @param specId partition spec ID shared by all manifests in the group
 * @param group the manifests to bin-pack and merge
 * @return the resulting manifests, in the original order
 * @throws IOException if a merged manifest cannot be written
 */
@SuppressWarnings("unchecked")
private Iterable<ManifestFile> mergeGroup(int specId, List<ManifestFile> group)
    throws IOException {
  // use a lookback of 1 to avoid reordering the manifests. using 1 also means this should pack
  // from the end so that the manifest that gets under-filled is the first one, which will be
  // merged the next time.
  ListPacker<ManifestFile> packer = new ListPacker<>(manifestTargetSizeBytes, 1);
  List<List<ManifestFile>> bins = packer.packEnd(group, manifest -> manifest.length());

  // process bins in parallel, but put results in the order of the bins into an array to preserve
  // the order of manifests and contents. preserving the order helps avoid random deletes when
  // data files are eventually aged off.
  List<ManifestFile>[] binResults = (List<ManifestFile>[])
      Array.newInstance(List.class, bins.size());
  Tasks.range(bins.size())
      .stopOnFailure().throwFailureWhenFinished()
      .executeWith(getWorkerPool())
      .run(index -> {
        List<ManifestFile> bin = bins.get(index);
        List<ManifestFile> outputManifests = Lists.newArrayList();
        // publish the (still-empty) result list before filling it; each task owns its slot
        binResults[index] = outputManifests;

        if (bin.size() == 1) {
          // no need to rewrite
          outputManifests.add(bin.get(0));
          return;
        }

        // if the bin has a new manifest (the new data files) then only merge it if the number of
        // manifests is above the minimum count. this is applied only to bins with an in-memory
        // manifest so that large manifests don't prevent merging older groups.
        if (bin.contains(newManifest) && bin.size() < minManifestsCountToMerge) {
          // not enough to merge, add all manifest files to the output list
          outputManifests.addAll(bin);
        } else {
          // merge the group
          outputManifests.add(createManifest(specId, bin));
        }
      }, IOException.class);

  return Iterables.concat(binResults);
}
/**
 * Writes a single manifest that combines the entries of all manifests in the bin.
 * Results are cached per bin so repeated commit attempts reuse the written file.
 *
 * @param specId partition spec ID to write the merged manifest with
 * @param bin the manifests to merge
 * @return the merged manifest file
 * @throws IOException if a manifest cannot be read or written
 */
private ManifestFile createManifest(int specId, List<ManifestFile> bin) throws IOException {
  // if this merge was already rewritten, use the existing file.
  // if the new files are in this merge, then the ManifestFile for the new files has changed and
  // will be a cache miss.
  if (mergeManifests.containsKey(bin)) {
    return mergeManifests.get(bin);
  }

  OutputFile out = manifestPath(manifestCount.getAndIncrement());

  ManifestWriter writer = new ManifestWriter(ops.current().spec(specId), out, snapshotId());
  try {
    for (ManifestFile manifest : bin) {
      try (ManifestReader reader = ManifestReader.read(ops.newInputFile(manifest.path()))) {
        for (ManifestEntry entry : reader.entries()) {
          if (entry.status() == Status.DELETED) {
            // suppress deletes from previous snapshots. only files deleted by this snapshot
            // should be added to the new manifest
            if (entry.snapshotId() == snapshotId()) {
              writer.add(entry);
            }
          } else if (entry.status() == Status.ADDED && entry.snapshotId() == snapshotId()) {
            // adds from this snapshot are still adds, otherwise they should be existing
            writer.add(entry);
          } else {
            // add all files from the old manifest as existing files
            writer.addExisting(entry);
          }
        }
      }
    }
  } finally {
    writer.close();
  }

  ManifestFile manifest = writer.toManifestFile();

  // update the cache
  mergeManifests.put(bin, manifest);

  return manifest;
}
/**
 * Writes the pending new data files into a manifest, reusing the previously written
 * manifest when no files were added since it was created.
 */
private ManifestFile newFilesAsManifest() throws IOException {
  // a stale manifest exists when files were added after it was written; discard it
  if (hasNewFiles && newManifest != null) {
    deleteFile(newManifest.path());
    newManifest = null;
  }

  // an up-to-date manifest is already available
  if (newManifest != null) {
    return newManifest;
  }

  OutputFile out = manifestPath(manifestCount.getAndIncrement());
  ManifestWriter writer = new ManifestWriter(spec, out, snapshotId());
  try {
    writer.addAll(newFiles);
  } finally {
    writer.close();
  }

  this.newManifest = writer.toManifestFile();
  this.hasNewFiles = false;
  return newManifest;
}
}
| 1.304688 | 1 |
public class PermutationTest {
    // Experimental readings: the first 9 values form the treatment group,
    // the remaining 10 the control group.
    private static final int[] data = new int[]{
            85, 88, 75, 66, 25, 29, 83, 39, 97,
            68, 41, 10, 49, 16, 65, 32, 92, 28, 98
    };

    /**
     * Counts the ways of choosing {@code remain} values from data[0..at-1] whose
     * sum, added to {@code accu}, exceeds {@code treat}.
     */
    private static int countExceeding(int at, int remain, int accu, int treat) {
        if (remain == 0) {
            return accu > treat ? 1 : 0;
        }
        // either take data[at-1] into the subset ...
        int taking = countExceeding(at - 1, remain - 1, accu + data[at - 1], treat);
        // ... or skip it, when enough elements remain to fill the subset
        int skipping = at > remain ? countExceeding(at - 1, remain, accu, treat) : 0;
        return taking + skipping;
    }

    public static void main(String[] args) {
        // sum of the treatment group (first 9 readings)
        int treat = 0;
        for (int i = 0; i <= 8; ++i) {
            treat += data[i];
        }

        // total = C(19, 9): multiply 19!/10!, then divide out 9!
        double total = 1.0;
        for (int i = 19; i >= 11; --i) {
            total *= i;
        }
        for (int i = 9; i >= 1; --i) {
            total /= i;
        }

        int gt = countExceeding(19, 9, 0, treat);
        int le = (int) (total - gt);

        System.out.printf("<= : %f%% %d\n", 100.0 * le / total, le);
        System.out.printf(" > : %f%% %d\n", 100.0 * gt / total, gt);
    }
}
| 2.28125 | 2 |
/*
* Copyright (c) 2006, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 6453386
* @summary Verify that example code in Elements.overrides works as spec'ed.
* @author <NAME>
* @library ../../../lib
* @build JavacTestingAbstractProcessor
* @compile -g OverridesSpecEx.java
* @compile -processor OverridesSpecEx -proc:only OverridesSpecEx.java
*/
import java.util.Set;
import javax.annotation.processing.*;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.*;
import javax.lang.model.type.*;
import javax.lang.model.util.*;
import static javax.lang.model.util.ElementFilter.*;
public class OverridesSpecEx extends JavacTestingAbstractProcessor {
    /**
     * Entry point for each annotation-processing round; runs the checks once,
     * skipping the final (processingOver) round.
     */
    public boolean process(Set<? extends TypeElement> annoTypes,
                           RoundEnvironment round) {
        if (!round.processingOver())
            doit(annoTypes, round);
        return true;
    }

    /**
     * Exercises Elements.overrides with the examples from its specification:
     * String.hashCode vs Object.hashCode, then the A/B/C fodder hierarchy below.
     */
    private void doit(Set<? extends TypeElement> annoTypes,
                      RoundEnvironment round) {
        TypeElement string = elements.getTypeElement("java.lang.String");
        TypeElement object = elements.getTypeElement("java.lang.Object");

        ExecutableElement m1 = null;
        ExecutableElement m2 = null;
        // locate String.hashCode
        for (ExecutableElement m : methodsIn(string.getEnclosedElements())) {
            if (m.getSimpleName().contentEquals("hashCode")) {
                m1 = m;
                break;
            }
        }
        // locate Object.hashCode
        for (ExecutableElement m : methodsIn(object.getEnclosedElements())) {
            if (m.getSimpleName().contentEquals("hashCode")) {
                m2 = m;
                break;
            }
        }

        boolean res =
            elements.overrides(m1, m2, (TypeElement) m1.getEnclosingElement());
        System.out.println("String.hashCode overrides Object.hashCode? " + res);
        checkResult(res);

        TypeElement a = elements.getTypeElement("OverridesSpecEx.A");
        TypeElement b = elements.getTypeElement("OverridesSpecEx.B");
        TypeElement c = elements.getTypeElement("OverridesSpecEx.C");

        m1 = null;
        m2 = null;
        // each fodder type declares exactly one method; grab the last one seen
        for (ExecutableElement m : methodsIn(a.getEnclosedElements()))
            m1 = m;
        for (ExecutableElement m : methodsIn(b.getEnclosedElements()))
            m2 = m;

        // from type A (which does not implement B), A.m does not override B.m
        res = elements.overrides(m1, m2, a);
        System.out.println("A.m overrides B.m in B? " + res);
        checkResult(!res);

        // from type C (extends A, implements B), A.m does override B.m
        res = elements.overrides(m1, m2, c);
        System.out.println("A.m overrides B.m in C? " + res);
        checkResult(res);
    }

    /** Throws AssertionError when an observed result contradicts the spec. */
    private static void checkResult(boolean truthiness) {
        if (!truthiness)
            throw new AssertionError("Bogus result");
    }

    // Fodder for the processor
    class A {
        public void m() {}
    }

    interface B {
        void m();
    }

    class C extends A implements B {
    }
}
| 1.257813 | 1 |
package com.simibubi.create.foundation.render.backend.instancing;
import java.util.Map;
import javax.annotation.Nullable;
import com.google.common.collect.Maps;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.tileentity.TileEntityType;
/**
 * Global registry mapping tile-entity types to factories that build their
 * instanced renderers.
 */
public class InstancedTileRenderRegistry {

    /** Shared singleton instance of the registry. */
    public static final InstancedTileRenderRegistry instance = new InstancedTileRenderRegistry();

    private final Map<TileEntityType<?>, IRendererFactory<?>> renderers = Maps.newHashMap();

    /** Associates a renderer factory with the given tile-entity type. */
    public <T extends TileEntity> void register(TileEntityType<? extends T> type, IRendererFactory<? super T> rendererFactory) {
        renderers.put(type, rendererFactory);
    }

    /**
     * Builds a renderer instance for the given tile, or returns null when no
     * factory is registered for the tile's type.
     */
    @SuppressWarnings("unchecked")
    @Nullable
    public <T extends TileEntity> TileEntityInstance<? super T> create(InstancedTileRenderer<?> manager, T tile) {
        // registration is keyed by type, so the erased cast back to T's factory is safe
        IRendererFactory<? super T> factory =
                (IRendererFactory<? super T>) renderers.get(tile.getType());
        return factory == null ? null : factory.create(manager, tile);
    }
}
| 1.15625 | 1 |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package agent.gdb.model.impl;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import agent.gdb.manager.*;
import agent.gdb.manager.impl.*;
import agent.gdb.manager.impl.cmd.GdbStateChangeRecord;
import agent.gdb.manager.reason.GdbReason;
import ghidra.async.AsyncUtils;
import ghidra.dbg.agent.DefaultTargetModelRoot;
import ghidra.dbg.error.DebuggerIllegalArgumentException;
import ghidra.dbg.target.*;
import ghidra.dbg.target.TargetLauncher.TargetCmdLineLauncher;
import ghidra.dbg.target.schema.*;
import ghidra.dbg.util.PathUtils;
import ghidra.util.Msg;
/**
* TODO: We should probably expose the raw CLI (if available) via TargetConsole, and perhaps re-work
* the UI to use it when available. This could more generally solve the multi-line input thing, and
* provide a distinction between API access (where {@link TargetInterpreter} makes more sense), and
* I/O access (where {@link TargetConsole}) makes more sense. I'm hoping this will also allow the
* CLI to prompt the user when appropriate, e.g., on {@code quit} when an inferior is active. NOTE:
* Probably should not expose raw MI2 via TargetConsole
*/
@TargetObjectSchemaInfo(
	name = "Session",
	elements = {
		@TargetElementType(type = Void.class) },
	attributes = {
		@TargetAttributeType(type = Void.class) })
public class GdbModelTargetSession extends DefaultTargetModelRoot
		implements TargetAccessConditioned, TargetAttacher, TargetInterpreter, TargetInterruptible,
		TargetCmdLineLauncher, TargetActiveScope, TargetEventScope, TargetFocusScope,
		GdbConsoleOutputListener, GdbEventsListenerAdapter {
	// prompt string advertised via the PROMPT attribute for interpreter clients
	protected static final String GDB_PROMPT = "(gdb)";

	protected final GdbModelImpl impl;
	// display string for this session; refreshed from "show version" output in getVersion()
	protected String display = "GNU gdb (GDB)";

	protected final GdbModelTargetInferiorContainer inferiors;
	protected final GdbModelTargetAvailableContainer available;
	protected final GdbModelTargetBreakpointContainer breakpoints;

	private boolean accessible = true;
	// last object selected via setFocus(); initially this session itself (see constructor)
	protected GdbModelSelectableObject focus;
	protected String debugger = "gdb"; // Used by GdbModelTargetEnvironment

	public GdbModelTargetSession(GdbModelImpl impl, TargetObjectSchema schema) {
		super(impl, "Session", schema);
		this.impl = impl;
		//impl.addModelObject(session, this);

		this.inferiors = new GdbModelTargetInferiorContainer(this);
		this.available = new GdbModelTargetAvailableContainer(this);
		this.breakpoints = new GdbModelTargetBreakpointContainer(this);

		// publish the fixed containers and initial attribute values in one change
		changeAttributes(List.of(), Map.of( //
			inferiors.getName(), inferiors, //
			available.getName(), available, //
			breakpoints.getName(), breakpoints, //
			ACCESSIBLE_ATTRIBUTE_NAME, accessible, //
			PROMPT_ATTRIBUTE_NAME, GDB_PROMPT, //
			DISPLAY_ATTRIBUTE_NAME, display, //
			TargetMethod.PARAMETERS_ATTRIBUTE_NAME, TargetCmdLineLauncher.PARAMETERS, //
			SUPPORTED_ATTACH_KINDS_ATTRIBUTE_NAME, GdbModelTargetInferior.SUPPORTED_KINDS, //
			FOCUS_ATTRIBUTE_NAME, this // Satisfy schema. Will be set to first inferior.
		), "Initialized");

		// subscribe to manager events and console output before requesting the version
		impl.gdb.addEventsListener(this);
		impl.gdb.addConsoleOutputListener(this);

		getVersion();
	}

	@TargetAttributeType(name = GdbModelTargetInferiorContainer.NAME, required = true, fixed = true)
	public GdbModelTargetInferiorContainer getInferiors() {
		return inferiors;
	}

	@TargetAttributeType(
		name = GdbModelTargetAvailableContainer.NAME,
		required = true,
		fixed = true)
	public GdbModelTargetAvailableContainer getAvailable() {
		return available;
	}

	@TargetAttributeType(
		name = GdbModelTargetBreakpointContainer.NAME,
		required = true,
		fixed = true)
	public GdbModelTargetBreakpointContainer getBreakpoints() {
		return breakpoints;
	}

	/**
	 * Asynchronously captures GDB's "show version" output, storing the full text in
	 * {@link #debugger} and the first line in the DISPLAY attribute. Falls back to
	 * "gdb" on failure.
	 */
	protected void getVersion() {
		impl.gdb.waitForPrompt().thenCompose(__ -> {
			return impl.gdb.consoleCapture("show version");
		}).thenAccept(out -> {
			debugger = out;
			changeAttributes(List.of(),
				Map.of(DISPLAY_ATTRIBUTE_NAME, display = out.split("\n")[0].strip() //
				), "Version refreshed");
		}).exceptionally(e -> {
			model.reportError(this, "Could not get GDB version", e);
			debugger = "gdb";
			return null;
		});
	}

	@Override
	public String getDisplay() {
		return display;
	}

	/** Forwards GDB console output to model listeners on the matching channel. */
	@Override
	public void output(GdbManager.Channel gdbChannel, String out) {
		TargetConsole.Channel dbgChannel;
		switch (gdbChannel) {
			case STDOUT:
				dbgChannel = TargetConsole.Channel.STDOUT;
				break;
			case STDERR:
				dbgChannel = TargetConsole.Channel.STDERR;
				break;
			default:
				// only STDOUT and STDERR exist; anything else is a programming error
				throw new AssertionError();
		}
		listeners.fire.consoleOutput(this, dbgChannel, out);
	}

	@Override
	public void inferiorSelected(GdbInferior inferior, GdbCause cause) {
		if (inferior.getKnownThreads().isEmpty()) {
			GdbModelTargetInferior inf = inferiors.getTargetInferior(inferior);
			setFocus(inf);
		}
		// Otherwise, we'll presumably get the =thread-selected event
	}

	/**
	 * TODO: This check should be done in the manager? This "internal" concept is either a manager
	 * concept or a model concept. Right now, it breaches the interface.
	 *
	 * @param cause the cause to examine
	 * @return true if internal
	 */
	protected boolean isFocusInternallyDriven(GdbCause cause) {
		if (cause == null || cause == GdbCause.Causes.UNCLAIMED) {
			return false;
		}
		if (cause instanceof GdbEvent<?>) {
			return false;
		}
		if (cause instanceof GdbPendingCommand<?>) {
			GdbPendingCommand<?> pcmd = (GdbPendingCommand<?>) cause;
			GdbCommand<?> cmd = pcmd.getCommand();
			return cmd.isFocusInternallyDriven();
		}
		return true;
	}

	/** Mirrors GDB's thread selection into this model's focus, unless internally driven. */
	@Override
	public void threadSelected(GdbThread thread, GdbStackFrame frame, GdbCause cause) {
		if (isFocusInternallyDriven(cause)) {
			return;
		}
		GdbModelTargetInferior inf = inferiors.getTargetInferior(thread.getInferior());
		GdbModelTargetThread t = inf.threads.getTargetThread(thread);
		if (frame == null) {
			setFocus(t);
			return;
		}
		GdbModelTargetStackFrame f = t.stack.getTargetFrame(frame);
		setFocus(f);
	}

	public void setAccessible(boolean accessible) {
		changeAttributes(List.of(), Map.of( //
			ACCESSIBLE_ATTRIBUTE_NAME, this.accessible = accessible //
		), "Accessibility changed");
	}

	@Override
	public boolean isAccessible() {
		return accessible;
	}

	/** Launches a new inferior with the given command line. */
	@Override
	public CompletableFuture<Void> launch(List<String> args) {
		return impl.gateFuture(impl.gdb.availableInferior().thenCompose(inf -> {
			return GdbModelImplUtils.launch(impl, inf, args);
		}).thenApply(__ -> null));
	}

	@Override
	public CompletableFuture<Void> attach(TargetAttachable attachable) {
		GdbModelTargetAttachable mine = impl.assertMine(GdbModelTargetAttachable.class, attachable);
		return attach(mine.pid);
	}

	/** Attaches an available inferior to the process with the given pid. */
	@Override
	public CompletableFuture<Void> attach(long pid) {
		return impl.gateFuture(impl.gdb.availableInferior().thenCompose(inf -> {
			return inf.attach(pid).thenApply(__ -> null);
		}));
	}

	/** Sends an interrupt to GDB immediately and cancels the command in flight. */
	@Override
	public CompletableFuture<Void> interrupt() {
		//return impl.gdb.interrupt();
		try {
			impl.gdb.sendInterruptNow();
			impl.gdb.cancelCurrentCommand();
		}
		catch (IOException e) {
			Msg.error(this, "Could not interrupt", e);
		}
		return AsyncUtils.NIL;
	}

	@Override
	public CompletableFuture<Void> execute(String cmd) {
		return impl.gateFuture(impl.gdb.console(cmd).exceptionally(GdbModelImpl::translateEx));
	}

	@Override
	public CompletableFuture<String> executeCapture(String cmd) {
		return impl
				.gateFuture(impl.gdb.consoleCapture(cmd).exceptionally(GdbModelImpl::translateEx));
	}

	/** Activates the nearest selectable ancestor (or self) of the given object. */
	@Override
	public CompletableFuture<Void> requestActivation(TargetObject obj) {
		impl.assertMine(TargetObject.class, obj);
		/**
		 * Yes, this is pointless, since I'm the root, but do it right (TM), since this may change
		 * or be used as an example for other implementations.
		 */
		if (!PathUtils.isAncestor(this.getPath(), obj.getPath())) {
			throw new DebuggerIllegalArgumentException("Can only focus a successor of the scope");
		}
		TargetObject cur = obj;
		while (cur != null) {
			if (cur instanceof GdbModelSelectableObject) {
				GdbModelSelectableObject sel = (GdbModelSelectableObject) cur;
				return sel.setActive();
			}
			cur = cur.getParent();
		}
		return AsyncUtils.NIL;
	}

	/** Focuses the nearest selectable ancestor (or self) of the given object. */
	@Override
	public CompletableFuture<Void> requestFocus(TargetObject obj) {
		impl.assertMine(TargetObject.class, obj);
		/**
		 * Yes, this is pointless, since I'm the root, but do it right (TM), since this may change
		 * or be used as an example for other implementations.
		 */
		if (!PathUtils.isAncestor(this.getPath(), obj.getPath())) {
			throw new DebuggerIllegalArgumentException("Can only focus a successor of the scope");
		}
		TargetObject cur = obj;
		while (cur != null) {
			if (cur instanceof GdbModelSelectableObject) {
				GdbModelSelectableObject sel = (GdbModelSelectableObject) cur;
				setFocus(sel);
				return AsyncUtils.NIL;
			}
			cur = cur.getParent();
		}
		return AsyncUtils.NIL;
	}

	protected void invalidateMemoryAndRegisterCaches() {
		inferiors.invalidateMemoryAndRegisterCaches();
	}

	protected void setFocus(GdbModelSelectableObject focus) {
		changeAttributes(List.of(), Map.of( //
			FOCUS_ATTRIBUTE_NAME, this.focus = focus //
		), "Focus changed");
	}

	@Override
	public GdbModelSelectableObject getFocus() {
		return focus;
	}

	@Override
	public void inferiorStateChanged(GdbInferior inf, Collection<GdbThread> threads, GdbState state,
			GdbThread thread, GdbCause cause, GdbReason reason) {
		/**
		 * TODO: It might be nice if the manager gave a manager-level callback for *stopped and
		 * *running events. Without that, I can't really specify an action to execute, *after* all
		 * inferiors have completed the stateChanged routines.
		 */
		GdbStateChangeRecord sco =
			new GdbStateChangeRecord(inf, threads, state, thread, cause, reason);

		CompletableFuture<Void> infUpdates =
			CompletableFuture.allOf(breakpoints.stateChanged(sco), inferiors.stateChanged(sco));
		infUpdates.whenComplete((v, t) -> {
			if (thread == null) {
				return;
			}
			/**
			 * I have to do this for all inferiors, because I don't know in what order they will
			 * complete.
			 */
			if (impl.gdb.getKnownThreads().get(thread.getId()) != thread) {
				return;
			}
			thread.setActive(true).exceptionally(ex -> {
				impl.reportError(this, "Could not restore event thread", ex);
				return null;
			});
		});
	}

	/** Records the event thread as the EVENT_OBJECT attribute when a thread changes state. */
	@Override
	public void threadStateChanged(GdbThread thread, GdbState state, GdbCause cause,
			GdbReason reason) {
		TargetThread targetThread = (TargetThread) impl.getModelObject(thread);
		changeAttributes(List.of(), List.of(), Map.of( //
			TargetEventScope.EVENT_OBJECT_ATTRIBUTE_NAME, targetThread //
		), reason.desc());
	}
}
| 1.335938 | 1 |
/***
* Copyright (c) 2009 Caelum - www.caelum.com.br/opensource
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package br.com.caelum.vraptor.observer.upload;
import static com.google.common.base.Strings.nullToEmpty;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Observes;
import javax.servlet.http.HttpServletRequest;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import br.com.caelum.vraptor.events.ControllerFound;
/**
* This observer will warn a message in console when no Apache Commons FileUpload
* was found in classpath and application try to upload any files.
*
* @author <NAME>
* @author <NAME>
* @since 3.1.3
*/
@ApplicationScoped
public class NullMultipartObserver {

	private static final Logger logger = LogManager.getLogger(NullMultipartObserver.class);

	/**
	 * Observes controller resolution and logs a warning when the request is a
	 * multipart POST that no registered upload handler will process.
	 *
	 * @param event fired when a controller method is found for the request
	 * @param request the current HTTP request
	 */
	public void nullUpload(@Observes ControllerFound event, HttpServletRequest request) {
		// equalsIgnoreCase instead of toUpperCase().equals(...): it is
		// locale-independent and avoids an intermediate string
		if ("POST".equalsIgnoreCase(request.getMethod())
				&& nullToEmpty(request.getContentType()).startsWith("multipart/form-data")) {
			logger.warn("There is no file upload handlers registered. If you are willing to "
					+ "upload a file, please add the commons-fileupload in your classpath");
		}
	}
}
| 1.460938 | 1 |
package io.onedev.server.web.editable.job.trigger;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.event.IEvent;
import org.apache.wicket.extensions.markup.html.repeater.data.grid.ICellPopulator;
import org.apache.wicket.extensions.markup.html.repeater.data.table.AbstractColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.DataTable;
import org.apache.wicket.extensions.markup.html.repeater.data.table.HeadersToolbar;
import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.NoRecordsToolbar;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.data.IDataProvider;
import org.apache.wicket.markup.repeater.data.ListDataProvider;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.util.convert.ConversionException;
import io.onedev.server.buildspec.job.Job;
import io.onedev.server.buildspec.job.JobAware;
import io.onedev.server.buildspec.job.trigger.JobTrigger;
import io.onedev.server.web.behavior.NoRecordsBehavior;
import io.onedev.server.web.behavior.sortable.SortBehavior;
import io.onedev.server.web.behavior.sortable.SortPosition;
import io.onedev.server.web.component.modal.ModalLink;
import io.onedev.server.web.component.modal.ModalPanel;
import io.onedev.server.web.component.svg.SpriteImage;
import io.onedev.server.web.editable.PropertyDescriptor;
import io.onedev.server.web.editable.PropertyEditor;
import io.onedev.server.web.editable.PropertyUpdating;
@SuppressWarnings("serial")
class JobTriggerListEditPanel extends PropertyEditor<List<Serializable>> {

	// working copy of the edited triggers; converted back in convertInputToValue()
	private final List<JobTrigger> triggers;

	public JobTriggerListEditPanel(String id, PropertyDescriptor propertyDescriptor, IModel<List<Serializable>> model) {
		super(id, propertyDescriptor, model);
		triggers = new ArrayList<>();
		for (Serializable each: model.getObject()) {
			triggers.add((JobTrigger) each);
		}
	}

	/** Returns the job being edited, found via the nearest JobAware ancestor, or null. */
	private Job getJob() {
		JobAware jobAware = findParent(JobAware.class);
		if (jobAware != null)
			return jobAware.getJob();
		else
			return null;
	}

	@Override
	protected void onInitialize() {
		super.onInitialize();

		// "add new" link opens a modal editor that appends a trigger (index -1 = new)
		add(new ModalLink("addNew") {

			@Override
			protected Component newContent(String id, ModalPanel modal) {
				return new JobTriggerEditPanel(id, triggers, -1) {

					@Override
					protected void onCancel(AjaxRequestTarget target) {
						modal.close();
					}

					@Override
					protected void onSave(AjaxRequestTarget target) {
						markFormDirty(target);
						modal.close();
						onPropertyUpdating(target);
						target.add(JobTriggerListEditPanel.this);
					}

					@Override
					public Job getJob() {
						return JobTriggerListEditPanel.this.getJob();
					}

				};
			}

		});

		List<IColumn<JobTrigger, Void>> columns = new ArrayList<>();

		// drag handle column used by the sort behavior below
		columns.add(new AbstractColumn<JobTrigger, Void>(Model.of("")) {

			@Override
			public void populateItem(Item<ICellPopulator<JobTrigger>> cellItem, String componentId, IModel<JobTrigger> rowModel) {
				cellItem.add(new SpriteImage(componentId, "grip") {

					@Override
					protected void onComponentTag(ComponentTag tag) {
						super.onComponentTag(tag);
						tag.setName("svg");
						tag.put("class", "icon drag-indicator");
					}

				});
			}

			@Override
			public String getCssClass() {
				return "minimum actions";
			}

		});

		columns.add(new AbstractColumn<JobTrigger, Void>(Model.of("Description")) {

			@Override
			public void populateItem(Item<ICellPopulator<JobTrigger>> cellItem, String componentId, IModel<JobTrigger> rowModel) {
				cellItem.add(new Label(componentId, rowModel.getObject().getDescription()));
			}

		});

		columns.add(new AbstractColumn<JobTrigger, Void>(Model.of("#Params")) {

			@Override
			public void populateItem(Item<ICellPopulator<JobTrigger>> cellItem, String componentId, IModel<JobTrigger> rowModel) {
				cellItem.add(new Label(componentId, rowModel.getObject().getParams().size()));
			}

		});

		// per-row edit (modal) and delete actions
		columns.add(new AbstractColumn<JobTrigger, Void>(Model.of("")) {

			@Override
			public void populateItem(Item<ICellPopulator<JobTrigger>> cellItem, String componentId, IModel<JobTrigger> rowModel) {
				Fragment fragment = new Fragment(componentId, "actionColumnFrag", JobTriggerListEditPanel.this);
				fragment.add(new ModalLink("edit") {

					@Override
					protected Component newContent(String id, ModalPanel modal) {
						// the row index identifies which trigger the modal edits
						return new JobTriggerEditPanel(id, triggers, cellItem.findParent(Item.class).getIndex()) {

							@Override
							protected void onCancel(AjaxRequestTarget target) {
								modal.close();
							}

							@Override
							protected void onSave(AjaxRequestTarget target) {
								markFormDirty(target);
								modal.close();
								onPropertyUpdating(target);
								target.add(JobTriggerListEditPanel.this);
							}

							@Override
							public Job getJob() {
								return JobTriggerListEditPanel.this.getJob();
							}

						};
					}

				});
				fragment.add(new AjaxLink<Void>("delete") {

					@Override
					public void onClick(AjaxRequestTarget target) {
						markFormDirty(target);
						triggers.remove(rowModel.getObject());
						onPropertyUpdating(target);
						target.add(JobTriggerListEditPanel.this);
					}

				});
				cellItem.add(fragment);
			}

			@Override
			public String getCssClass() {
				return "minimum actions";
			}

		});

		IDataProvider<JobTrigger> dataProvider = new ListDataProvider<JobTrigger>() {

			@Override
			protected List<JobTrigger> getData() {
				return triggers;
			}

		};

		DataTable<JobTrigger, Void> dataTable;
		add(dataTable = new DataTable<JobTrigger, Void>("triggers", columns, dataProvider, Integer.MAX_VALUE));
		dataTable.addTopToolbar(new HeadersToolbar<Void>(dataTable, null));
		dataTable.addBottomToolbar(new NoRecordsToolbar(dataTable, Model.of("Not defined")));
		dataTable.add(new NoRecordsBehavior());
		// drag-and-drop reordering of trigger rows
		dataTable.add(new SortBehavior() {

			@Override
			protected void onSort(AjaxRequestTarget target, SortPosition from, SortPosition to) {
				int fromIndex = from.getItemIndex();
				int toIndex = to.getItemIndex();
				// shift the moved trigger one slot at a time via adjacent swaps
				if (fromIndex < toIndex) {
					for (int i=0; i<toIndex-fromIndex; i++)
						Collections.swap(triggers, fromIndex+i, fromIndex+i+1);
				} else {
					for (int i=0; i<fromIndex-toIndex; i++)
						Collections.swap(triggers, fromIndex-i, fromIndex-i-1);
				}
				onPropertyUpdating(target);
				target.add(JobTriggerListEditPanel.this);
			}

		}.sortable("tbody"));
	}

	@Override
	public void onEvent(IEvent<?> event) {
		super.onEvent(event);

		// re-fire nested property updates as this panel's own update
		if (event.getPayload() instanceof PropertyUpdating) {
			event.stop();
			onPropertyUpdating(((PropertyUpdating)event.getPayload()).getHandler());
		}
	}

	@Override
	protected List<Serializable> convertInputToValue() throws ConversionException {
		List<Serializable> value = new ArrayList<>();
		for (JobTrigger each: triggers)
			value.add(each);
		return value;
	}

}
| 1.265625 | 1 |
/**
* ported to v0.56
*/
package mame056.cpu.z80;
/**
 * Symbolic indices and table identifiers for the Z80 CPU core.
 * Values must match the core's get/set register interface, so they must not change.
 */
public class z80H {

    /* register/state indices (1-24) */
    public static final int Z80_PC = 1;
    public static final int Z80_SP = 2;
    public static final int Z80_AF = 3;
    public static final int Z80_BC = 4;
    public static final int Z80_DE = 5;
    public static final int Z80_HL = 6;
    public static final int Z80_IX = 7;
    public static final int Z80_IY = 8;
    /* alternate (shadow) register pairs AF', BC', DE', HL' */
    public static final int Z80_AF2 = 9;
    public static final int Z80_BC2 = 10;
    public static final int Z80_DE2 = 11;
    public static final int Z80_HL2 = 12;
    /* refresh and interrupt registers, interrupt mode and flip-flops */
    public static final int Z80_R = 13;
    public static final int Z80_I = 14;
    public static final int Z80_IM = 15;
    public static final int Z80_IFF1 = 16;
    public static final int Z80_IFF2 = 17;
    public static final int Z80_HALT = 18;
    public static final int Z80_NMI_STATE = 19;
    public static final int Z80_IRQ_STATE = 20;
    /* DC0..DC3 — presumably daisy-chain device state slots; confirm against the core */
    public static final int Z80_DC0 = 21;
    public static final int Z80_DC1 = 22;
    public static final int Z80_DC2 = 23;
    public static final int Z80_DC3 = 24;

    /* identifiers for the core's cycle-count tables */
    public static final int Z80_TABLE_op = 0;
    public static final int Z80_TABLE_cb = 1;
    public static final int Z80_TABLE_ed = 2;
    public static final int Z80_TABLE_xy = 3;
    public static final int Z80_TABLE_xycb = 4;
    public static final int Z80_TABLE_ex = 5;/* cycles counts for taken jr/jp/call and interrupt latency (rst opcodes) */
}
| 1.273438 | 1 |
app/src/main/java/org/elastos/wallet/ela/utils/MatcherUtil.java | chunshulimao/Elastos.App.UnionSquare.Android | 0 | 673 | package org.elastos.wallet.ela.utils;
import android.text.Editable;
import android.text.InputFilter;
import android.text.Spanned;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.widget.EditText;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.inject.Inject;
/**
* 正则工具类
* Created by wangdongfeng on 2018/4/24.
*/
/**
 * Regex and date/time utility helpers: wallet address / private key / ID-card
 * validation, epoch-timestamp formatting, and Android EditText input filters.
 * Created by wangdongfeng on 2018/4/24.
 */
public class MatcherUtil {

    @Inject
    public MatcherUtil() {
    }

    /** Validates a wallet address: "0x" followed by exactly 40 word characters. */
    public boolean isMatcherAddr(String str) {
        if (TextUtils.isEmpty(str)) {
            return false;
        }
        String pattern = "^0x[\\w]{40}$";
        return Pattern.matches(pattern, str);
    }

    /** Validates a wallet private key: "0x" followed by exactly 64 word characters. */
    public boolean isMatcherPrivateKey(String str) {
        String pattern = "^0x[\\w]{64}$";
        return Pattern.matches(pattern, str);
    }

    /** Chinese ID-card number: 15 digits, or 17 digits plus a digit/X check character. */
    public static final String REGEX_ID_CARD = "(^\\d{15}$)|(^\\d{17}([0-9]|X|x)$)";

    // Compiled once; Pattern.matches() would recompile the regex on every call.
    private static final Pattern ID_CARD_PATTERN = Pattern.compile(REGEX_ID_CARD);

    /** Validates an ID-card number against {@link #REGEX_ID_CARD}. */
    public boolean idcardChenck(String idCard) {
        return ID_CARD_PATTERN.matcher(idCard).matches();
    }

    /**
     * Validates an ID-card number, but only for mainland China ("0086");
     * empty input is rejected and any other area code is accepted as-is.
     */
    public boolean idcardChenck(String idCard, String areaCode) {
        if (TextUtils.isEmpty(idCard)) {
            return false;
        } else if ("0086".equals(areaCode)) {
            return ID_CARD_PATTERN.matcher(idCard).matches();
        }
        return true;
    }

    /** Strips tabs, line breaks and whitespace from {@code src}; null yields "". */
    public String replaceBlank(String src) {
        String dest = "";
        if (src != null) {
            Pattern pattern = Pattern.compile("\t|\r|\n|\\s*");
            Matcher matcher = pattern.matcher(src);
            dest = matcher.replaceAll("");
        }
        return dest;
    }

    /** Formats an epoch-millisecond timestamp as "yyyy.MM.dd HH:mm:ss". */
    public static String time(long sd) {
        // SimpleDateFormat is not thread-safe, so a fresh instance is used per call.
        return new SimpleDateFormat("yyyy.MM.dd HH:mm:ss").format(new Date(sd));
    }

    /** Formats an epoch-millisecond timestamp (decimal string) as "yyyy.MM.dd HH:mm:ss". */
    public static String time(String sd) {
        // Delegates to time(long); the formatting code was previously duplicated.
        return time(Long.parseLong(sd));
    }

    /** Formats an epoch-millisecond timestamp (decimal string) as "yyyy-MM-dd". */
    public static String time_nyr(String sd) {
        return new SimpleDateFormat("yyyy-MM-dd").format(new Date(Long.parseLong(sd)));
    }

    /**
     * Formats an epoch-millisecond timestamp (decimal string) with the given
     * pattern; defaults to "yyyy-MM-dd HH:mm:ss". Null/blank/"null" input yields "".
     */
    public static String timeStamp2Date(String seconds, String format) {
        if (seconds == null || seconds.isEmpty() || seconds.equals("null")) {
            return "";
        }
        if (format == null || format.isEmpty()) {
            format = "yyyy-MM-dd HH:mm:ss";
        }
        SimpleDateFormat sdf = new SimpleDateFormat(format);
        return sdf.format(new Date(Long.valueOf(seconds)));
    }

    /**
     * Parses a date string with the given pattern and returns its
     * epoch-millisecond timestamp as a decimal string; "" on parse failure.
     *
     * @param date_str date text, e.g. "2018-04-24 10:00:00"
     * @param format   pattern such as "yyyy-MM-dd HH:mm:ss"
     */
    public static String date2TimeStamp(String date_str, String format) {
        try {
            SimpleDateFormat sdf = new SimpleDateFormat(format);
            return String.valueOf(sdf.parse(date_str).getTime());
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "";
    }

    /** Converts a "yyyy-MM-dd HH:mm:ss" string into "yyyy-MM-dd". */
    public static String date2Date(String sd) {
        String date = date2TimeStamp(sd, "yyyy-MM-dd HH:mm:ss");
        return timeStamp2Date(date, "yyyy-MM-dd");
    }

    /**
     * Returns the length of {@code s} with every non-ASCII character counted
     * as a single placeholder character.
     */
    public int getWordCountRegex(String s) {
        s = s.replaceAll("[^\\x00-\\xff]", "*");
        return s.length();
    }

    /**
     * Abbreviates {@code data} to its first and last 9 characters joined by
     * "...". Bug fix: inputs of 21 characters or fewer — where abbreviation
     * could not shorten anything — are now returned unchanged; previously
     * strings shorter than 9 characters threw StringIndexOutOfBoundsException
     * and lengths 9–21 produced an overlapping, longer result.
     */
    public static String sub(String data) {
        if (data == null || data.length() <= 21) {
            return data;
        }
        return data.substring(0, 9) + "..." + data.substring(data.length() - 9);
    }

    /** Accepts 1-16 characters of CJK ideographs, latin letters or digits. */
    public static boolean isRightName(String s) {
        return (Pattern.matches("[\u4e00-\u9fa5a-zA-Z0-9]{1,16}", s));
    }

    /** Returns true when the string contains a character outside the BMP (emoji etc.). */
    public static boolean isEmoji(String string) {
        // Filter emoji: any code point above U+FFFF (surrogate pairs).
        Pattern p = Pattern.compile("[^\\u0000-\\uFFFF]");
        Matcher m = p.matcher(string);
        return m.find();
    }

    /**
     * Returns an InputFilter that rejects characters outside an allow-list of
     * alphanumerics, CJK ideographs and common half-/full-width punctuation;
     * any edit containing a forbidden character is replaced with "".
     */
    public static InputFilter filter() {
        return new InputFilter() {
            Pattern pattern = Pattern.compile("[^a-zA-Z0-9\\u4E00-\\u9FA5_,.?!:;…~_\\-\"\"/@*+'()<>{}/[/]()<>{}\\[\\]=%&$|\\/♀♂#¥£¢€\"^` ,。?!:;……~“”、“()”、(——)‘’@‘·’&*#《》¥《〈〉》〈$〉[]£[]{}{}¢【】【】%〖〗〖〗/〔〕〔〕\『』『』^「」「」|﹁﹂`.]");

            @Override
            public CharSequence filter(CharSequence charSequence, int i, int i1, Spanned spanned, int i2, int i3) {
                Matcher matcher = pattern.matcher(charSequence);
                if (!matcher.find()) {
                    // No forbidden character: keep the typed text unchanged.
                    return null;
                } else {
                    return "";
                }
            }
        };
    }

    /*
     * InputFilter.filter contract reminder:
     *   source - text about to be inserted ("" for deletions)
     *   start  - start index into source (usually 0)
     *   end    - start + length of the inserted text
     *   dest   - text currently in the field
     *   dstart - start of the span in dest being replaced
     *   dend   - end of the span in dest being replaced
     * Returning null keeps the input as-is; "" rejects it entirely.
     */

    /** Returns an InputFilter limiting input to {@code digits} decimal places. */
    public static InputFilter filter(int digits) {
        return new InputFilter() {
            public CharSequence filter(CharSequence source, int start, int end,
                                       Spanned dest, int dstart, int dend) {
                // Deletions and other empty edits pass through untouched.
                if ("".equals(source.toString())) {
                    return null;
                }
                String dValue = dest.toString();
                String[] splitArray = dValue.split("\\.");
                if (splitArray.length > 1) {
                    String dotValue = splitArray[1];
                    int diff = dotValue.length() + 1 - digits;
                    if (diff > 0) {
                        // Trim the inserted text so the fraction keeps at most `digits` places.
                        return source.subSequence(start, end - diff);
                    }
                }
                return null;
            }
        };
    }

    /**
     * Limits {@code editText} to at most {@code wei} bytes.
     * NOTE(review): byte length uses the platform default charset — confirm
     * UTF-8 is what callers expect on all devices.
     *
     * @param editText field to constrain
     * @param wei      maximum number of bytes
     */
    public static void editTextFormat(EditText editText, int wei) {
        editText.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                // The tag marks the programmatic setText() below so this
                // callback does not recurse endlessly.
                if (editText.getTag() != null && (boolean) editText.getTag()) {
                    editText.setTag(false);
                    return;
                }
                if (!TextUtils.isEmpty(s)) {
                    String original = s.toString().trim();
                    if (original.getBytes().length <= wei) {
                        return;
                    }
                    // Rebuild the longest prefix that still fits in `wei` bytes.
                    StringBuilder res = new StringBuilder();
                    for (int i = 0; i < original.length(); i++) {
                        char c = original.charAt(i);
                        if (String.valueOf(c).getBytes().length + res.toString().getBytes().length <= wei) {
                            res.append(c);
                        } else {
                            break;
                        }
                    }
                    editText.setTag(true);
                    editText.setText(res.toString());
                    editText.setSelection(res.length());
                }
            }
        });
    }
}
| 1.554688 | 2 |
src/main/java/openmods/calc/parsing/PostfixCompiler.java | GTNewHorizons/OpenModsLib | 1 | 681 | package openmods.calc.parsing;
import com.google.common.collect.PeekingIterator;
import openmods.calc.IExecutable;
import openmods.calc.parsing.IPostfixCompilerState.Result;
import openmods.utils.Stack;
/**
 * Base compiler for postfix (RPN) token streams. Tokens are fed to a stack of
 * {@link IPostfixCompilerState}s; modifier and left-bracket tokens push new
 * states, and a state reporting ACCEPTED_AND_FINISHED is folded ("unwound")
 * into its parent as a compiled executable.
 */
public abstract class PostfixCompiler<E> implements ITokenStreamCompiler<E> {

    @Override
    public IExecutable<E> compile(PeekingIterator<Token> input) {
        final Stack<IPostfixCompilerState<E>> stateStack = Stack.create();
        stateStack.push(createInitialState());
        while (input.hasNext()) {
            final Token token = input.next();
            if (token.type == TokenType.MODIFIER) {
                // Modifiers open a nested state that consumes subsequent tokens.
                stateStack.push(createStateForModifier(token.value));
            } else if (token.type == TokenType.LEFT_BRACKET) {
                stateStack.push(createStateForBracket(token.value));
            } else {
                // All other tokens go to the innermost (top-of-stack) state.
                final IPostfixCompilerState<E> currentState = stateStack.peek(0);
                final Result result = currentState.acceptToken(token);
                switch (result) {
                    case ACCEPTED_AND_FINISHED:
                        // The state is done: collapse it (and any parents it
                        // finishes in turn) into the enclosing state.
                        unwindStack(stateStack);
                        // fall-through
                    case ACCEPTED:
                        // NO-OP
                        break;
                    case REJECTED:
                    default:
                        throw new IllegalStateException("Token " + token + " not accepted in state " + currentState);
                }
            }
        }
        // Exactly one state (the initial one) must remain; it yields the result.
        final IPostfixCompilerState<E> finalState = stateStack.popAndExpectEmptyStack();
        return finalState.exit();
    }

    /**
     * Pops the finished state, feeds its compiled executable to the parent
     * state, and repeats for as long as parents also report themselves
     * finished by accepting that executable.
     */
    private void unwindStack(Stack<IPostfixCompilerState<E>> stateStack) {
        IPostfixCompilerState<E> currentState = stateStack.pop();
        UNWIND: while (true) {
            final IExecutable<E> exitResult = currentState.exit();
            currentState = stateStack.peek(0);
            final Result acceptResult = currentState.acceptExecutable(exitResult);
            switch (acceptResult) {
                case ACCEPTED_AND_FINISHED:
                    // Parent finished too: pop it and keep folding upwards.
                    stateStack.pop();
                    continue UNWIND;
                case ACCEPTED:
                    break UNWIND;
                case REJECTED:
                default:
                    throw new IllegalStateException("Executable " + exitResult + " not accepted in state " + currentState);
            }
        }
    }

    /** Creates the state that accepts top-level tokens and produces the final executable. */
    protected abstract IPostfixCompilerState<E> createInitialState();

    /** Hook for modifier tokens; rejects them by default. */
    protected IPostfixCompilerState<E> createStateForModifier(String modifier) {
        throw new UnsupportedOperationException(modifier);
    }

    /** Hook for opening-bracket tokens; rejects them by default. */
    protected IPostfixCompilerState<E> createStateForBracket(String bracket) {
        throw new UnsupportedOperationException(bracket);
    }
}
| 1.4375 | 1 |
src/main/java/com/github/ideahut/sbms/shared/wrapper/RequestWrapper.java | ideahut/sbms-shared | 0 | 689 | package com.github.ideahut.sbms.shared.wrapper;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ReadListener;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import org.apache.commons.io.IOUtils;
/**
 * HttpServletRequest wrapper that snapshots the request's parameters, headers
 * and body at construction time, so filters can read the body repeatedly and
 * mutate parameters/headers before passing the request downstream.
 */
public class RequestWrapper extends HttpServletRequestWrapper {

    private Map<String, List<String>> parameters;
    private Map<String, List<String>> headers;
    // Raw copy of the request body, replayed by getInputStream()/getReader().
    private byte[] bytes;

    /**
     * Copies all parameters, headers and the body from {@code request}.
     * A body read failure is swallowed and treated as an empty body.
     */
    public RequestWrapper(HttpServletRequest request) {
        super(request);
        parameters = new HashMap<>();
        Enumeration<String> en = request.getParameterNames();
        while (en.hasMoreElements()) {
            String name = en.nextElement();
            String[] values = request.getParameterValues(name);
            if (values != null) {
                List<String> list = new ArrayList<>();
                for (String v : values) {
                    list.add(v);
                }
                parameters.put(name, list);
            } else {
                parameters.put(name, null);
            }
        }
        headers = new HashMap<>();
        en = request.getHeaderNames();
        while (en.hasMoreElements()) {
            String name = en.nextElement();
            Enumeration<String> values = request.getHeaders(name);
            if (values != null) {
                List<String> list = new ArrayList<>();
                while (values.hasMoreElements()) {
                    list.add(values.nextElement());
                }
                headers.put(name, list);
            } else {
                headers.put(name, null);
            }
        }
        try {
            bytes = IOUtils.toByteArray(request.getInputStream());
        } catch (IOException e) {
            bytes = new byte[0];
        }
    }

    /** Returns the first value of the named header, or null when absent. */
    @Override
    public String getHeader(String name) {
        List<String> values = headers.get(name);
        return values != null && values.size() != 0 ? values.get(0) : null;
    }

    /**
     * Returns all values of the named header. Bug fix: per the servlet spec
     * this now returns an empty enumeration (not null) when absent.
     */
    @Override
    public Enumeration<String> getHeaders(String name) {
        List<String> values = headers.get(name);
        return values != null ? Collections.enumeration(values) : Collections.emptyEnumeration();
    }

    @Override
    public Enumeration<String> getHeaderNames() {
        return Collections.enumeration(headers.keySet());
    }

    /**
     * Returns the named header parsed as an int; -1 when absent.
     *
     * @throws NumberFormatException if the header value is not a number
     */
    @Override
    public int getIntHeader(String name) {
        String value = getHeader(name);
        if (value == null) {
            return -1;
        }
        return Integer.parseInt(value);
    }

    /**
     * Replaces the values of a header. Bug fix: stores a mutable copy —
     * Arrays.asList() yields a fixed-size list, which made a subsequent
     * addHeader() on the same name throw UnsupportedOperationException.
     */
    public void setHeader(String name, String[] values) {
        headers.put(name, values != null ? new ArrayList<>(Arrays.asList(values)) : null);
    }

    public void setHeader(String name, String value) {
        setHeader(name, new String[] { value });
    }

    /** Appends values to a header, creating it if missing. */
    public void addHeader(String name, String[] values) {
        List<String> list = headers.get(name);
        if (list == null) {
            list = new ArrayList<>();
        }
        for (String v : values) {
            list.add(v);
        }
        headers.put(name, list);
    }

    public void addHeader(String name, String value) {
        addHeader(name, new String[] { value });
    }

    public void removeHeader(String name) {
        headers.remove(name);
    }

    /** Returns the first value of the named parameter, or null when absent. */
    @Override
    public String getParameter(String name) {
        List<String> values = parameters.get(name);
        return values != null && values.size() != 0 ? values.get(0) : null;
    }

    @Override
    public Map<String, String[]> getParameterMap() {
        Map<String, String[]> map = new HashMap<>();
        for (String key : parameters.keySet()) {
            List<String> values = parameters.get(key);
            map.put(key, values != null ? values.toArray(new String[0]) : null);
        }
        return Collections.unmodifiableMap(map);
    }

    @Override
    public Enumeration<String> getParameterNames() {
        return Collections.enumeration(parameters.keySet());
    }

    /** Returns all values of the named parameter, or null when absent (per spec). */
    @Override
    public String[] getParameterValues(String name) {
        List<String> values = parameters.get(name);
        return values != null ? values.toArray(new String[0]) : null;
    }

    /**
     * Replaces the values of a parameter. Bug fix: stores a mutable copy so a
     * later addParameter() on the same name does not throw (see setHeader).
     */
    public void setParameter(String name, String[] values) {
        parameters.put(name, values != null ? new ArrayList<>(Arrays.asList(values)) : null);
    }

    public void setParameter(String name, String value) {
        setParameter(name, new String[] { value });
    }

    /** Appends values to a parameter, creating it if missing. */
    public void addParameter(String name, String[] values) {
        List<String> list = parameters.get(name);
        if (list == null) {
            list = new ArrayList<>();
        }
        for (String v : values) {
            list.add(v);
        }
        parameters.put(name, list);
    }

    public void addParameter(String name, String value) {
        addParameter(name, new String[] { value });
    }

    public void removeParameter(String name) {
        parameters.remove(name);
    }

    /** Returns a replayable stream over the buffered body bytes. */
    @Override
    public ServletInputStream getInputStream() throws IOException {
        ServletInputStream stream = new ServletInputStream() {

            private int lastIndexRetrieved = -1;
            private ReadListener readListener = null;

            /**
             * Bug fix: the byte is masked with 0xFF — the previous
             * sign-extending cast returned negative values for bytes >= 0x80,
             * which callers interpret as end-of-stream per the
             * InputStream.read() contract (0..255 or -1).
             */
            @Override
            public int read() throws IOException {
                int i;
                if (!isFinished()) {
                    i = bytes[lastIndexRetrieved + 1] & 0xFF;
                    lastIndexRetrieved++;
                    if (isFinished() && (readListener != null)) {
                        try {
                            readListener.onAllDataRead();
                        } catch (IOException ex) {
                            readListener.onError(ex);
                            throw ex;
                        }
                    }
                    return i;
                } else {
                    return -1;
                }
            }

            @Override
            public boolean isFinished() {
                return lastIndexRetrieved == (bytes.length - 1);
            }

            /**
             * Bug fix: the body is fully buffered in memory, so a read can
             * never block; previously this returned isFinished(), i.e. false
             * until the stream was exhausted.
             */
            @Override
            public boolean isReady() {
                return true;
            }

            @Override
            public void setReadListener(ReadListener listener) {
                this.readListener = listener;
                // The buffered data is immediately available (or already consumed),
                // so notify the listener right away.
                if (!isFinished()) {
                    try {
                        readListener.onDataAvailable();
                    } catch (IOException e) {
                        readListener.onError(e);
                    }
                } else {
                    try {
                        readListener.onAllDataRead();
                    } catch (IOException e) {
                        readListener.onError(e);
                    }
                }
            }
        };
        return stream;
    }

    /** Returns a reader over the buffered body bytes (platform default charset). */
    @Override
    public BufferedReader getReader() throws IOException {
        return new BufferedReader(new InputStreamReader(new ByteArrayInputStream(bytes)));
    }

    /** Exposes the raw buffered body. */
    public byte[] getBytes() {
        return bytes;
    }
}
| 1.390625 | 1 |
support/cas-server-support-redis-ticket-registry/src/main/java/org/apereo/cas/config/RedisTicketRegistryConfiguration.java | yauheni-sidarenka/apereo-cas | 1 | 697 | package org.apereo.cas.config;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.configuration.model.support.redis.RedisTicketRegistryProperties;
import org.apereo.cas.redis.core.RedisObjectFactory;
import org.apereo.cas.ticket.Ticket;
import org.apereo.cas.ticket.registry.RedisTicketRegistry;
import org.apereo.cas.ticket.registry.TicketRedisTemplate;
import org.apereo.cas.ticket.registry.TicketRegistry;
import org.apereo.cas.util.CoreTicketUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
/**
* This is {@link RedisTicketRegistryConfiguration}.
*
* @author serv
* @since 5.0.0
*/
/**
 * Spring configuration that wires the Redis-backed CAS ticket registry:
 * the connection factory, the ticket Redis template, and the registry bean.
 *
 * @author serv
 * @since 5.0.0
 */
@Configuration("redisTicketRegistryConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
public class RedisTicketRegistryConfiguration {

    @Autowired
    private CasConfigurationProperties casProperties;

    /** Builds the Redis connection factory from the ticket-registry settings. */
    @ConditionalOnMissingBean(name = "redisTicketConnectionFactory")
    @Bean
    public RedisConnectionFactory redisTicketConnectionFactory() {
        final RedisTicketRegistryProperties redisProperties = casProperties.getTicket().getRegistry().getRedis();
        return new RedisObjectFactory().newRedisConnectionFactory(redisProperties);
    }

    /** Redis template used to serialize tickets to and from Redis. */
    @Bean
    @ConditionalOnMissingBean(name = "ticketRedisTemplate")
    public RedisTemplate<String, Ticket> ticketRedisTemplate() {
        return new TicketRedisTemplate(redisTicketConnectionFactory());
    }

    /** The Redis-backed ticket registry with its cipher executor configured. */
    @Bean
    public TicketRegistry ticketRegistry() {
        final RedisTicketRegistryProperties redisProperties = casProperties.getTicket().getRegistry().getRedis();
        final RedisTicketRegistry registry = new RedisTicketRegistry(ticketRedisTemplate());
        registry.setCipherExecutor(CoreTicketUtils.newTicketRegistryCipherExecutor(redisProperties.getCrypto(), "redis"));
        return registry;
    }
}
| 1.015625 | 1 |
src/main/java/net/gegy1000/psf/server/api/RegisterItemBlock.java | PracticalSpaceFireworks/PracticalSpaceFireworks | 8 | 705 | package net.gegy1000.psf.server.api;
import javax.annotation.ParametersAreNonnullByDefault;
import net.minecraft.block.Block;
import net.minecraft.item.ItemBlock;
@ParametersAreNonnullByDefault
public interface RegisterItemBlock {

    /**
     * Creates the {@link ItemBlock} representing the given block in item form;
     * implementors may override to supply a custom item class.
     *
     * @param block the block to wrap
     * @return the item form of the block (a plain {@link ItemBlock} by default)
     */
    default ItemBlock createItemBlock(Block block) {
        return new ItemBlock(block);
    }
}
| 0.859375 | 1 |
src/main/java/com/l1/service/impl/ImageServiceImpl.java | ylsn19821104/storage | 1 | 713 | package com.l1.service.impl;
import com.l1.dao.ImageDao;
import com.l1.dao.SkuDao;
import com.l1.entity.Image;
import com.l1.service.ImageService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* Created by luopotaotao on 2016/5/7.
*/
/**
 * {@link ImageService} implementation that delegates every persistence
 * operation straight to {@link ImageDao}.
 * Created by luopotaotao on 2016/5/7.
 */
@Service("imageServiceImpl")
public class ImageServiceImpl implements ImageService {

    @Autowired
    private ImageDao imageDao;

    /** Persists a new image record. */
    @Override
    public int save(Image image) {
        return imageDao.save(image);
    }

    /**
     * Lists images. NOTE(review): the {@code page}/{@code rows} arguments are
     * not forwarded to the DAO — confirm whether paging is intended here.
     */
    @Override
    public List<Image> find(Integer page, Integer rows) {
        return imageDao.find();
    }

    /** Lists every image record. */
    @Override
    public List<Image> findAll() {
        return imageDao.findAll();
    }

    /** Looks an image up by primary key. */
    @Override
    public Image findById(int id) {
        return imageDao.findById(id);
    }

    /** Updates an existing image record. */
    @Override
    public int update(Image image) {
        return imageDao.update(image);
    }

    /** Updates several image records in one batch. */
    @Override
    public int batchUpdate(List<Image> images) {
        return imageDao.batchUpdate(images);
    }

    /** Deletes the images with the given ids. */
    @Override
    public int remove(Integer[] ids) {
        return imageDao.remove(ids);
    }

    /** Counts all image records. */
    @Override
    public int queryTotal() {
        return imageDao.queryTotal();
    }

    /** Fetches the images with the given ids. */
    @Override
    public List<Image> findByIds(Integer[] ids) {
        return imageDao.findByIds(ids);
    }
}
| 1.109375 | 1 |
java/src/main/java/SimpleFeedClient.java | theiotes/nordnet | 36 | 721 | /*
* Copyright 2021 Nordnet Bank AB
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
import com.fasterxml.jackson.databind.JsonNode;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.math.BigInteger;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.Arrays;
/**
 * Minimal TLS client for the Nordnet (NNAPI) public price feed: logs in with
 * a session key obtained from the REST login call, subscribes to feed topics,
 * and can print TLS session/certificate details for debugging.
 */
public class SimpleFeedClient {

    private final Socket socket;
    private String sessionKey;
    private final BufferedReader in;
    private final BufferedWriter out;
    private final String SERVICE = "NEXTAPI";

    /**
     * Opens an encrypted TCP connection to the public-feed host advertised in
     * the login response.
     *
     * @param loginResponse JSON login response containing "session_key" and
     *                      "public_feed" hostname/port fields
     * @throws IOException if the TLS socket cannot be opened
     */
    SimpleFeedClient(JsonNode loginResponse) throws IOException {
        sessionKey = loginResponse.get("session_key").asText();
        String hostName = loginResponse.get("public_feed").get("hostname").asText();
        int port = Integer.parseInt(loginResponse.get("public_feed").get("port").asText());

        // Open an encrypted TCP connection
        SSLSocketFactory ssf = (SSLSocketFactory) SSLSocketFactory.getDefault();
        socket = ssf.createSocket(hostName, port);

        // Configure connection
        socket.setSoTimeout(10000000);
        socket.setKeepAlive(true);
        in = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
        out = new BufferedWriter(new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8));
    }

    public Socket getSocket() {
        return socket;
    }

    public void closeSocket() throws IOException {
        socket.close();
    }

    /**
     * Sends the feed login command using the stored session key.
     * Write failures are reported to stderr rather than thrown, matching the
     * best-effort style of the rest of this sample client.
     */
    public void login() {
        String loginRequest = """
                { "cmd": "login", "args": { "session_key": "%s", "service":"%s"}}\n
                """.formatted(sessionKey, SERVICE);
        // Always validate your JSON
        Util.assertValidateJSONString(loginRequest);
        try {
            out.write(loginRequest);
            out.flush();
        } catch (IOException e) {
            System.err.println("Could not write to API");
        }
    }

    /** Closes the reader, writer and socket; errors are reported to stderr. */
    public void logout() {
        // Log out of NNAPI
        try {
            in.close();
            out.close();
            closeSocket();
        } catch (IOException e) {
            System.err.println("Could not close feedclient connection");
        }
    }

    /**
     * Sends a subscription command and prints the feed's first response line.
     *
     * @param feedSubscription JSON subscription command (validated before send)
     * @throws Exception on validation or I/O failure
     */
    public void subscribePublicFeed(String feedSubscription) throws Exception {
        Util.assertValidateJSONString(feedSubscription);
        out.write(feedSubscription);
        out.flush();
        System.out.println(">> Response from Price Feed");
        String responsePriceFeed = in.readLine();
        if (responsePriceFeed == null) {
            // Bug fix: readLine() returns null when the feed closes the
            // connection; previously null was passed on to the JSON printer.
            System.err.println("Feed connection closed before a response was received");
            return;
        }
        Util.prettyPrintJSON(responsePriceFeed);
    }

    /** Prints peer certificate subjects and TLS session details to stdout. */
    public void printCertificateDetails() {
        // Output details about certificates and session
        SSLSession session = ((SSLSocket) getSocket()).getSession();
        Certificate[] certificates = null;
        try {
            certificates = session.getPeerCertificates();
        } catch (SSLPeerUnverifiedException e) {
            // Peer identity could not be verified; there are no certificates to print.
        }
        if (certificates != null) {
            // Bug fix: previously a null array was passed on here and caused a
            // NullPointerException in Arrays.stream().
            SimpleFeedClient.printCertificateDetails(certificates);
        }
        SimpleFeedClient.printSessionDetails(session);
    }

    /** Prints host, cipher, protocol and timing details of a TLS session. */
    public static void printSessionDetails(SSLSession session) {
        System.out.println(">> NAPI's certificate");
        System.out.println("Peer host: " + session.getPeerHost());
        System.out.println("Cipher: " + session.getCipherSuite());
        System.out.println("Protocol: " + session.getProtocol());
        System.out.println("ID: " + new BigInteger(session.getId()));
        System.out.println("Session created: " + session.getCreationTime());
        System.out.println("Session accessed: " + session.getLastAccessedTime());
    }

    /** Prints the subject DN of each certificate in the chain. */
    public static void printCertificateDetails(Certificate[] certificates) {
        Arrays.stream(certificates)
                .map(c -> (X509Certificate) c)
                .map(X509Certificate::getSubjectDN)
                .forEach(System.out::println);
    }
}
| 1.351563 | 1 |
spark-client/src/main/java/org/apache/hive/spark/client/rpc/SaslHandler.java | xxzzycq/hive-2.1.1 | 30 | 729 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.spark.client.rpc;
import java.io.IOException;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Abstract SASL handler. Abstracts the auth protocol handling and encryption, if it's enabled.
* Needs subclasses to provide access to the actual underlying SASL implementation (client or
* server).
*/
abstract class SaslHandler extends SimpleChannelInboundHandler<Rpc.SaslMessage>
        implements KryoMessageCodec.EncryptionHandler {

    // LOG is not static to make debugging easier (being able to identify which sub-class
    // generated the log message).
    private final Logger LOG;
    // True when the configured QOP is "auth-conf", i.e. encryption is mandatory.
    private final boolean requiresEncryption;
    private KryoMessageCodec kryo;
    // NOTE(review): written below but never read in this class; looks vestigial — confirm.
    private boolean hasAuthResponse = false;

    protected SaslHandler(RpcConfiguration config) {
        this.requiresEncryption = Rpc.SASL_AUTH_CONF.equals(config.getSaslOptions().get(Sasl.QOP));
        this.LOG = LoggerFactory.getLogger(getClass());
    }

    // Use a separate method to make it easier to create a SaslHandler without having to
    // plumb the KryoMessageCodec instance through the constructors.
    void setKryoMessageCodec(KryoMessageCodec kryo) {
        this.kryo = kryo;
    }

    /**
     * Feeds an incoming SASL challenge to the underlying mechanism and sends
     * back the response, if any. Once negotiation completes, removes this
     * handler from the pipeline and enables encryption on the codec when the
     * negotiated QOP is "auth-conf".
     */
    @Override
    protected final void channelRead0(ChannelHandlerContext ctx, Rpc.SaslMessage msg)
            throws Exception {
        LOG.debug("Handling SASL challenge message...");
        Rpc.SaslMessage response = update(msg);
        if (response != null) {
            LOG.debug("Sending SASL challenge response...");
            hasAuthResponse = true;
            ctx.channel().writeAndFlush(response).sync();
        }
        if (!isComplete()) {
            return;
        }
        // If negotiation is complete, remove this handler from the pipeline, and register it with
        // the Kryo instance to handle encryption if needed.
        ctx.channel().pipeline().remove(this);
        String qop = getNegotiatedProperty(Sasl.QOP);
        LOG.debug("SASL negotiation finished with QOP {}.", qop);
        if (Rpc.SASL_AUTH_CONF.equals(qop)) {
            LOG.info("SASL confidentiality enabled.");
            kryo.setEncryptionHandler(this);
        } else {
            if (requiresEncryption) {
                throw new SaslException("Encryption required, but SASL negotiation did not set it up.");
            }
            // Negotiation succeeded without confidentiality: SASL resources
            // are no longer needed.
            dispose();
        }
        onComplete();
    }

    /** Releases SASL resources when the channel goes away. */
    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
        dispose();
        super.channelInactive(ctx);
    }

    /**
     * Closes the channel on any error that occurs before negotiation has
     * finished; always forwards the exception down the pipeline.
     */
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        if (!isComplete()) {
            LOG.info("Exception in SASL negotiation.", cause);
            onError(cause);
            ctx.close();
        }
        ctx.fireExceptionCaught(cause);
    }

    /** Whether the underlying SASL negotiation has finished. */
    protected abstract boolean isComplete();

    /** Returns the named property negotiated by the SASL mechanism (e.g. Sasl.QOP). */
    protected abstract String getNegotiatedProperty(String name);

    /** Processes a challenge and returns the response to send, or null when none is needed. */
    protected abstract Rpc.SaslMessage update(Rpc.SaslMessage challenge) throws IOException;

    /** Called once after negotiation completes successfully. */
    protected abstract void onComplete() throws Exception;

    /** Called when negotiation fails before completion. */
    protected abstract void onError(Throwable t);
}
| 1.34375 | 1 |
DAM2_DI_GestionClientes/src/gestionclientes/logica/LogicaNegocio.java | alexiscv/DAM2_DI | 0 | 737 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package gestionclientes.logica;
import gestionclientes.dto.Cliente;
import java.util.ArrayList;
/**
*
* @author angel
*/
/**
 * In-memory business-logic layer holding the application's client list.
 *
 * @author angel
 */
public class LogicaNegocio {

    // Shared static in-memory store. NOTE(review): static mutable state is
    // process-wide and not thread-safe — confirm that is acceptable here.
    private static ArrayList<Cliente> listaClientes = new ArrayList<>();

    /** Adds a client to the shared list. */
    public static void addCliente( Cliente cliente ){
        listaClientes.add(cliente);
    }

    /**
     * Returns the live backing list, not a copy: mutations by callers affect
     * the shared store. NOTE(review): consider an unmodifiable view if
     * external mutation is not intended.
     */
    public static ArrayList<Cliente> getListaClientes() {
        return listaClientes;
    }
}
| 0.785156 | 1 |
flow-data/src/main/java/com/vaadin/flow/data/provider/ListDataView.java | mlopezFC/flow | 402 | 745 | /*
* Copyright 2000-2021 Vaad<NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.flow.data.provider;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import com.vaadin.flow.component.ComponentEventListener;
import com.vaadin.flow.function.SerializableComparator;
import com.vaadin.flow.function.SerializablePredicate;
import com.vaadin.flow.function.ValueProvider;
/**
 * DataView for in-memory list data that provides information on the data and
 * allows operations on it. Mutation operations can be used only if the backing
 * {@link List} is mutable.
 *
 * @param <T>
 *            data type
 * @param <V>
 *            ListDataView type
 * @since
 */
public interface ListDataView<T, V extends ListDataView<T, ?>>
        extends DataView<T> {
    /**
     * Check if item is present in the currently filtered data set.
     * <p>
     * By default, the {@code equals} method implementation of the item is used
     * for the identity check. If a custom data provider is used, then the
     * {@link DataProvider#getId(Object)} method is used instead. The item's
     * custom identity can be set up with
     * {@link DataView#setIdentifierProvider(IdentifierProvider)}.
     *
     * @param item
     *            item to search for
     * @return {@code true} if item is found in filtered data set
     *
     * @see #setIdentifierProvider(IdentifierProvider)
     */
    boolean contains(T item);
    /**
     * Get the full item count with filters if any set. As the item count might
     * change at any point, it is recommended to add a listener with the
     * {@link #addItemCountChangeListener(ComponentEventListener)} method
     * instead to get notified when the item count has changed.
     *
     * @return filtered item count
     * @see #addItemCountChangeListener(ComponentEventListener)
     */
    int getItemCount();
    /**
     * Gets the item after the given item from the filtered and sorted data.
     * <p>
     * Note! The item might be present in the data set, but be filtered out or
     * be the last item so that the next item won't be available.
     *
     * @param item
     *            item to get next for
     * @return next item if available, else an empty optional if the item
     *         doesn't exist or is not in the current filtered items
     *
     * @see #getPreviousItem(Object)
     */
    Optional<T> getNextItem(T item);
    /**
     * Gets the item before the given item from the filtered and sorted data.
     * <p>
     * Note! The item might be present in the data set, but be filtered out or
     * be the first item so that the previous item won't be available.
     *
     * @param item
     *            item to get previous for
     * @return previous item if available, else an empty optional if the item
     *         doesn't exist or is not in the current filtered items
     *
     * @see #getNextItem(Object)
     */
    Optional<T> getPreviousItem(T item);
    /**
     * Adds an item to the data list if it is not already present.
     * <p>
     * The backing {@link List} must be mutable to use this method. An
     * immutable data structure will throw an exception.
     *
     * @param item
     *            item to add
     * @return this ListDataView instance
     * @throws UnsupportedOperationException
     *             if backing collection doesn't support modification
     * @see #addItemBefore(Object, Object)
     * @see #addItemAfter(Object, Object)
     * @see #removeItem(Object)
     */
    V addItem(T item);
    /**
     * Adds an item after the given target item.
     * <p>
     * The backing {@link List} must be mutable to use this method. An
     * immutable data structure will throw an exception.
     * <p>
     * If the item is already present in the data provider, then it is moved.
     * <p>
     * Note! The item is added to the unfiltered and unsorted List.
     *
     * @param item
     *            item to add
     * @param after
     *            item after which to add the item at
     * @return this ListDataView instance
     * @throws UnsupportedOperationException
     *             if backing collection doesn't support modification
     * @throws IllegalArgumentException
     *             if item doesn't exist or collection is not a list
     * @see #addItem(Object)
     * @see #addItemBefore(Object, Object)
     */
    V addItemAfter(T item, T after);
    /**
     * Adds an item before the given target item.
     * <p>
     * The backing {@link List} must be mutable to use this method. An
     * immutable data structure will throw an exception.
     * <p>
     * If the item is already present in the data provider, then it is moved.
     * <p>
     * Note! The item is added to the unfiltered and unsorted List.
     *
     * @param item
     *            item to add
     * @param before
     *            item before which to add the item at
     * @return this ListDataView instance
     * @throws UnsupportedOperationException
     *             if backing collection doesn't support modification
     * @throws IllegalArgumentException
     *             if item doesn't exist or collection is not a list
     * @see #addItem(Object)
     * @see #addItemAfter(Object, Object)
     */
    V addItemBefore(T item, T before);
    /**
     * Adds multiple items to the data list.
     * <p>
     * The backing {@link List} must be mutable to use this method. An
     * immutable data structure will throw an exception.
     * <p>
     * Any items that are already present in the data provider are moved to the
     * end.
     *
     * @param items
     *            collection of items to add
     * @return this ListDataView instance
     * @throws UnsupportedOperationException
     *             if backing collection doesn't support modification
     * @see #removeItems(Collection)
     * @see #addItemsBefore(Collection, Object)
     * @see #addItemsAfter(Collection, Object)
     */
    V addItems(Collection<T> items);
    /**
     * Adds multiple items after the given target item. The full collection is
     * added in order after the target.
     * <p>
     * The backing {@link List} must be mutable to use this method. An
     * immutable data structure will throw an exception. Any items that are
     * already present in the data provider are moved.
     * <p>
     * Note! The items are added to the unfiltered and unsorted List.
     *
     * @param items
     *            collection of items to add
     * @param after
     *            item after which to add the item at
     * @return this ListDataView instance
     * @throws UnsupportedOperationException
     *             if backing collection doesn't support modification
     * @throws IllegalArgumentException
     *             if item doesn't exist or collection is not a list
     * @see #addItems(Collection)
     * @see #addItemsBefore(Collection, Object)
     */
    V addItemsAfter(Collection<T> items, T after);
    /**
     * Adds multiple items before the given target item. The full collection is
     * added in order before the target.
     * <p>
     * The backing {@link List} must be mutable to use this method. An
     * immutable data structure will throw an exception.
     * <p>
     * Any items that are already present in the data provider are moved.
     * <p>
     * Note! The items are added to the unfiltered and unsorted List.
     *
     * @param items
     *            collection of items to add
     * @param before
     *            item before which to add the item at
     * @return this ListDataView instance
     * @throws UnsupportedOperationException
     *             if backing collection doesn't support modification
     * @throws IllegalArgumentException
     *             if item doesn't exist or collection is not a list
     * @see #addItems(Collection)
     * @see #addItemsAfter(Collection, Object)
     */
    V addItemsBefore(Collection<T> items, T before);
    /**
     * Remove an item from the data list.
     * <p>
     * The backing {@link List} must be mutable to use this method. An
     * immutable data structure will throw an exception.
     *
     * @param item
     *            item to remove
     * @return this ListDataView instance
     * @throws UnsupportedOperationException
     *             if backing collection doesn't support modification
     * @see #addItem(Object)
     * @see #removeItems(Collection)
     */
    V removeItem(T item);
    /**
     * Remove multiple items from the data list.
     * <p>
     * The backing {@link List} must be mutable to use this method. An
     * immutable data structure will throw an exception.
     *
     * @param items
     *            collection of items to remove
     * @return this ListDataView instance
     * @throws UnsupportedOperationException
     *             if backing collection doesn't support modification
     * @see #removeItem(Object)
     * @see #addItems(Collection)
     */
    V removeItems(Collection<T> items);
    /**
     * Sets a filter to be applied to the data. The filter replaces any filter
     * that has been set or added previously. {@code null} will clear all
     * filters.
     * <p>
     * This filter is bound to the component. Thus, any other component using
     * the same {@link DataProvider} object would not be affected by setting a
     * filter through the data view of another component. A filter set by this
     * method won't be retained when a new {@link DataProvider} is set to the
     * component.
     *
     * @param filter
     *            filter to be set, or <code>null</code> to clear any previously
     *            set filters
     * @return ListDataView instance
     *
     * @see #addFilter(SerializablePredicate)
     * @see #removeFilters()
     */
    V setFilter(SerializablePredicate<T> filter);
    /**
     * Adds a filter to be applied to all queries. The filter will be used in
     * addition to any filter that has been set or added previously.
     * <p>
     * This filter is bound to the component. Thus, any other component using
     * the same {@link DataProvider} object would not be affected by setting a
     * filter through the data view of another component. A filter set by this
     * method won't be retained when a new {@link DataProvider} is set to the
     * component.
     *
     * @param filter
     *            the filter to add, not <code>null</code>
     * @return ListDataView instance
     *
     * @see #setFilter(SerializablePredicate)
     * @see #removeFilters()
     */
    V addFilter(SerializablePredicate<T> filter);
    /**
     * Removes all in-memory filters set or added.
     *
     * @return ListDataView instance
     *
     * @see #addFilter(SerializablePredicate)
     * @see #setFilter(SerializablePredicate)
     */
    V removeFilters();
    /**
     * Sets the comparator to use as the default sorting. This overrides the
     * sorting set by any other method that manipulates the default sorting.
     * <p>
     * This comparator is bound to the component. Thus, any other component
     * using the same {@link DataProvider} object would not be affected by
     * setting a sort comparator through the data view of another component. A
     * sorting set by this method won't be retained when a new
     * {@link DataProvider} is set to the component.
     *
     * @param sortComparator
     *            a comparator to use, or <code>null</code> to clear any
     *            previously set sort order
     * @return ListDataView instance
     *
     * @see #addSortComparator(SerializableComparator)
     */
    V setSortComparator(SerializableComparator<T> sortComparator);
    /**
     * Adds a comparator to the data default sorting. If no default sorting has
     * been defined, then the provided comparator will be used as the default
     * sorting. If a default sorting has been defined, then the provided
     * comparator will be used to determine the ordering of items that are
     * considered equal by the previously defined default sorting.
     * <p>
     * This comparator is bound to the component. Thus, any other component
     * using the same {@link DataProvider} object would not be affected by
     * setting a sort comparator through the data view of another component. A
     * sorting set by this method won't be retained when a new
     * {@link DataProvider} is set to the component.
     *
     * @param sortComparator
     *            a comparator to add, not <code>null</code>
     * @return ListDataView instance
     *
     * @see #setSortComparator(SerializableComparator)
     */
    V addSortComparator(SerializableComparator<T> sortComparator);
    /**
     * Removes any default sorting that has been set or added previously.
     * <p>
     * Any other component using the same {@link DataProvider} object would not
     * be affected by removing default sorting through the data view of another
     * component.
     *
     * @return ListDataView instance
     *
     * @see #setSortComparator(SerializableComparator)
     * @see #addSortComparator(SerializableComparator)
     */
    V removeSorting();
    /**
     * Sets the property and direction to use as the default sorting. This
     * overrides the sorting set by any other method that manipulates the
     * default sorting of this {@link DataProvider}.
     * <p>
     * This sort order is bound to the component. Thus, any other component
     * using the same {@link DataProvider} object would not be affected by
     * setting a sort order through the data view of another component. A sort
     * order set by this method won't be retained when a new
     * {@link DataProvider} is set to the component.
     *
     * @param valueProvider
     *            the value provider that defines the property to sort by, not
     *            <code>null</code>
     * @param sortDirection
     *            the sort direction to use, not <code>null</code>
     * @param <V1>
     *            the provided value type
     *
     * @return ListDataView instance
     *
     * @see #addSortOrder(ValueProvider, SortDirection)
     */
    <V1 extends Comparable<? super V1>> V setSortOrder(
            ValueProvider<T, V1> valueProvider, SortDirection sortDirection);
    /**
     * Adds a property and direction to the default sorting. If no default
     * sorting has been defined, then the provided sort order will be used as
     * the default sorting. If a default sorting has been defined, then the
     * provided sort order will be used to determine the ordering of items that
     * are considered equal by the previously defined default sorting.
     * <p>
     * This sort order is bound to the component. Thus, any other component
     * using the same {@link DataProvider} object would not be affected by
     * setting a sort order through the data view of another component. A
     * sorting set by this method won't be retained when a new
     * {@link DataProvider} is set to the component.
     *
     * @param valueProvider
     *            the value provider that defines the property to sort by, not
     *            <code>null</code>
     * @param sortDirection
     *            the sort direction to use, not <code>null</code>
     * @param <V1>
     *            the provided value type
     *
     * @return ListDataView instance
     *
     * @see #setSortOrder(ValueProvider, SortDirection)
     */
    <V1 extends Comparable<? super V1>> V addSortOrder(
            ValueProvider<T, V1> valueProvider, SortDirection sortDirection);
}
| 1.71875 | 2 |
hazelcast/src/test/java/com/hazelcast/client/protocol/ClientMessageTest.java | lbakman/hazelcast | 0 | 753 | package com.hazelcast.client.protocol;
import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.impl.protocol.util.BitUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import static java.nio.ByteOrder.LITTLE_ENDIAN;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
/**
 * ClientMessage tests of Flyweight functionality.
 * <p>
 * Each test encodes a {@code ClientMessage} into a {@link ByteBuffer} and then
 * verifies the wire format either byte-by-byte (little-endian layout) or by
 * decoding it back and comparing the header fields and payload.
 */
public class ClientMessageTest {
    // Charset name used when turning the payload string into bytes.
    private static final String DEFAULT_ENCODING = "UTF8";
    private static final String VAR_DATA_STR_1 = "abcdef";
    private static final byte[] BYTE_DATA = VAR_DATA_STR_1.getBytes();
    @Before
    public void setUp() {
    }
    @After
    public void tearDown() {
    }
    /**
     * Verifies the exact little-endian byte layout of an encoded header:
     * frame length, version, flags, type, correlation id, partition id and
     * data offset, in that order.
     */
    @Test
    public void shouldEncodeClientMessageCorrectly() {
        ByteBuffer byteBuffer = ByteBuffer.allocate(512);
        TestClientMessage cmEncode = new TestClientMessage();
        cmEncode.wrapForEncode(byteBuffer, 0);
        cmEncode.setMessageType(0x1122).setVersion((short) 0xEF).setFlags(ClientMessage.BEGIN_AND_END_FLAGS).setCorrelationId(0x12345678)
                .setPartitionId(0x11223344);
        // little endian
        //FRAME LENGTH (int, only the header so far)
        assertThat(byteBuffer.get(0), is((byte) ClientMessage.HEADER_SIZE));
        assertThat(byteBuffer.get(1), is((byte) 0));
        assertThat(byteBuffer.get(2), is((byte) 0));
        assertThat(byteBuffer.get(3), is((byte) 0));
        //VERSION (single byte)
        assertThat(byteBuffer.get(4), is((byte) 0xEF));
        //FLAGS - BEGIN_AND_END_FLAGS is expected to encode as 0xC0 here
        assertThat(byteBuffer.get(5), is((byte) 0xC0));
        //TYPE (short, low byte first)
        assertThat(byteBuffer.get(6), is((byte) 0x22));
        assertThat(byteBuffer.get(7), is((byte) 0x11));
        //setCorrelationId (int, low byte first)
        assertThat(byteBuffer.get(8), is((byte) 0x78));
        assertThat(byteBuffer.get(9), is((byte) 0x56));
        assertThat(byteBuffer.get(10), is((byte) 0x34));
        assertThat(byteBuffer.get(11), is((byte) 0x12));
        //setPartitionId (int, low byte first)
        assertThat(byteBuffer.get(12), is((byte) 0x44));
        assertThat(byteBuffer.get(13), is((byte) 0x33));
        assertThat(byteBuffer.get(14), is((byte) 0x22));
        assertThat(byteBuffer.get(15), is((byte) 0x11));
        //data offset (short) - payload starts right after the header
        assertThat(byteBuffer.get(16), is((byte) ClientMessage.HEADER_SIZE));
        assertThat(byteBuffer.get(17), is((byte) 0x00));
    }
    /** Round-trips a header-only message and checks every decoded field. */
    @Test
    public void shouldEncodeAndDecodeClientMessageCorrectly() {
        ByteBuffer byteBuffer = ByteBuffer.allocate(512);
        TestClientMessage cmEncode = new TestClientMessage();
        cmEncode.wrapForEncode(byteBuffer, 0);
        cmEncode.setMessageType(7)
                .setVersion((short) 3)
                .setFlags(ClientMessage.BEGIN_AND_END_FLAGS)
                .setCorrelationId(66).setPartitionId(77);
        ClientMessage cmDecode = ClientMessage.createForDecode(byteBuffer, 0);
        assertEquals(7, cmDecode.getMessageType());
        assertEquals(3, cmDecode.getVersion());
        assertEquals(ClientMessage.BEGIN_AND_END_FLAGS, cmDecode.getFlags());
        assertEquals(66, cmDecode.getCorrelationId());
        assertEquals(77, cmDecode.getPartitionId());
        assertEquals(ClientMessage.HEADER_SIZE, cmDecode.getFrameLength());
    }
    /** Round-trips a message with payload and checks frame length and data. */
    @Test
    public void shouldEncodeAndDecodeClientMessageCorrectly_withPayLoadData()
            throws UnsupportedEncodingException {
        ByteBuffer byteBuffer = ByteBuffer.allocate(1024);
        TestClientMessage cmEncode = new TestClientMessage();
        cmEncode.wrapForEncode(byteBuffer, 0);
        cmEncode.setMessageType(7)
                .setVersion((short) 3)
                .setFlags(ClientMessage.BEGIN_AND_END_FLAGS)
                .setCorrelationId(66).setPartitionId(77);
        final byte[] data1 = VAR_DATA_STR_1.getBytes(DEFAULT_ENCODING);
        // frame = fixed header + payload bytes
        final int calculatedFrameSize = ClientMessage.HEADER_SIZE + data1.length;
        cmEncode.putPayloadData(data1);
        ClientMessage cmDecode = ClientMessage.createForDecode(byteBuffer, 0);
        final byte[] cmDecodeVarData1 = new byte[data1.length];
        cmDecode.getPayloadData(cmDecodeVarData1);
        assertEquals(calculatedFrameSize, cmDecode.getFrameLength());
        assertArrayEquals(cmDecodeVarData1, data1);
    }
    /**
     * Forward compatibility: a message written with a (simulated) newer,
     * larger header must still decode correctly with the current version,
     * which skips unknown header bytes via the data offset.
     */
    @Test
    public void shouldEncodeWithNewVersionAndDecodeWithOldVersionCorrectly_withPayLoadData()
            throws UnsupportedEncodingException {
        ByteBuffer byteBuffer = ByteBuffer.allocate(1024);
        FutureClientMessage cmEncode = new FutureClientMessage();
        cmEncode.wrapForEncode(byteBuffer, 0);
        cmEncode.theNewField(999)
                .setMessageType(7).setVersion((short) 3)
                .setFlags(ClientMessage.BEGIN_AND_END_FLAGS)
                .setCorrelationId(66).setPartitionId(77);
        final int calculatedFrameSize = FutureClientMessage.THE_NEW_HEADER_SIZE + BYTE_DATA.length;
        cmEncode.putPayloadData(BYTE_DATA);
        ClientMessage cmDecode = ClientMessage.createForDecode(byteBuffer, 0);
        final byte[] cmDecodeVarData1 = new byte[BYTE_DATA.length];
        cmDecode.getPayloadData(cmDecodeVarData1);
        assertEquals(7, cmDecode.getMessageType());
        assertEquals(3, cmDecode.getVersion());
        assertEquals(ClientMessage.BEGIN_AND_END_FLAGS, cmDecode.getFlags());
        assertEquals(66, cmDecode.getCorrelationId());
        assertEquals(77, cmDecode.getPartitionId());
        assertEquals(calculatedFrameSize, cmDecode.getFrameLength());
        assertArrayEquals(cmDecodeVarData1, BYTE_DATA);
    }
    /**
     * Backward compatibility: a message written with the current header must
     * decode correctly when read through the (simulated) future message class.
     */
    @Test
    public void shouldEncodeWithOldVersionAndDecodeWithNewVersionCorrectly_withPayLoadData()
            throws UnsupportedEncodingException {
        ByteBuffer byteBuffer = ByteBuffer.allocate(1024);
        TestClientMessage cmEncode = new TestClientMessage();
        cmEncode.wrapForEncode(byteBuffer, 0);
        cmEncode.setMessageType(7).setVersion((short) 3)
                .setFlags(ClientMessage.BEGIN_AND_END_FLAGS)
                .setCorrelationId(66).setPartitionId(77);
        final int calculatedFrameSize = ClientMessage.HEADER_SIZE + BYTE_DATA.length;
        cmEncode.putPayloadData(BYTE_DATA);
        FutureClientMessage cmDecode = new FutureClientMessage();
        cmDecode.wrapForDecode(byteBuffer, 0);
        final byte[] cmDecodeVarData1 = new byte[BYTE_DATA.length];
        cmDecode.getPayloadData(cmDecodeVarData1);
        assertEquals(7, cmDecode.getMessageType());
        assertEquals(3, cmDecode.getVersion());
        assertEquals(ClientMessage.BEGIN_AND_END_FLAGS, cmDecode.getFlags());
        assertEquals(66, cmDecode.getCorrelationId());
        assertEquals(77, cmDecode.getPartitionId());
        assertEquals(calculatedFrameSize, cmDecode.getFrameLength());
        assertArrayEquals(cmDecodeVarData1, BYTE_DATA);
    }
    /**
     * Two messages encoded back-to-back into the same buffer must both decode
     * correctly; the second one is located via the first one's frame length.
     */
    @Test
    public void shouldEncodeAndDecodeClientMessageCorrectly_withPayLoadData_multipleMessages()
            throws UnsupportedEncodingException {
        ByteBuffer byteBuffer = ByteBuffer.allocate(1024);
        TestClientMessage cmEncode = new TestClientMessage();
        cmEncode.wrapForEncode(byteBuffer, 0);
        cmEncode.setMessageType(7).setVersion((short) 3).setFlags(ClientMessage.BEGIN_AND_END_FLAGS)
                .setCorrelationId(1).setPartitionId(77);
        cmEncode.putPayloadData(BYTE_DATA);
        final int calculatedFrame1Size = ClientMessage.HEADER_SIZE + BYTE_DATA.length;
        final int nexMessageOffset = cmEncode.getFrameLength();
        TestClientMessage cmEncode2 = new TestClientMessage();
        cmEncode2.wrapForEncode(byteBuffer, nexMessageOffset);
        cmEncode2.setMessageType(7).setVersion((short) 3).setFlags(ClientMessage.BEGIN_AND_END_FLAGS)
                .setCorrelationId(2).setPartitionId(77);
        cmEncode2.putPayloadData(BYTE_DATA);
        final int calculatedFrame2Size = ClientMessage.HEADER_SIZE + BYTE_DATA.length;
        ClientMessage cmDecode1 = ClientMessage.createForDecode(byteBuffer, 0);
        final byte[] cmDecodeVarData = new byte[BYTE_DATA.length];
        cmDecode1.getPayloadData(cmDecodeVarData);
        assertEquals(7, cmDecode1.getMessageType());
        assertEquals(3, cmDecode1.getVersion());
        assertEquals(ClientMessage.BEGIN_AND_END_FLAGS, cmDecode1.getFlags());
        assertEquals(1, cmDecode1.getCorrelationId());
        assertEquals(77, cmDecode1.getPartitionId());
        assertEquals(calculatedFrame1Size, cmDecode1.getFrameLength());
        assertArrayEquals(cmDecodeVarData, BYTE_DATA);
        ClientMessage cmDecode2 = ClientMessage.createForDecode(byteBuffer, cmDecode1.getFrameLength());
        cmDecode2.getPayloadData(cmDecodeVarData);
        assertEquals(7, cmDecode2.getMessageType());
        assertEquals(3, cmDecode2.getVersion());
        assertEquals(ClientMessage.BEGIN_AND_END_FLAGS, cmDecode2.getFlags());
        assertEquals(2, cmDecode2.getCorrelationId());
        assertEquals(77, cmDecode2.getPartitionId());
        assertEquals(calculatedFrame2Size, cmDecode2.getFrameLength());
        assertArrayEquals(cmDecodeVarData, BYTE_DATA);
    }
    /**
     * Simulates a hypothetical future protocol version whose header carries
     * one extra int field, to exercise forward/backward compatibility.
     */
    private static class FutureClientMessage extends TestClientMessage {
        private static final int THE_NEW_FIELD_OFFSET = HEADER_SIZE + BitUtil.SIZE_OF_SHORT;
        private static final int THE_NEW_HEADER_SIZE = HEADER_SIZE + BitUtil.SIZE_OF_INT;
        @Override
        public void wrapForEncode(final ByteBuffer buffer, final int offset) {
            super.wrap(buffer, offset);
            setDataOffset(THE_NEW_HEADER_SIZE);
            setFrameLength(THE_NEW_HEADER_SIZE);
            index(getDataOffset());
        }
        // reads the extra header field (stored little-endian as unsigned 32-bit)
        public int theNewField(){
            return (int) uint32Get(offset() + THE_NEW_FIELD_OFFSET, LITTLE_ENDIAN);
        }
        // writes the extra header field; returns this for chaining
        public FutureClientMessage theNewField(int value){
            uint32Put(offset() + THE_NEW_FIELD_OFFSET, value, LITTLE_ENDIAN);
            return this;
        }
    }
    /** Widens {@code setMessageType} to public so tests can set it directly. */
    private static class TestClientMessage extends ClientMessage {
        @Override
        public ClientMessage setMessageType(int type) {
            return super.setMessageType(type);
        }
    }
}
| 1.359375 | 1 |
cloudalibaba-consumer-nacos-order83/src/main/java/com/atguigu/springcloud/alibaba/controller/OrderNacosController.java | ClqStart/SpringCloud2020 | 0 | 761 | package com.atguigu.springcloud.alibaba.controller;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.client.RestTemplate;
import javax.annotation.Resource;
/**
 * Consumer-side REST endpoint that forwards payment lookups to the
 * Nacos-registered payment service through a load-balanced {@link RestTemplate}.
 */
@RestController
@Slf4j
public class OrderNacosController {

    @Resource
    private RestTemplate restTemplate;

    // Base URL of the remote payment service, resolved from application config.
    @Value("${service-url.nacos-user-service}")
    private String serverURL;

    /**
     * Fetches payment information for the given id from the remote service.
     *
     * @param id payment identifier, appended verbatim to the remote path
     * @return the remote service's response body
     */
    @GetMapping(value = "/consumer/payment/nacos/{id}")
    public String paymentInfo(@PathVariable("id") Long id) {
        String paymentUrl = serverURL + "/payment/nacos/" + id;
        return restTemplate.getForObject(paymentUrl, String.class);
    }
}
| 0.914063 | 1 |
zipkin-storage/elasticsearch-http/src/test/java/zipkin/storage/elasticsearch/http/PseudoAddressRecordSetTest.java | dantesun/zipkin | 0 | 769 | /**
* Copyright 2015-2016 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin.storage.elasticsearch.http;
import com.google.common.net.InetAddresses;
import java.net.UnknownHostException;
import okhttp3.Dns;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
/** Unit tests for {@code PseudoAddressRecordSet}. */
public class PseudoAddressRecordSetTest {
    @Rule
    public ExpectedException thrown = ExpectedException.none();

    /** Fails fast if a test unexpectedly tries to resolve a hostname. */
    Dns underlying = hostname -> {
        throw new UnsupportedOperationException();
    };

    /** A single port must be shared by all hosts. */
    @Test
    public void mixedPortsNotSupported() {
        thrown.expect(IllegalArgumentException.class);
        thrown.expectMessage(
            "Only one port supported with multiple hosts [http://1.1.1.1:9200, http://2.2.2.2:9201]");

        PseudoAddressRecordSet.create(asList("http://1.1.1.1:9200", "http://2.2.2.2:9201"), underlying);
    }

    /** Multiple hosts may only use plain http. */
    @Test
    public void httpsNotSupported() {
        thrown.expect(IllegalArgumentException.class);
        thrown.expectMessage(
            "Only http supported with multiple hosts [https://1.1.1.1:9200, https://2.2.2.2:9200]");

        PseudoAddressRecordSet.create(asList("https://1.1.1.1:9200", "https://2.2.2.2:9200"),
            underlying);
    }

    /** Literal IPv4 hosts are combined into one static record set. */
    @Test
    public void concatenatesIPv4List() throws UnknownHostException {
        Dns staticDns = PseudoAddressRecordSet.create(asList("http://1.1.1.1:9200", "http://2.2.2.2:9200"),
            underlying);

        assertThat(staticDns).isInstanceOf(PseudoAddressRecordSet.StaticDns.class);
        assertThat(staticDns.lookup("foo"))
            .containsExactly(InetAddresses.forString("1.1.1.1"), InetAddresses.forString("2.2.2.2"));
    }

    /** Only non-literal hostnames are delegated to the underlying resolver. */
    @Test
    public void onlyLooksUpHostnames() throws UnknownHostException {
        underlying = hostname -> {
            assertThat(hostname).isEqualTo("myhost");
            return asList(InetAddresses.forString("2.2.2.2"));
        };

        Dns mixedDns = PseudoAddressRecordSet.create(asList("http://1.1.1.1:9200", "http://myhost:9200"),
            underlying);

        assertThat(mixedDns.lookup("foo"))
            .containsExactly(InetAddresses.forString("1.1.1.1"), InetAddresses.forString("2.2.2.2"));
    }

    /** IPv4 and IPv6 literals can be mixed in the same record set. */
    @Test
    public void concatenatesMixedIpLengths() throws UnknownHostException {
        Dns staticDns =
            PseudoAddressRecordSet.create(asList("http://1.1.1.1:9200", "http://[2001:db8::c001]:9200"),
                underlying);

        assertThat(staticDns).isInstanceOf(PseudoAddressRecordSet.StaticDns.class);
        assertThat(staticDns.lookup("foo"))
            .containsExactly(InetAddresses.forString("1.1.1.1"),
                InetAddresses.forString("2001:db8::c001"));
    }
}
| 1.289063 | 1 |
xchange-core/src/main/java/org/knowm/xchange/dto/meta/CurrencyMetaData.java | mathijs81/XChange | 1 | 777 | package org.knowm.xchange.dto.meta;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
import java.math.BigDecimal;
/**
 * Immutable metadata about a single currency on an exchange: the supported
 * decimal scale plus withdrawal constraints. Serialized to/from JSON via the
 * Jackson property annotations.
 */
public class CurrencyMetaData implements Serializable {

    private static final long serialVersionUID = -247899067657358542L;

    // Number of decimal places the exchange supports for this currency.
    @JsonProperty("scale")
    private final Integer scale;

    /** Withdrawal fee */
    @JsonProperty("withdrawal_fee")
    private final BigDecimal withdrawalFee;

    /** Minimum withdrawal amount */
    @JsonProperty("min_withdrawal_amount")
    private final BigDecimal minWithdrawalAmount;

    /**
     * Convenience constructor without a minimum withdrawal amount.
     *
     * @param scale decimal scale, may be {@code null}
     * @param withdrawalFee withdrawal fee, may be {@code null}
     */
    public CurrencyMetaData(Integer scale, BigDecimal withdrawalFee) {
        this(scale, withdrawalFee, null);
    }

    /**
     * Full constructor, also used by Jackson for deserialization.
     *
     * @param scale decimal scale, may be {@code null}
     * @param withdrawalFee withdrawal fee, may be {@code null}
     * @param minWithdrawalAmount minimum withdrawal amount, may be {@code null}
     */
    public CurrencyMetaData(
        @JsonProperty("scale") Integer scale,
        @JsonProperty("withdrawal_fee") BigDecimal withdrawalFee,
        @JsonProperty("min_withdrawal_amount") BigDecimal minWithdrawalAmount) {
        this.scale = scale;
        this.withdrawalFee = withdrawalFee;
        this.minWithdrawalAmount = minWithdrawalAmount;
    }

    public Integer getScale() {
        return scale;
    }

    public BigDecimal getWithdrawalFee() {
        return withdrawalFee;
    }

    public BigDecimal getMinWithdrawalAmount() {
        return minWithdrawalAmount;
    }

    @Override
    public String toString() {
        return String.format(
            "CurrencyMetaData [scale=%s, withdrawalFee=%s, minWithdrawalAmount=%s]",
            scale, withdrawalFee, minWithdrawalAmount);
    }
}
| 1.101563 | 1 |
src/test/java/edu/montana/gsoc/msusel/quamoco/distiller/DistillerDataTest.java | MSUSEL/msusel-quamoco | 1 | 785 | /**
* The MIT License (MIT)
*
* MSUSEL Quamoco Implementation
* Copyright (c) 2015-2017 Montana State University, Gianforte School of Computing,
* Software Engineering Laboratory
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package edu.montana.gsoc.msusel.quamoco.distiller;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.Lists;
import edu.montana.gsoc.msusel.quamoco.distiller.DistillerData;
import edu.montana.gsoc.msusel.quamoco.model.qm.QualityModel;
/**
 * The class <code>DistillerDataTest</code> contains tests for the class
 * <code>{@link DistillerData}</code>. It verifies that a
 * {@code DistillerData} constructed from a list of quality models exposes
 * both the original list and a name-keyed model map.
 *
 * @generatedBy CodePro at 5/30/15 3:41 PM
 * @author isaac
 * @version $Revision: 1.0 $
 */
public class DistillerDataTest {
    /**
     * Run the DistillerData(List<QualityModel>) constructor test.
     * Constructing from a one-model list must succeed and return a non-null
     * instance.
     *
     * @throws Exception
     * @generatedBy CodePro at 5/30/15 3:41 PM
     */
    @Test
    public void testDistillerData_1() throws Exception
    {
        final List<QualityModel> models = Lists.newArrayList();
        models.add(new QualityModel("java", "", null, null, "java"));
        final DistillerData result = new DistillerData(models);
        Assert.assertNotNull(result);
    }
    /**
     * Run the Map<String, QualityModel> getModelMap() method test.
     * The map must be keyed by model name ("java" here) and hold the model.
     *
     * @throws Exception
     * @generatedBy CodePro at 5/30/15 3:41 PM
     */
    @Test
    public void testGetModelMap_1() throws Exception
    {
        final List<QualityModel> models = Lists.newArrayList();
        models.add(new QualityModel("java", "", null, null, "java"));
        final DistillerData fixture = new DistillerData(models);
        final Map<String, QualityModel> result = fixture.getModelMap();
        Assert.assertTrue(result.containsKey("java"));
        Assert.assertNotNull(result.get("java"));
    }
    /**
     * Run the List<QualityModel> getModels() method test.
     * The accessor must return the same list content that was passed in.
     *
     * @throws Exception
     * @generatedBy CodePro at 5/30/15 3:41 PM
     */
    @Test
    public void testGetModels_1() throws Exception
    {
        final List<QualityModel> models = Lists.newArrayList();
        models.add(new QualityModel("java", "", null, null, "java"));
        final DistillerData fixture = new DistillerData(models);
        final List<QualityModel> result = fixture.getModels();
        Assert.assertEquals(models, result);
    }
    /**
     * Perform pre-test initialization. Intentionally empty: the fixtures are
     * built inside each test.
     *
     * @throws Exception
     *             if the initialization fails for some reason
     * @generatedBy CodePro at 5/30/15 3:41 PM
     */
    @Before
    public void setUp() throws Exception
    {
        // intentionally empty - no shared fixture needed
    }
    /**
     * Perform post-test clean-up. Intentionally empty: no resources are held.
     *
     * @throws Exception
     *             if the clean-up fails for some reason
     * @generatedBy CodePro at 5/30/15 3:41 PM
     */
    @After
    public void tearDown() throws Exception
    {
        // intentionally empty - nothing to release
    }
    /**
     * Launch the test from the command line via the JUnit runner.
     *
     * @param args
     *            the command line arguments
     * @generatedBy CodePro at 5/30/15 3:41 PM
     */
    public static void main(final String[] args)
    {
        new org.junit.runner.JUnitCore().run(DistillerDataTest.class);
    }
}
| 1.226563 | 1 |
tomcat/src/main/java/com/bolly/tomcat/config/HttpServletRequestParseBodyWrapper.java | chibaolai/java_wheels | 0 | 793 | package com.bolly.tomcat.config;
import com.bolly.support.utils.JacksonUtils;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.ReadListener;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;

import java.io.*;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
/**
 * HttpServletRequest wrapper that buffers the request body at construction
 * time so it can be re-read any number of times via {@link #getInputStream()}
 * or {@link #getReader()}.
 */
public class HttpServletRequestParseBodyWrapper extends HttpServletRequestWrapper implements Cloneable {
    private static final Logger LOGGER = LoggerFactory.getLogger(HttpServletRequestParseBodyWrapper.class);

    // Raw body bytes captured once from the wrapped request's input stream.
    private final byte[] body;

    /**
     * Wraps the request and eagerly consumes its body into memory.
     *
     * @param request the request whose body should be cached
     * @throws IOException declared for API compatibility with callers
     */
    public HttpServletRequestParseBodyWrapper(HttpServletRequest request) throws IOException {
        super(request);
        // Read raw bytes directly: the previous line-based approach dropped
        // line separators, corrupting multi-line bodies.
        body = readBodyBytes(request);
    }

    /** Returns a reader over the cached body, decoded as UTF-8. */
    @Override
    public BufferedReader getReader() throws IOException {
        return new BufferedReader(new InputStreamReader(getInputStream(), StandardCharsets.UTF_8));
    }

    /** Returns a fresh stream over the cached body; safe to call repeatedly. */
    @Override
    public ServletInputStream getInputStream() throws IOException {
        final ByteArrayInputStream bais = new ByteArrayInputStream(body);
        return new ServletInputStream() {
            @Override
            public boolean isFinished() {
                // Finished once every cached byte has been consumed
                // (the original always returned false, violating the contract).
                return bais.available() == 0;
            }
            @Override
            public boolean isReady() {
                // Data is fully buffered in memory, so reads never block.
                return true;
            }
            @Override
            public void setReadListener(ReadListener listener) {
                // No-op: async read notifications are unnecessary for an
                // in-memory stream that is always ready.
            }
            @Override
            public int read() throws IOException {
                return bais.read();
            }
        };
    }

    /**
     * Reads the complete request body as raw bytes, preserving line
     * separators. Best-effort: an I/O failure is logged and whatever was read
     * so far is returned, matching the wrapper's original lenient behavior.
     *
     * @param request source request
     * @return body bytes (possibly empty, never null)
     */
    private static byte[] readBodyBytes(ServletRequest request) {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (InputStream inputStream = request.getInputStream()) {
            byte[] chunk = new byte[8192];
            int n;
            while ((n = inputStream.read(chunk)) != -1) {
                buffer.write(chunk, 0, n);
            }
        } catch (IOException e) {
            // Log via the class logger instead of printStackTrace.
            LOGGER.warn("Failed to read request body", e);
        }
        return buffer.toByteArray();
    }

    /**
     * Returns the cached body decoded as UTF-8. (The body was captured as
     * UTF-8; the original used the platform default charset here, which could
     * garble non-ASCII bodies on misconfigured hosts.)
     */
    public String getBody() {
        return new String(body, StandardCharsets.UTF_8);
    }

    /**
     * Serializes all headers of the given request to a JSON object string.
     *
     * @param request request to read headers from
     * @return JSON string mapping header name to value
     */
    public String getHeader(HttpServletRequest request) {
        Map<String, String> map = new HashMap<>();
        Enumeration<String> headerNames = request.getHeaderNames();
        while (headerNames.hasMoreElements()) {
            String key = headerNames.nextElement();
            map.put(key, request.getHeader(key));
        }
        return JacksonUtils.marshal(map);
    }
}
| 1.421875 | 1 |