file_id
int64
1
215k
content
stringlengths
7
454k
repo
stringlengths
6
113
path
stringlengths
6
251
884
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.common;

import org.elasticsearch.common.time.DateFormatter;

import java.time.Instant;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static java.util.Collections.emptyMap;

/**
 * A simple in-memory tabular structure: a single header row followed by value rows.
 * Build order is enforced — headers first ({@link #startHeaders()} / {@link #endHeaders()}),
 * then value rows ({@link #startRow()} / {@link #endRow()}). Cells added to a value row
 * inherit the attributes of the header cell in the same column.
 */
public class Table {

    private List<Cell> headers = new ArrayList<>();
    private List<List<Cell>> rows = new ArrayList<>();
    // Column-oriented view: header name -> all cells of that column, populated as rows are added.
    private Map<String, List<Cell>> map = new HashMap<>();
    // Lookup of header cells by their name.
    private Map<String, Cell> headerMap = new HashMap<>();
    // Cells of the row (or header row) currently being built; null when no row is open.
    private List<Cell> currentCells;
    private boolean inHeaders = false;
    // When true, every value row is automatically prefixed with epoch/timestamp cells.
    private boolean withTime = false;

    public static final String EPOCH = "epoch";
    public static final String TIMESTAMP = "timestamp";

    /**
     * Begins the header row. Must be called (and finished with {@link #endHeaders()})
     * before any value rows are added.
     */
    public Table startHeaders() {
        inHeaders = true;
        currentCells = new ArrayList<>();
        return this;
    }

    /**
     * Begins the header row and adds the standard {@link #EPOCH} and {@link #TIMESTAMP}
     * columns; subsequent value rows automatically receive the current time in both cells.
     */
    public Table startHeadersWithTimestamp() {
        startHeaders();
        this.withTime = true;
        // Use the shared constants so the auto-filled cells in startRow() match these headers.
        addCell(EPOCH, "alias:t,time;desc:seconds since 1970-01-01 00:00:00");
        addCell(TIMESTAMP, "alias:ts,hms,hhmmss;desc:time in HH:MM:SS");
        return this;
    }

    /**
     * Finishes the header row and builds the column-oriented lookup structures.
     *
     * @throws IllegalStateException if no header cells were added
     */
    public Table endHeaders() {
        if (currentCells == null || currentCells.isEmpty()) {
            throw new IllegalStateException("no headers added...");
        }
        inHeaders = false;
        headers = currentCells;
        currentCells = null;

        /* Create associative structure for columns that
         * contain the same cells as the rows:
         *
         * header1 => [Cell, Cell, ...]
         * header2 => [Cell, Cell, ...]
         * header3 => [Cell, Cell, ...]
         *
         * Also populate map to look up headers by name.
         */
        for (Cell header : headers) {
            map.put(header.value.toString(), new ArrayList<>());
            headerMap.put(header.value.toString(), header);
        }

        return this;
    }

    private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);

    /**
     * Begins a new value row. If the table was started with
     * {@link #startHeadersWithTimestamp()}, the epoch-seconds and HH:mm:ss cells
     * are filled in automatically from the current wall clock.
     *
     * @throws IllegalStateException if headers have not been defined yet
     */
    public Table startRow() {
        if (headers.isEmpty()) {
            throw new IllegalStateException("no headers added...");
        }
        currentCells = new ArrayList<>(headers.size());
        if (withTime) {
            long time = System.currentTimeMillis();
            addCell(TimeUnit.SECONDS.convert(time, TimeUnit.MILLISECONDS));
            addCell(FORMATTER.format(Instant.ofEpochMilli(time)));
        }
        return this;
    }

    /**
     * Finishes the current value row.
     *
     * @param check when true, verify the row has exactly as many cells as there are headers
     * @throws IllegalStateException if no row is open, or the check fails
     */
    public Table endRow(boolean check) {
        if (currentCells == null) {
            throw new IllegalStateException("no row started...");
        }
        if (check && (currentCells.size() != headers.size())) {
            throw new IllegalStateException(
                "mismatch on number of cells " + currentCells.size() + " in a row compared to header " + headers.size()
            );
        }
        rows.add(currentCells);
        currentCells = null;
        return this;
    }

    /**
     * Finishes the current value row, verifying it matches the header width.
     */
    public Table endRow() {
        endRow(true);
        return this;
    }

    /**
     * Adds a cell with no explicit attributes to the row currently being built.
     */
    public Table addCell(Object value) {
        return addCell(value, "");
    }

    /**
     * Adds a cell to the row (or header row) currently being built.
     *
     * @param value the cell value
     * @param attributes semicolon-separated {@code key:value} pairs; for value rows these
     *                   are merged on top of the corresponding header cell's attributes
     * @throws IllegalStateException if no row is open or the row is already full
     */
    public Table addCell(Object value, String attributes) {
        if (currentCells == null) {
            throw new IllegalStateException("no block started...");
        }
        if (inHeaders == false) {
            if (currentCells.size() == headers.size()) {
                throw new IllegalStateException("can't add more cells to a row than the header");
            }
        }
        Map<String, String> mAttr;
        if (attributes.length() == 0) {
            if (inHeaders) {
                mAttr = emptyMap();
            } else {
                // get the attributes of the header cell we are going to add to
                mAttr = headers.get(currentCells.size()).attr;
            }
        } else {
            mAttr = new HashMap<>();
            if (inHeaders == false) {
                // get the attributes of the header cell we are going to add
                mAttr.putAll(headers.get(currentCells.size()).attr);
            }
            String[] sAttrs = attributes.split(";");
            for (String sAttr : sAttrs) {
                if (sAttr.length() == 0) {
                    continue;
                }
                // NOTE: attributes are expected to contain a ':'; a malformed entry without
                // one will fail fast with StringIndexOutOfBoundsException (idx == -1).
                int idx = sAttr.indexOf(':');
                mAttr.put(sAttr.substring(0, idx), sAttr.substring(idx + 1));
            }
        }

        Cell cell = new Cell(value, mAttr);
        int cellIndex = currentCells.size();
        currentCells.add(cell);

        // If we're in a value row, also populate the named column.
        if (inHeaders == false) {
            String hdr = (String) headers.get(cellIndex).value;
            map.get(hdr).add(cell);
        }
        return this;
    }

    public List<Cell> getHeaders() {
        return this.headers;
    }

    public List<List<Cell>> getRows() {
        return rows;
    }

    /** @return the column-oriented view: header name to all cells in that column. */
    public Map<String, List<Cell>> getAsMap() {
        return this.map;
    }

    public Map<String, Cell> getHeaderMap() {
        return this.headerMap;
    }

    /**
     * Finds a header cell by its exact name.
     *
     * @return the matching header cell, or {@code null} if not present
     */
    public Cell findHeaderByName(String header) {
        for (Cell cell : headers) {
            if (cell.value.toString().equals(header)) {
                return cell;
            }
        }
        return null;
    }

    /**
     * Builds a map from every header alias (and the header name itself) to the
     * canonical header name, based on each header's comma-separated "alias" attribute.
     */
    public Map<String, String> getAliasMap() {
        Map<String, String> headerAliasMap = new HashMap<>();
        for (Cell headerCell : headers) {
            String headerName = headerCell.value.toString();
            if (headerCell.attr.containsKey("alias")) {
                String[] aliases = Strings.splitStringByCommaToArray(headerCell.attr.get("alias"));
                for (String alias : aliases) {
                    headerAliasMap.put(alias, headerName);
                }
            }
            // Every header maps to itself as well, so canonical names always resolve.
            headerAliasMap.put(headerName, headerName);
        }
        return headerAliasMap;
    }

    /**
     * A single table cell: an arbitrary value plus string attributes (alias, desc, ...).
     */
    public static class Cell {
        public final Object value;
        public final Map<String, String> attr;

        /** Creates a cell sharing the attribute map of another cell. */
        public Cell(Object value, Cell other) {
            this.value = value;
            this.attr = other.attr;
        }

        public Cell(Object value) {
            this.value = value;
            this.attr = new HashMap<>();
        }

        public Cell(Object value, Map<String, String> attr) {
            this.value = value;
            this.attr = attr;
        }
    }
}
elastic/elasticsearch
server/src/main/java/org/elasticsearch/common/Table.java
886
/* * This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt). * * The MIT License * Copyright © 2014-2022 Ilkka Seppälä * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.iluwatar.flux.view; import com.iluwatar.flux.action.Content; import com.iluwatar.flux.store.ContentStore; import com.iluwatar.flux.store.Store; import lombok.extern.slf4j.Slf4j; /** * ContentView is a concrete view. */ @Slf4j public class ContentView implements View { private Content content = Content.PRODUCTS; @Override public void storeChanged(Store store) { var contentStore = (ContentStore) store; content = contentStore.getContent(); render(); } @Override public void render() { LOGGER.info(content.toString()); } }
smedals/java-design-patterns
flux/src/main/java/com/iluwatar/flux/view/ContentView.java
888
/*
 * This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
 *
 * The MIT License
 * Copyright © 2014-2022 Ilkka Seppälä
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.iluwatar.dao;

import java.sql.SQLException;
import java.util.List;
import javax.sql.DataSource;
import lombok.extern.slf4j.Slf4j;
import org.h2.jdbcx.JdbcDataSource;

/**
 * Data Access Object (DAO) is an object that provides an abstract interface to some type of
 * database or other persistence mechanism. By mapping application calls to the persistence layer,
 * DAO provide some specific data operations without exposing details of the database. This
 * isolation supports the Single responsibility principle. It separates what data accesses the
 * application needs, in terms of domain-specific objects and data types (the public interface of
 * the DAO), from how these needs can be satisfied with a specific DBMS.
 *
 * <p>With the DAO pattern, we can use various method calls to retrieve/add/delete/update data
 * without directly interacting with the data source. The below example demonstrates basic CRUD
 * operations: select, add, update, and delete.
 */
@Slf4j
public class App {
  private static final String DB_URL = "jdbc:h2:mem:dao;DB_CLOSE_DELAY=-1";
  private static final String ALL_CUSTOMERS = "customerDao.getAllCustomers(): ";

  /**
   * Program entry point.
   *
   * @param args command line args.
   * @throws Exception if any error occurs.
   */
  public static void main(final String[] args) throws Exception {
    final var inMemoryDao = new InMemoryCustomerDao();
    performOperationsUsing(inMemoryDao);

    final var dataSource = createDataSource();
    createSchema(dataSource);
    final var dbDao = new DbCustomerDao(dataSource);
    performOperationsUsing(dbDao);
    deleteSchema(dataSource);
  }

  private static void deleteSchema(DataSource dataSource) throws SQLException {
    try (var connection = dataSource.getConnection();
        var statement = connection.createStatement()) {
      statement.execute(CustomerSchemaSql.DELETE_SCHEMA_SQL);
    }
  }

  private static void createSchema(DataSource dataSource) throws SQLException {
    try (var connection = dataSource.getConnection();
        var statement = connection.createStatement()) {
      statement.execute(CustomerSchemaSql.CREATE_SCHEMA_SQL);
    }
  }

  private static DataSource createDataSource() {
    var dataSource = new JdbcDataSource();
    dataSource.setURL(DB_URL);
    return dataSource;
  }

  /**
   * Demonstrates select, add, update, and delete against the given DAO.
   *
   * @param customerDao the DAO implementation to exercise.
   * @throws Exception if any DAO operation fails.
   */
  private static void performOperationsUsing(final CustomerDao customerDao) throws Exception {
    addCustomers(customerDao);
    logAllCustomers(customerDao);
    LOGGER.info("customerDao.getCustomerById(2): " + customerDao.getById(2));
    final var customer = new Customer(4, "Dan", "Danson");
    customerDao.add(customer);
    // BUGFIX: previously logged ALL_CUSTOMERS + customerDao.getAll(), which printed the
    // Stream object's toString() instead of the customers and leaked the unclosed stream.
    logAllCustomers(customerDao);
    customer.setFirstName("Daniel");
    customer.setLastName("Danielson");
    customerDao.update(customer);
    logAllCustomers(customerDao);
    customerDao.delete(customer);
    logAllCustomers(customerDao);
  }

  /**
   * Logs every customer currently held by the DAO, closing the backing stream afterwards.
   *
   * @param customerDao the DAO to list customers from.
   * @throws Exception if retrieval fails.
   */
  private static void logAllCustomers(final CustomerDao customerDao) throws Exception {
    LOGGER.info(ALL_CUSTOMERS);
    // The stream may hold a live database connection, so close it via try-with-resources.
    try (var customerStream = customerDao.getAll()) {
      customerStream.forEach(cust -> LOGGER.info(cust.toString()));
    }
  }

  private static void addCustomers(CustomerDao customerDao) throws Exception {
    for (var customer : generateSampleCustomers()) {
      customerDao.add(customer);
    }
  }

  /**
   * Generate customers.
   *
   * @return list of customers.
   */
  public static List<Customer> generateSampleCustomers() {
    final var customer1 = new Customer(1, "Adam", "Adamson");
    final var customer2 = new Customer(2, "Bob", "Bobson");
    final var customer3 = new Customer(3, "Carl", "Carlson");
    return List.of(customer1, customer2, customer3);
  }
}
arasgungore/java-design-patterns
dao/src/main/java/com/iluwatar/dao/App.java
890
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.script; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.AbstractObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ObjectParser.ValueType; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser.Token; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.function.BiConsumer; /** * {@link Script} represents used-defined input that can be used to * compile and execute a script from the {@link ScriptService} * based on the {@link ScriptType}. 
* * There are two types of scripts specified by {@link ScriptType}, * <code>INLINE</code>, and <code>STORED</code>. * * The following describes the expected parameters for each type of script: * * <ul> * <li> {@link ScriptType#INLINE} * <ul> * <li> {@link Script#lang} - specifies the language, defaults to {@link Script#DEFAULT_SCRIPT_LANG} * <li> {@link Script#idOrCode} - specifies the code to be compiled, must not be {@code null} * <li> {@link Script#options} - specifies the compiler options for this script; must not be {@code null}, * use an empty {@link Map} to specify no options * <li> {@link Script#params} - {@link Map} of user-defined parameters; must not be {@code null}, * use an empty {@link Map} to specify no params * </ul> * <li> {@link ScriptType#STORED} * <ul> * <li> {@link Script#lang} - the language will be specified when storing the script, so this should * be {@code null} * <li> {@link Script#idOrCode} - specifies the id of the stored script to be looked up, must not be {@code null} * <li> {@link Script#options} - compiler options will be specified when a stored script is stored, * so they have no meaning here and must be {@code null} * <li> {@link Script#params} - {@link Map} of user-defined parameters; must not be {@code null}, * use an empty {@link Map} to specify no params * </ul> * </ul> */ public final class Script implements ToXContentObject, Writeable { /** * The name of the of the default scripting language. */ public static final String DEFAULT_SCRIPT_LANG = "painless"; /** * The name of the default template language. */ public static final String DEFAULT_TEMPLATE_LANG = "mustache"; /** * The default {@link ScriptType}. */ public static final ScriptType DEFAULT_SCRIPT_TYPE = ScriptType.INLINE; /** * Compiler option for {@link XContentType} used for templates. */ public static final String CONTENT_TYPE_OPTION = "content_type"; /** * Standard {@link ParseField} for outer level of script queries. 
*/ public static final ParseField SCRIPT_PARSE_FIELD = new ParseField("script"); /** * Standard {@link ParseField} for source on the inner level. */ public static final ParseField SOURCE_PARSE_FIELD = new ParseField("source"); /** * Standard {@link ParseField} for lang on the inner level. */ public static final ParseField LANG_PARSE_FIELD = new ParseField("lang"); /** * Standard {@link ParseField} for options on the inner level. */ public static final ParseField OPTIONS_PARSE_FIELD = new ParseField("options"); /** * Standard {@link ParseField} for params on the inner level. */ public static final ParseField PARAMS_PARSE_FIELD = new ParseField("params"); /** * Helper class used by {@link ObjectParser} to store mutable {@link Script} variables and then * construct an immutable {@link Script} object based on parsed XContent. */ private static final class Builder { private ScriptType type; private String lang; private String idOrCode; private Map<String, String> options; private Map<String, Object> params; private Builder() { // This cannot default to an empty map because options are potentially added at multiple points. this.options = new HashMap<>(); this.params = Collections.emptyMap(); } /** * Since inline scripts can accept code rather than just an id, they must also be able * to handle template parsing, hence the need for custom parsing code. Templates can * consist of either an {@link String} or a JSON object. If a JSON object is discovered * then the content type option must also be saved as a compiler option. 
*/ private void setInline(XContentParser parser) { try { if (type != null) { throwOnlyOneOfType(); } type = ScriptType.INLINE; if (parser.currentToken() == Token.START_OBJECT) { // this is really for search templates, that need to be converted to json format XContentBuilder builder = XContentFactory.jsonBuilder(); idOrCode = Strings.toString(builder.copyCurrentStructure(parser)); options.put(CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()); } else { idOrCode = parser.text(); } } catch (IOException exception) { throw new UncheckedIOException(exception); } } /** * Set both the id and the type of the stored script. */ private void setStored(String idOrCode) { if (type != null) { throwOnlyOneOfType(); } type = ScriptType.STORED; this.idOrCode = idOrCode; } /** * Helper method to throw an exception if more than one type of {@link Script} is specified. */ private static void throwOnlyOneOfType() { throw new IllegalArgumentException( "must only use one of [" + ScriptType.INLINE.getParseField().getPreferredName() + ", " + ScriptType.STORED.getParseField().getPreferredName() + "]" + " when specifying a script" ); } private void setLang(String lang) { this.lang = lang; } /** * Options may have already been added if an inline template was specified. * Appends the user-defined compiler options with the internal compiler options. */ private void setOptions(Map<String, String> options) { this.options.putAll(options); } private void setParams(Map<String, Object> params) { this.params = params; } /** * Validates the parameters and creates an {@link Script}. * @param defaultLang The default lang is not a compile-time constant and must be provided * at run-time this way in case a legacy default language is used from * previously stored queries. 
*/ private Script build(String defaultLang) { if (type == null) { throw new IllegalArgumentException("must specify either [source] for an inline script or [id] for a stored script"); } if (type == ScriptType.INLINE) { if (lang == null) { lang = defaultLang; } if (idOrCode == null) { throw new IllegalArgumentException("must specify <id> for an inline script"); } if (options.size() > 1 || options.size() == 1 && options.get(CONTENT_TYPE_OPTION) == null) { options.remove(CONTENT_TYPE_OPTION); throw new IllegalArgumentException("illegal compiler options [" + options + "] specified"); } } else if (type == ScriptType.STORED) { if (lang != null) { throw new IllegalArgumentException("illegally specified <lang> for a stored script"); } if (idOrCode == null) { throw new IllegalArgumentException("must specify <code> for a stored script"); } if (options.isEmpty()) { options = null; } else { throw new IllegalArgumentException( "field [" + OPTIONS_PARSE_FIELD.getPreferredName() + "] " + "cannot be specified using a stored script" ); } } return new Script(type, lang, idOrCode, options, params); } } private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("script", Builder::new); static { // Defines the fields necessary to parse a Script as XContent using an ObjectParser. PARSER.declareField(Builder::setInline, parser -> parser, ScriptType.INLINE.getParseField(), ValueType.OBJECT_OR_STRING); PARSER.declareString(Builder::setStored, ScriptType.STORED.getParseField()); PARSER.declareString(Builder::setLang, LANG_PARSE_FIELD); PARSER.declareField(Builder::setOptions, XContentParser::mapStrings, OPTIONS_PARSE_FIELD, ValueType.OBJECT); PARSER.declareField(Builder::setParams, XContentParser::map, PARAMS_PARSE_FIELD, ValueType.OBJECT); } /** * Declare a script field on an {@link ObjectParser} with the standard name ({@code script}). * @param <T> Whatever type the {@linkplain ObjectParser} is parsing. 
* @param parser the parser itself * @param consumer the consumer for the script */ public static <T> void declareScript(AbstractObjectParser<T, ?> parser, BiConsumer<T, Script> consumer) { declareScript(parser, consumer, Script.SCRIPT_PARSE_FIELD); } /** * Declare a script field on an {@link ObjectParser}. * @param <T> Whatever type the {@linkplain ObjectParser} is parsing. * @param parser the parser itself * @param consumer the consumer for the script * @param parseField the field name */ public static <T> void declareScript(AbstractObjectParser<T, ?> parser, BiConsumer<T, Script> consumer, ParseField parseField) { parser.declareField(consumer, (p, c) -> Script.parse(p), parseField, ValueType.OBJECT_OR_STRING); } /** * Convenience method to call {@link Script#parse(XContentParser, String)} * using the default scripting language. */ public static Script parse(XContentParser parser) throws IOException { return parse(parser, DEFAULT_SCRIPT_LANG); } /** * Parse the script configured in the given settings. */ public static Script parse(Settings settings) { try (XContentBuilder builder = JsonXContent.contentBuilder()) { builder.startObject(); settings.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); try ( XContentParser parser = XContentHelper.createParserNotCompressed( LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, BytesReference.bytes(builder), XContentType.JSON ) ) { return parse(parser); } } catch (IOException e) { // it should not happen since we are not actually reading from a stream but an in-memory byte[] throw new IllegalStateException(e); } } /** * This will parse XContent into a {@link Script}. The following formats can be parsed: * * The simple format defaults to an {@link ScriptType#INLINE} with no compiler options or user-defined params: * * Example: * {@code * "return Math.log(doc.popularity) * 100;" * } * * The complex format where {@link ScriptType} and idOrCode are required while lang, options and params are not required. 
* * {@code * { * // Exactly one of "id" or "source" must be specified * "id" : "<id>", * // OR * "source": "<source>", * "lang" : "<lang>", * "options" : { * "option0" : "<option0>", * "option1" : "<option1>", * ... * }, * "params" : { * "param0" : "<param0>", * "param1" : "<param1>", * ... * } * } * } * * Example: * {@code * { * "source" : "return Math.log(doc.popularity) * params.multiplier", * "lang" : "painless", * "params" : { * "multiplier" : 100.0 * } * } * } * * This also handles templates in a special way. If a complexly formatted query is specified as another complex * JSON object the query is assumed to be a template, and the format will be preserved. * * {@code * { * "source" : { "query" : ... }, * "lang" : "<lang>", * "options" : { * "option0" : "<option0>", * "option1" : "<option1>", * ... * }, * "params" : { * "param0" : "<param0>", * "param1" : "<param1>", * ... * } * } * } * * @param parser The {@link XContentParser} to be used. * @param defaultLang The default language to use if no language is specified. The default language isn't necessarily * the one defined by {@link Script#DEFAULT_SCRIPT_LANG} due to backwards compatibility requirements * related to stored queries using previously default languages. * * @return The parsed {@link Script}. */ public static Script parse(XContentParser parser, String defaultLang) throws IOException { Objects.requireNonNull(defaultLang); Token token = parser.currentToken(); if (token == null) { token = parser.nextToken(); } if (token == Token.VALUE_STRING) { return new Script(ScriptType.INLINE, defaultLang, parser.text(), Collections.emptyMap()); } return PARSER.apply(parser, null).build(defaultLang); } /** * Parse a {@link Script} from an {@link Object}, that can either be a {@link String} or a {@link Map}. * @see #parse(XContentParser, String) * @param config The object to parse the script from. * @return The parsed {@link Script}. 
*/ @SuppressWarnings("unchecked") public static Script parse(Object config) { Objects.requireNonNull(config, "Script must not be null"); if (config instanceof String) { return new Script((String) config); } else if (config instanceof Map) { Map<String, Object> configMap = (Map<String, Object>) config; String script = null; ScriptType type = null; String lang = null; Map<String, Object> params = Collections.emptyMap(); Map<String, String> options = Collections.emptyMap(); for (Map.Entry<String, Object> entry : configMap.entrySet()) { String parameterName = entry.getKey(); Object parameterValue = entry.getValue(); if (Script.LANG_PARSE_FIELD.match(parameterName, LoggingDeprecationHandler.INSTANCE)) { if (parameterValue instanceof String || parameterValue == null) { lang = (String) parameterValue; } else { throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]"); } } else if (Script.PARAMS_PARSE_FIELD.match(parameterName, LoggingDeprecationHandler.INSTANCE)) { if (parameterValue instanceof Map || parameterValue == null) { params = (Map<String, Object>) parameterValue; } else { throw new ElasticsearchParseException("Value must be of type Map: [" + parameterName + "]"); } } else if (Script.OPTIONS_PARSE_FIELD.match(parameterName, LoggingDeprecationHandler.INSTANCE)) { if (parameterValue instanceof Map || parameterValue == null) { options = (Map<String, String>) parameterValue; } else { throw new ElasticsearchParseException("Value must be of type Map: [" + parameterName + "]"); } } else if (ScriptType.INLINE.getParseField().match(parameterName, LoggingDeprecationHandler.INSTANCE)) { if (parameterValue instanceof String || parameterValue == null) { script = (String) parameterValue; type = ScriptType.INLINE; } else { throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]"); } } else if (ScriptType.STORED.getParseField().match(parameterName, LoggingDeprecationHandler.INSTANCE)) { if (parameterValue 
instanceof String || parameterValue == null) { script = (String) parameterValue; type = ScriptType.STORED; } else { throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]"); } } else { throw new ElasticsearchParseException("Unsupported field [" + parameterName + "]"); } } if (script == null) { throw new ElasticsearchParseException( "Expected one of [{}] or [{}] fields, but found none", ScriptType.INLINE.getParseField().getPreferredName(), ScriptType.STORED.getParseField().getPreferredName() ); } assert type != null : "if script is not null, type should definitely not be null"; if (type == ScriptType.STORED) { if (lang != null) { throw new IllegalArgumentException( "[" + Script.LANG_PARSE_FIELD.getPreferredName() + "] cannot be specified for stored scripts" ); } return new Script(type, null, script, null, params); } else { return new Script(type, lang == null ? DEFAULT_SCRIPT_LANG : lang, script, options, params); } } else { throw new IllegalArgumentException("Script value should be a String or a Map"); } } private final ScriptType type; private final String lang; private final String idOrCode; private final Map<String, String> options; private final Map<String, Object> params; /** * Constructor for simple script using the default language and default type. * @param idOrCode The id or code to use dependent on the default script type. */ public Script(String idOrCode) { this(DEFAULT_SCRIPT_TYPE, DEFAULT_SCRIPT_LANG, idOrCode, Collections.emptyMap(), Collections.emptyMap()); } /** * Constructor for a script that does not need to use compiler options. * @param type The {@link ScriptType}. * @param lang The language for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}. * For {@link ScriptType#STORED} scripts this should be null, but can * be specified to access scripts stored as part of the stored scripts deprecated API. 
* @param idOrCode The id for this {@link Script} if the {@link ScriptType} is {@link ScriptType#STORED}. * The code for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}. * @param params The user-defined params to be bound for script execution. */ public Script(ScriptType type, String lang, String idOrCode, Map<String, Object> params) { this(type, lang, idOrCode, type == ScriptType.INLINE ? Collections.emptyMap() : null, params); } /** * Constructor for a script that requires the use of compiler options. * @param type The {@link ScriptType}. * @param lang The language for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}. * For {@link ScriptType#STORED} scripts this should be null, but can * be specified to access scripts stored as part of the stored scripts deprecated API. * @param idOrCode The id for this {@link Script} if the {@link ScriptType} is {@link ScriptType#STORED}. * The code for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}. * @param options The map of compiler options for this {@link Script} if the {@link ScriptType} * is {@link ScriptType#INLINE}, {@code null} otherwise. * @param params The user-defined params to be bound for script execution. 
*/
public Script(ScriptType type, String lang, String idOrCode, Map<String, String> options, Map<String, Object> params) {
    this.type = Objects.requireNonNull(type);
    this.idOrCode = Objects.requireNonNull(idOrCode);
    // Params are always required; wrap so callers cannot mutate them after construction.
    this.params = Collections.unmodifiableMap(Objects.requireNonNull(params));

    if (type == ScriptType.INLINE) {
        // Inline scripts must carry a language and a (possibly empty) compiler options map.
        this.lang = Objects.requireNonNull(lang);
        this.options = Collections.unmodifiableMap(Objects.requireNonNull(options));
    } else if (type == ScriptType.STORED) {
        if (lang != null) {
            throw new IllegalArgumentException("lang cannot be specified for stored scripts");
        }

        this.lang = null;

        // NOTE(review): this throws IllegalStateException while the lang check just above throws
        // IllegalArgumentException for the same category of caller error — confirm whether the
        // inconsistency is intentional before changing it (the thrown type is caller-visible).
        if (options != null) {
            throw new IllegalStateException("options cannot be specified for stored scripts");
        }

        this.options = null;
    } else {
        // Defensive: only INLINE and STORED exist; a new ScriptType must be handled above.
        throw new IllegalStateException("unknown script type [" + type.getName() + "]");
    }
}

/**
 * Creates a {@link Script} read from an input stream.
 */
public Script(StreamInput in) throws IOException {
    this.type = ScriptType.readFrom(in);
    this.lang = in.readOptionalString();
    this.idOrCode = in.readString();
    // The wire format stores options as a generic map; the double cast narrows the value type
    // back to String without copying. Safe only because writeTo always writes String values.
    @SuppressWarnings("unchecked")
    Map<String, String> options = (Map<String, String>) (Map) in.readGenericMap();
    this.options = options;
    this.params = in.readGenericMap();
}

@Override
public void writeTo(StreamOutput out) throws IOException {
    type.writeTo(out);
    out.writeOptionalString(lang);
    out.writeString(idOrCode);
    // Widen back to Map<String, Object> to match the generic-map writer's signature.
    @SuppressWarnings("unchecked")
    Map<String, Object> options = (Map<String, Object>) (Map) this.options;
    // Consistent ordering keeps the serialized bytes deterministic across JVMs/runs.
    out.writeMapWithConsistentOrder(options);
    out.writeMapWithConsistentOrder(params);
}

/**
 * This will build scripts into the following XContent structure:
 *
 * {@code
 * {
 *     "<(id, source)>" : "<idOrCode>",
 *     "lang" : "<lang>",
 *     "options" : {
 *         "option0" : "<option0>",
 *         "option1" : "<option1>",
 *         ...
 *     },
 *     "params" : {
 *         "param0" : "<param0>",
 *         "param1" : "<param1>",
 *         ...
 *     }
 * }
 * }
 *
 * Example:
 * {@code
 * {
 *     "source" : "return Math.log(doc.popularity) * params.multiplier;",
 *     "lang" : "painless",
 *     "params" : {
 *         "multiplier" : 100.0
 *     }
 * }
 * }
 *
 * Note that lang, options, and params will only be included if there have been any specified.
 *
 * This also handles templates in a special way. If the {@link Script#CONTENT_TYPE_OPTION} option
 * is provided and the {@link ScriptType#INLINE} is specified then the template will be preserved as a raw field.
 *
 * {@code
 * {
 *     "source" : { "query" : ... },
 *     "lang" : "<lang>",
 *     "options" : {
 *         "option0" : "<option0>",
 *         "option1" : "<option1>",
 *         ...
 *     },
 *     "params" : {
 *         "param0" : "<param0>",
 *         "param1" : "<param1>",
 *         ...
 *     }
 * }
 * }
 */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException {
    builder.startObject();

    String contentType = options == null ? null : options.get(CONTENT_TYPE_OPTION);

    if (type == ScriptType.INLINE) {
        if (contentType != null && builder.contentType().mediaType().equals(contentType)) {
            // The source is itself XContent of the builder's media type: emit it raw so the
            // structured template is preserved instead of being escaped into a string.
            try (InputStream stream = new BytesArray(idOrCode).streamInput()) {
                builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), stream);
            }
        } else {
            builder.field(SOURCE_PARSE_FIELD.getPreferredName(), idOrCode);
        }
    } else {
        builder.field("id", idOrCode);
    }

    // lang, options, and params are omitted entirely when unset/empty (see javadoc above).
    if (lang != null) {
        builder.field(LANG_PARSE_FIELD.getPreferredName(), lang);
    }

    if (options != null && options.isEmpty() == false) {
        builder.field(OPTIONS_PARSE_FIELD.getPreferredName(), options);
    }

    if (params.isEmpty() == false) {
        builder.field(PARAMS_PARSE_FIELD.getPreferredName(), params);
    }

    builder.endObject();

    return builder;
}

/**
 * @return The {@link ScriptType} for this {@link Script}.
 */
public ScriptType getType() {
    return type;
}

/**
 * @return The language for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}.
 *         For {@link ScriptType#STORED} scripts this should be null, but can
 *         be specified to access scripts stored as part of the stored scripts deprecated API.
 */
public String getLang() {
    return lang;
}

/**
 * @return The id for this {@link Script} if the {@link ScriptType} is {@link ScriptType#STORED}.
 *         The code for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}.
 */
public String getIdOrCode() {
    return idOrCode;
}

/**
 * @return The map of compiler options for this {@link Script} if the {@link ScriptType}
 *         is {@link ScriptType#INLINE}, {@code null} otherwise.
 */
public Map<String, String> getOptions() {
    return options;
}

/**
 * @return The map of user-defined params for this {@link Script}.
 */
public Map<String, Object> getParams() {
    return params;
}

@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }

    Script script = (Script) o;

    // lang and options may be null (STORED scripts), hence Objects.equals rather than .equals.
    return type == script.type
        && Objects.equals(lang, script.lang)
        && Objects.equals(idOrCode, script.idOrCode)
        && Objects.equals(options, script.options)
        && Objects.equals(params, script.params);
}

@Override
public int hashCode() {
    int result = type.hashCode();
    result = 31 * result + (lang != null ? lang.hashCode() : 0);
    result = 31 * result + idOrCode.hashCode();
    result = 31 * result + (options != null ? options.hashCode() : 0);
    result = 31 * result + params.hashCode();
    return result;
}

@Override
public String toString() {
    return "Script{"
        + "type="
        + type
        + ", lang='"
        + lang
        + '\''
        + ", idOrCode='"
        + idOrCode
        + '\''
        + ", options="
        + options
        + ", params="
        + params
        + '}';
}
}
elastic/elasticsearch
server/src/main/java/org/elasticsearch/script/Script.java
891
/* * This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt). * * The MIT License * Copyright © 2014-2022 Ilkka Seppälä * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.iluwatar.saga.choreography; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * Saga representation. Saga consists of chapters. Every ChoreographyChapter is executed a certain * service. */ public class Saga { private final List<Chapter> chapters; private int pos; private boolean forward; private boolean finished; public static Saga create() { return new Saga(); } /** * get resuzlt of saga. * * @return result of saga @see {@link SagaResult} */ public SagaResult getResult() { if (finished) { return forward ? SagaResult.FINISHED : SagaResult.ROLLBACKED; } return SagaResult.PROGRESS; } /** * add chapter to saga. 
* * @param name chapter name * @return this */ public Saga chapter(String name) { this.chapters.add(new Chapter(name)); return this; } /** * set value to last chapter. * * @param value invalue * @return this */ public Saga setInValue(Object value) { if (chapters.isEmpty()) { return this; } chapters.get(chapters.size() - 1).setInValue(value); return this; } /** * get value from current chapter. * * @return value */ public Object getCurrentValue() { return chapters.get(pos).getInValue(); } /** * set value to current chapter. * * @param value to set */ public void setCurrentValue(Object value) { chapters.get(pos).setInValue(value); } /** * set status for current chapter. * * @param result to set */ public void setCurrentStatus(ChapterResult result) { chapters.get(pos).setResult(result); } void setFinished(boolean finished) { this.finished = finished; } boolean isForward() { return forward; } int forward() { return ++pos; } int back() { this.forward = false; return --pos; } private Saga() { this.chapters = new ArrayList<>(); this.pos = 0; this.forward = true; this.finished = false; } Chapter getCurrent() { return chapters.get(pos); } boolean isPresent() { return pos >= 0 && pos < chapters.size(); } boolean isCurrentSuccess() { return chapters.get(pos).isSuccess(); } /** * Class presents a chapter status and incoming parameters(incoming parameter transforms to * outcoming parameter). */ public static class Chapter { private final String name; private ChapterResult result; private Object inValue; public Chapter(String name) { this.name = name; this.result = ChapterResult.INIT; } public Object getInValue() { return inValue; } public void setInValue(Object object) { this.inValue = object; } public String getName() { return name; } /** * set result. * * @param result {@link ChapterResult} */ public void setResult(ChapterResult result) { this.result = result; } /** * the result for chapter is good. 
* * @return true if is good otherwise bad */ public boolean isSuccess() { return result == ChapterResult.SUCCESS; } } /** * result for chapter. */ public enum ChapterResult { INIT, SUCCESS, ROLLBACK } /** * result for saga. */ public enum SagaResult { PROGRESS, FINISHED, ROLLBACKED } @Override public String toString() { return "Saga{" + "chapters=" + Arrays.toString(chapters.toArray()) + ", pos=" + pos + ", forward=" + forward + '}'; } }
smedals/java-design-patterns
saga/src/main/java/com/iluwatar/saga/choreography/Saga.java
894
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ package org.tensorflow.op.core; import java.nio.ByteBuffer; import org.tensorflow.DataType; import org.tensorflow.Operand; import org.tensorflow.Output; import org.tensorflow.op.Op; import org.tensorflow.op.Scope; import org.tensorflow.op.annotation.Operator; /** * An operator creating a constant initialized with zeros of the shape given by `dims`. * * <p>For example, the following expression * <pre>{@code ops.zeros(ops.constant(new long[]{2, 2}), Float.class)}</pre> * is the equivalent of * <pre>{@code ops.fill(ops.constant(new long[]{2, 2}), ops.constant(0.0f))}</pre> * * @param <T> constant type */ @Operator public class Zeros<T> implements Op, Operand<T> { /** * Creates a zeroed tensor given its type and shape. * * @param scope is a scope used to add the underlying operation * @param dims a 1-D operand that represents the shape of the output tensor * @param type the output tensor datatype * @return a constant tensor initialized with zeros * @throws IllegalArgumentException if the tensor type or shape cannot be initialized with zeros. 
*/
  public static <T, U extends Number> Zeros<T> create(Scope scope, Operand<U> dims, Class<T> type) {
    Scope childScope = scope.withSubScope("Zeros"); // If scope had an op name set, it will prevail on "Zeros"
    int zeroSize = DataType.fromClass(type).byteSize();
    if (zeroSize < 0) {
      // Negative byte size marks variable-length datatypes (e.g. String), which have no
      // well-defined all-zeros scalar to fill with.
      throw new IllegalArgumentException(type.getSimpleName() + " tensors cannot be initialized with zeros");
    }
    // A zero-filled buffer of exactly one element's width, interpreted as a scalar (shape {}),
    // becomes the fill value broadcast across the `dims`-shaped output.
    Constant<T> zero = Constant.create(childScope.withName("Zero"), type, new long[]{}, ByteBuffer.allocate(zeroSize));
    return new Zeros<T>(Fill.create(childScope, dims, zero));
  }

  @Override
  public Output<T> asOutput() {
    // Delegate to the wrapped Fill op, which produces the actual zeroed tensor.
    return fill.asOutput();
  }

  // Underlying Fill operation this operator wraps.
  private final Fill<T> fill;

  private Zeros(Fill<T> fill) {
    this.fill = fill;
  }
}
tensorflow/tensorflow
tensorflow/java/src/main/java/org/tensorflow/op/core/Zeros.java
896
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Predicates.instanceOf; import static com.google.common.collect.CollectPreconditions.checkRemove; import static com.google.common.collect.NullnessCasts.uncheckedCastNullableTToT; import static java.util.Objects.requireNonNull; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.Function; import com.google.common.base.Objects; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.primitives.Ints; import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.util.ArrayDeque; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Deque; import java.util.Enumeration; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import java.util.PriorityQueue; import java.util.Queue; import javax.annotation.CheckForNull; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; /** 
* This class contains static utility methods that operate on or return objects of type {@link * Iterator}. Except as noted, each method has a corresponding {@link Iterable}-based method in the * {@link Iterables} class. * * <p><i>Performance notes:</i> Unless otherwise noted, all of the iterators produced in this class * are <i>lazy</i>, which means that they only advance the backing iteration when absolutely * necessary. * * <p>See the Guava User Guide section on <a href= * "https://github.com/google/guava/wiki/CollectionUtilitiesExplained#iterables">{@code * Iterators}</a>. * * @author Kevin Bourrillion * @author Jared Levy * @since 2.0 */ @GwtCompatible(emulated = true) @ElementTypesAreNonnullByDefault public final class Iterators { private Iterators() {} /** * Returns the empty iterator. * * <p>The {@link Iterable} equivalent of this method is {@link ImmutableSet#of()}. */ static <T extends @Nullable Object> UnmodifiableIterator<T> emptyIterator() { return emptyListIterator(); } /** * Returns the empty iterator. * * <p>The {@link Iterable} equivalent of this method is {@link ImmutableSet#of()}. */ // Casting to any type is safe since there are no actual elements. @SuppressWarnings("unchecked") static <T extends @Nullable Object> UnmodifiableListIterator<T> emptyListIterator() { return (UnmodifiableListIterator<T>) ArrayItr.EMPTY; } /** * This is an enum singleton rather than an anonymous class so ProGuard can figure out it's only * referenced by emptyModifiableIterator(). */ private enum EmptyModifiableIterator implements Iterator<Object> { INSTANCE; @Override public boolean hasNext() { return false; } @Override public Object next() { throw new NoSuchElementException(); } @Override public void remove() { checkRemove(false); } } /** * Returns the empty {@code Iterator} that throws {@link IllegalStateException} instead of {@link * UnsupportedOperationException} on a call to {@link Iterator#remove()}. 
*/ // Casting to any type is safe since there are no actual elements. @SuppressWarnings("unchecked") static <T extends @Nullable Object> Iterator<T> emptyModifiableIterator() { return (Iterator<T>) EmptyModifiableIterator.INSTANCE; } /** Returns an unmodifiable view of {@code iterator}. */ public static <T extends @Nullable Object> UnmodifiableIterator<T> unmodifiableIterator( Iterator<? extends T> iterator) { checkNotNull(iterator); if (iterator instanceof UnmodifiableIterator) { @SuppressWarnings("unchecked") // Since it's unmodifiable, the covariant cast is safe UnmodifiableIterator<T> result = (UnmodifiableIterator<T>) iterator; return result; } return new UnmodifiableIterator<T>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override @ParametricNullness public T next() { return iterator.next(); } }; } /** * Simply returns its argument. * * @deprecated no need to use this * @since 10.0 */ @Deprecated public static <T extends @Nullable Object> UnmodifiableIterator<T> unmodifiableIterator( UnmodifiableIterator<T> iterator) { return checkNotNull(iterator); } /** * Returns the number of elements remaining in {@code iterator}. The iterator will be left * exhausted: its {@code hasNext()} method will return {@code false}. */ public static int size(Iterator<?> iterator) { long count = 0L; while (iterator.hasNext()) { iterator.next(); count++; } return Ints.saturatedCast(count); } /** Returns {@code true} if {@code iterator} contains {@code element}. */ public static boolean contains(Iterator<?> iterator, @CheckForNull Object element) { if (element == null) { while (iterator.hasNext()) { if (iterator.next() == null) { return true; } } } else { while (iterator.hasNext()) { if (element.equals(iterator.next())) { return true; } } } return false; } /** * Traverses an iterator and removes every element that belongs to the provided collection. The * iterator will be left exhausted: its {@code hasNext()} method will return {@code false}. 
* * @param removeFrom the iterator to (potentially) remove elements from * @param elementsToRemove the elements to remove * @return {@code true} if any element was removed from {@code iterator} */ @CanIgnoreReturnValue public static boolean removeAll(Iterator<?> removeFrom, Collection<?> elementsToRemove) { checkNotNull(elementsToRemove); boolean result = false; while (removeFrom.hasNext()) { if (elementsToRemove.contains(removeFrom.next())) { removeFrom.remove(); result = true; } } return result; } /** * Removes every element that satisfies the provided predicate from the iterator. The iterator * will be left exhausted: its {@code hasNext()} method will return {@code false}. * * @param removeFrom the iterator to (potentially) remove elements from * @param predicate a predicate that determines whether an element should be removed * @return {@code true} if any elements were removed from the iterator * @since 2.0 */ @CanIgnoreReturnValue public static <T extends @Nullable Object> boolean removeIf( Iterator<T> removeFrom, Predicate<? super T> predicate) { checkNotNull(predicate); boolean modified = false; while (removeFrom.hasNext()) { if (predicate.apply(removeFrom.next())) { removeFrom.remove(); modified = true; } } return modified; } /** * Traverses an iterator and removes every element that does not belong to the provided * collection. The iterator will be left exhausted: its {@code hasNext()} method will return * {@code false}. 
* * @param removeFrom the iterator to (potentially) remove elements from * @param elementsToRetain the elements to retain * @return {@code true} if any element was removed from {@code iterator} */ @CanIgnoreReturnValue public static boolean retainAll(Iterator<?> removeFrom, Collection<?> elementsToRetain) { checkNotNull(elementsToRetain); boolean result = false; while (removeFrom.hasNext()) { if (!elementsToRetain.contains(removeFrom.next())) { removeFrom.remove(); result = true; } } return result; } /** * Determines whether two iterators contain equal elements in the same order. More specifically, * this method returns {@code true} if {@code iterator1} and {@code iterator2} contain the same * number of elements and every element of {@code iterator1} is equal to the corresponding element * of {@code iterator2}. * * <p>Note that this will modify the supplied iterators, since they will have been advanced some * number of elements forward. */ public static boolean elementsEqual(Iterator<?> iterator1, Iterator<?> iterator2) { while (iterator1.hasNext()) { if (!iterator2.hasNext()) { return false; } Object o1 = iterator1.next(); Object o2 = iterator2.next(); if (!Objects.equal(o1, o2)) { return false; } } return !iterator2.hasNext(); } /** * Returns a string representation of {@code iterator}, with the format {@code [e1, e2, ..., en]}. * The iterator will be left exhausted: its {@code hasNext()} method will return {@code false}. */ public static String toString(Iterator<?> iterator) { StringBuilder sb = new StringBuilder().append('['); boolean first = true; while (iterator.hasNext()) { if (!first) { sb.append(", "); } first = false; sb.append(iterator.next()); } return sb.append(']').toString(); } /** * Returns the single element contained in {@code iterator}. * * @throws NoSuchElementException if the iterator is empty * @throws IllegalArgumentException if the iterator contains multiple elements. The state of the * iterator is unspecified. 
*/ @ParametricNullness public static <T extends @Nullable Object> T getOnlyElement(Iterator<T> iterator) { T first = iterator.next(); if (!iterator.hasNext()) { return first; } StringBuilder sb = new StringBuilder().append("expected one element but was: <").append(first); for (int i = 0; i < 4 && iterator.hasNext(); i++) { sb.append(", ").append(iterator.next()); } if (iterator.hasNext()) { sb.append(", ..."); } sb.append('>'); throw new IllegalArgumentException(sb.toString()); } /** * Returns the single element contained in {@code iterator}, or {@code defaultValue} if the * iterator is empty. * * @throws IllegalArgumentException if the iterator contains multiple elements. The state of the * iterator is unspecified. */ @ParametricNullness public static <T extends @Nullable Object> T getOnlyElement( Iterator<? extends T> iterator, @ParametricNullness T defaultValue) { return iterator.hasNext() ? getOnlyElement(iterator) : defaultValue; } /** * Copies an iterator's elements into an array. The iterator will be left exhausted: its {@code * hasNext()} method will return {@code false}. * * @param iterator the iterator to copy * @param type the type of the elements * @return a newly-allocated array into which all the elements of the iterator have been copied */ @GwtIncompatible // Array.newInstance(Class, int) public static <T extends @Nullable Object> T[] toArray( Iterator<? extends T> iterator, Class<@NonNull T> type) { List<T> list = Lists.newArrayList(iterator); return Iterables.<T>toArray(list, type); } /** * Adds all elements in {@code iterator} to {@code collection}. The iterator will be left * exhausted: its {@code hasNext()} method will return {@code false}. * * @return {@code true} if {@code collection} was modified as a result of this operation */ @CanIgnoreReturnValue public static <T extends @Nullable Object> boolean addAll( Collection<T> addTo, Iterator<? 
extends T> iterator) { checkNotNull(addTo); checkNotNull(iterator); boolean wasModified = false; while (iterator.hasNext()) { wasModified |= addTo.add(iterator.next()); } return wasModified; } /** * Returns the number of elements in the specified iterator that equal the specified object. The * iterator will be left exhausted: its {@code hasNext()} method will return {@code false}. * * @see Collections#frequency */ public static int frequency(Iterator<?> iterator, @CheckForNull Object element) { int count = 0; while (contains(iterator, element)) { // Since it lives in the same class, we know contains gets to the element and then stops, // though that isn't currently publicly documented. count++; } return count; } /** * Returns an iterator that cycles indefinitely over the elements of {@code iterable}. * * <p>The returned iterator supports {@code remove()} if the provided iterator does. After {@code * remove()} is called, subsequent cycles omit the removed element, which is no longer in {@code * iterable}. The iterator's {@code hasNext()} method returns {@code true} until {@code iterable} * is empty. * * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an infinite loop. You * should use an explicit {@code break} or be certain that you will eventually remove all the * elements. */ public static <T extends @Nullable Object> Iterator<T> cycle(Iterable<T> iterable) { checkNotNull(iterable); return new Iterator<T>() { Iterator<T> iterator = emptyModifiableIterator(); @Override public boolean hasNext() { /* * Don't store a new Iterator until we know the user can't remove() the last returned * element anymore. Otherwise, when we remove from the old iterator, we may be invalidating * the new one. The result is a ConcurrentModificationException or other bad behavior. 
* * (If we decide that we really, really hate allocating two Iterators per cycle instead of * one, we can optimistically store the new Iterator and then be willing to throw it out if * the user calls remove().) */ return iterator.hasNext() || iterable.iterator().hasNext(); } @Override @ParametricNullness public T next() { if (!iterator.hasNext()) { iterator = iterable.iterator(); if (!iterator.hasNext()) { throw new NoSuchElementException(); } } return iterator.next(); } @Override public void remove() { iterator.remove(); } }; } /** * Returns an iterator that cycles indefinitely over the provided elements. * * <p>The returned iterator supports {@code remove()}. After {@code remove()} is called, * subsequent cycles omit the removed element, but {@code elements} does not change. The * iterator's {@code hasNext()} method returns {@code true} until all of the original elements * have been removed. * * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an infinite loop. You * should use an explicit {@code break} or be certain that you will eventually remove all the * elements. */ @SafeVarargs public static <T extends @Nullable Object> Iterator<T> cycle(T... elements) { return cycle(Lists.newArrayList(elements)); } /** * Returns an Iterator that walks the specified array, nulling out elements behind it. This can * avoid memory leaks when an element is no longer necessary. * * <p>This method accepts an array with element type {@code @Nullable T}, but callers must pass an * array whose contents are initially non-null. The {@code @Nullable} annotation indicates that * this method will write nulls into the array during iteration. * * <p>This is mainly just to avoid the intermediate ArrayDeque in ConsumingQueueIterator. */ private static <I extends Iterator<?>> Iterator<I> consumingForArray(@Nullable I... 
elements) { return new UnmodifiableIterator<I>() { int index = 0; @Override public boolean hasNext() { return index < elements.length; } @Override public I next() { if (!hasNext()) { throw new NoSuchElementException(); } /* * requireNonNull is safe because our callers always pass non-null arguments. Each element * of the array becomes null only when we iterate past it and then clear it. */ I result = requireNonNull(elements[index]); elements[index] = null; index++; return result; } }; } /** * Combines two iterators into a single iterator. The returned iterator iterates across the * elements in {@code a}, followed by the elements in {@code b}. The source iterators are not * polled until necessary. * * <p>The returned iterator supports {@code remove()} when the corresponding input iterator * supports it. */ public static <T extends @Nullable Object> Iterator<T> concat( Iterator<? extends T> a, Iterator<? extends T> b) { checkNotNull(a); checkNotNull(b); return concat(consumingForArray(a, b)); } /** * Combines three iterators into a single iterator. The returned iterator iterates across the * elements in {@code a}, followed by the elements in {@code b}, followed by the elements in * {@code c}. The source iterators are not polled until necessary. * * <p>The returned iterator supports {@code remove()} when the corresponding input iterator * supports it. */ public static <T extends @Nullable Object> Iterator<T> concat( Iterator<? extends T> a, Iterator<? extends T> b, Iterator<? extends T> c) { checkNotNull(a); checkNotNull(b); checkNotNull(c); return concat(consumingForArray(a, b, c)); } /** * Combines four iterators into a single iterator. The returned iterator iterates across the * elements in {@code a}, followed by the elements in {@code b}, followed by the elements in * {@code c}, followed by the elements in {@code d}. The source iterators are not polled until * necessary. 
* * <p>The returned iterator supports {@code remove()} when the corresponding input iterator * supports it. */ public static <T extends @Nullable Object> Iterator<T> concat( Iterator<? extends T> a, Iterator<? extends T> b, Iterator<? extends T> c, Iterator<? extends T> d) { checkNotNull(a); checkNotNull(b); checkNotNull(c); checkNotNull(d); return concat(consumingForArray(a, b, c, d)); } /** * Combines multiple iterators into a single iterator. The returned iterator iterates across the * elements of each iterator in {@code inputs}. The input iterators are not polled until * necessary. * * <p>The returned iterator supports {@code remove()} when the corresponding input iterator * supports it. * * @throws NullPointerException if any of the provided iterators is null */ @SafeVarargs public static <T extends @Nullable Object> Iterator<T> concat(Iterator<? extends T>... inputs) { return concatNoDefensiveCopy(Arrays.copyOf(inputs, inputs.length)); } /** * Combines multiple iterators into a single iterator. The returned iterator iterates across the * elements of each iterator in {@code inputs}. The input iterators are not polled until * necessary. * * <p>The returned iterator supports {@code remove()} when the corresponding input iterator * supports it. The methods of the returned iterator may throw {@code NullPointerException} if any * of the input iterators is null. */ public static <T extends @Nullable Object> Iterator<T> concat( Iterator<? extends Iterator<? extends T>> inputs) { return new ConcatenatedIterator<>(inputs); } /** Concats a varargs array of iterators without making a defensive copy of the array. */ static <T extends @Nullable Object> Iterator<T> concatNoDefensiveCopy( Iterator<? extends T>... inputs) { for (Iterator<? extends T> input : checkNotNull(inputs)) { checkNotNull(input); } return concat(consumingForArray(inputs)); } /** * Divides an iterator into unmodifiable sublists of the given size (the final list may be * smaller). 
For example, partitioning an iterator containing {@code [a, b, c, d, e]} with a * partition size of 3 yields {@code [[a, b, c], [d, e]]} -- an outer iterator containing two * inner lists of three and two elements, all in the original order. * * <p>The returned lists implement {@link java.util.RandomAccess}. * * <p><b>Note:</b> The current implementation eagerly allocates storage for {@code size} elements. * As a consequence, passing values like {@code Integer.MAX_VALUE} can lead to {@link * OutOfMemoryError}. * * @param iterator the iterator to return a partitioned view of * @param size the desired size of each partition (the last may be smaller) * @return an iterator of immutable lists containing the elements of {@code iterator} divided into * partitions * @throws IllegalArgumentException if {@code size} is nonpositive */ public static <T extends @Nullable Object> UnmodifiableIterator<List<T>> partition( Iterator<T> iterator, int size) { return partitionImpl(iterator, size, false); } /** * Divides an iterator into unmodifiable sublists of the given size, padding the final iterator * with null values if necessary. For example, partitioning an iterator containing {@code [a, b, * c, d, e]} with a partition size of 3 yields {@code [[a, b, c], [d, e, null]]} -- an outer * iterator containing two inner lists of three elements each, all in the original order. * * <p>The returned lists implement {@link java.util.RandomAccess}. 
* * @param iterator the iterator to return a partitioned view of * @param size the desired size of each partition * @return an iterator of immutable lists containing the elements of {@code iterator} divided into * partitions (the final iterable may have trailing null elements) * @throws IllegalArgumentException if {@code size} is nonpositive */ public static <T extends @Nullable Object> UnmodifiableIterator<List<@Nullable T>> paddedPartition(Iterator<T> iterator, int size) { return partitionImpl(iterator, size, true); } private static <T extends @Nullable Object> UnmodifiableIterator<List<@Nullable T>> partitionImpl( Iterator<T> iterator, int size, boolean pad) { checkNotNull(iterator); checkArgument(size > 0); return new UnmodifiableIterator<List<@Nullable T>>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public List<@Nullable T> next() { if (!hasNext()) { throw new NoSuchElementException(); } @SuppressWarnings("unchecked") // we only put Ts in it @Nullable T[] array = (@Nullable T[]) new Object[size]; int count = 0; for (; count < size && iterator.hasNext(); count++) { array[count] = iterator.next(); } for (int i = count; i < size; i++) { array[i] = null; // for GWT } List<@Nullable T> list = Collections.unmodifiableList(Arrays.asList(array)); // TODO(b/192579700): Use a ternary once it no longer confuses our nullness checker. if (pad || count == size) { return list; } else { return list.subList(0, count); } } }; } /** * Returns a view of {@code unfiltered} containing all elements that satisfy the input predicate * {@code retainIfTrue}. */ public static <T extends @Nullable Object> UnmodifiableIterator<T> filter( Iterator<T> unfiltered, Predicate<? 
super T> retainIfTrue) { checkNotNull(unfiltered); checkNotNull(retainIfTrue); return new AbstractIterator<T>() { @Override @CheckForNull protected T computeNext() { while (unfiltered.hasNext()) { T element = unfiltered.next(); if (retainIfTrue.apply(element)) { return element; } } return endOfData(); } }; } /** * Returns a view of {@code unfiltered} containing all elements that are of the type {@code * desiredType}. */ @SuppressWarnings("unchecked") // can cast to <T> because non-Ts are removed @GwtIncompatible // Class.isInstance public static <T> UnmodifiableIterator<T> filter(Iterator<?> unfiltered, Class<T> desiredType) { return (UnmodifiableIterator<T>) filter(unfiltered, instanceOf(desiredType)); } /** * Returns {@code true} if one or more elements returned by {@code iterator} satisfy the given * predicate. */ public static <T extends @Nullable Object> boolean any( Iterator<T> iterator, Predicate<? super T> predicate) { return indexOf(iterator, predicate) != -1; } /** * Returns {@code true} if every element returned by {@code iterator} satisfies the given * predicate. If {@code iterator} is empty, {@code true} is returned. */ public static <T extends @Nullable Object> boolean all( Iterator<T> iterator, Predicate<? super T> predicate) { checkNotNull(predicate); while (iterator.hasNext()) { T element = iterator.next(); if (!predicate.apply(element)) { return false; } } return true; } /** * Returns the first element in {@code iterator} that satisfies the given predicate; use this * method only when such an element is known to exist. If no such element is found, the iterator * will be left exhausted: its {@code hasNext()} method will return {@code false}. If it is * possible that <i>no</i> element will match, use {@link #tryFind} or {@link #find(Iterator, * Predicate, Object)} instead. 
* * @throws NoSuchElementException if no element in {@code iterator} matches the given predicate */ @ParametricNullness public static <T extends @Nullable Object> T find( Iterator<T> iterator, Predicate<? super T> predicate) { checkNotNull(iterator); checkNotNull(predicate); while (iterator.hasNext()) { T t = iterator.next(); if (predicate.apply(t)) { return t; } } throw new NoSuchElementException(); } /** * Returns the first element in {@code iterator} that satisfies the given predicate. If no such * element is found, {@code defaultValue} will be returned from this method and the iterator will * be left exhausted: its {@code hasNext()} method will return {@code false}. Note that this can * usually be handled more naturally using {@code tryFind(iterator, predicate).or(defaultValue)}. * * @since 7.0 */ // For discussion of this signature, see the corresponding overload of *Iterables*.find. @CheckForNull public static <T extends @Nullable Object> T find( Iterator<? extends T> iterator, Predicate<? super T> predicate, @CheckForNull T defaultValue) { checkNotNull(iterator); checkNotNull(predicate); while (iterator.hasNext()) { T t = iterator.next(); if (predicate.apply(t)) { return t; } } return defaultValue; } /** * Returns an {@link Optional} containing the first element in {@code iterator} that satisfies the * given predicate, if such an element exists. If no such element is found, an empty {@link * Optional} will be returned from this method and the iterator will be left exhausted: its {@code * hasNext()} method will return {@code false}. * * <p><b>Warning:</b> avoid using a {@code predicate} that matches {@code null}. If {@code null} * is matched in {@code iterator}, a NullPointerException will be thrown. * * @since 11.0 */ public static <T> Optional<T> tryFind(Iterator<T> iterator, Predicate<? 
super T> predicate) { checkNotNull(iterator); checkNotNull(predicate); while (iterator.hasNext()) { T t = iterator.next(); if (predicate.apply(t)) { return Optional.of(t); } } return Optional.absent(); } /** * Returns the index in {@code iterator} of the first element that satisfies the provided {@code * predicate}, or {@code -1} if the Iterator has no such elements. * * <p>More formally, returns the lowest index {@code i} such that {@code * predicate.apply(Iterators.get(iterator, i))} returns {@code true}, or {@code -1} if there is no * such index. * * <p>If -1 is returned, the iterator will be left exhausted: its {@code hasNext()} method will * return {@code false}. Otherwise, the iterator will be set to the element which satisfies the * {@code predicate}. * * @since 2.0 */ public static <T extends @Nullable Object> int indexOf( Iterator<T> iterator, Predicate<? super T> predicate) { checkNotNull(predicate, "predicate"); for (int i = 0; iterator.hasNext(); i++) { T current = iterator.next(); if (predicate.apply(current)) { return i; } } return -1; } /** * Returns a view containing the result of applying {@code function} to each element of {@code * fromIterator}. * * <p>The returned iterator supports {@code remove()} if {@code fromIterator} does. After a * successful {@code remove()} call, {@code fromIterator} no longer contains the corresponding * element. */ public static <F extends @Nullable Object, T extends @Nullable Object> Iterator<T> transform( Iterator<F> fromIterator, Function<? super F, ? extends T> function) { checkNotNull(function); return new TransformedIterator<F, T>(fromIterator) { @ParametricNullness @Override T transform(@ParametricNullness F from) { return function.apply(from); } }; } /** * Advances {@code iterator} {@code position + 1} times, returning the element at the {@code * position}th position. 
* * @param position position of the element to return * @return the element at the specified position in {@code iterator} * @throws IndexOutOfBoundsException if {@code position} is negative or greater than or equal to * the number of elements remaining in {@code iterator} */ @ParametricNullness public static <T extends @Nullable Object> T get(Iterator<T> iterator, int position) { checkNonnegative(position); int skipped = advance(iterator, position); if (!iterator.hasNext()) { throw new IndexOutOfBoundsException( "position (" + position + ") must be less than the number of elements that remained (" + skipped + ")"); } return iterator.next(); } /** * Advances {@code iterator} {@code position + 1} times, returning the element at the {@code * position}th position or {@code defaultValue} otherwise. * * @param position position of the element to return * @param defaultValue the default value to return if the iterator is empty or if {@code position} * is greater than the number of elements remaining in {@code iterator} * @return the element at the specified position in {@code iterator} or {@code defaultValue} if * {@code iterator} produces fewer than {@code position + 1} elements. * @throws IndexOutOfBoundsException if {@code position} is negative * @since 4.0 */ @ParametricNullness public static <T extends @Nullable Object> T get( Iterator<? extends T> iterator, int position, @ParametricNullness T defaultValue) { checkNonnegative(position); advance(iterator, position); return getNext(iterator, defaultValue); } static void checkNonnegative(int position) { if (position < 0) { throw new IndexOutOfBoundsException("position (" + position + ") must not be negative"); } } /** * Returns the next element in {@code iterator} or {@code defaultValue} if the iterator is empty. * The {@link Iterables} analog to this method is {@link Iterables#getFirst}. 
* * @param defaultValue the default value to return if the iterator is empty * @return the next element of {@code iterator} or the default value * @since 7.0 */ @ParametricNullness public static <T extends @Nullable Object> T getNext( Iterator<? extends T> iterator, @ParametricNullness T defaultValue) { return iterator.hasNext() ? iterator.next() : defaultValue; } /** * Advances {@code iterator} to the end, returning the last element. * * @return the last element of {@code iterator} * @throws NoSuchElementException if the iterator is empty */ @ParametricNullness public static <T extends @Nullable Object> T getLast(Iterator<T> iterator) { while (true) { T current = iterator.next(); if (!iterator.hasNext()) { return current; } } } /** * Advances {@code iterator} to the end, returning the last element or {@code defaultValue} if the * iterator is empty. * * @param defaultValue the default value to return if the iterator is empty * @return the last element of {@code iterator} * @since 3.0 */ @ParametricNullness public static <T extends @Nullable Object> T getLast( Iterator<? extends T> iterator, @ParametricNullness T defaultValue) { return iterator.hasNext() ? getLast(iterator) : defaultValue; } /** * Calls {@code next()} on {@code iterator}, either {@code numberToAdvance} times or until {@code * hasNext()} returns {@code false}, whichever comes first. * * @return the number of elements the iterator was advanced * @since 13.0 (since 3.0 as {@code Iterators.skip}) */ @CanIgnoreReturnValue public static int advance(Iterator<?> iterator, int numberToAdvance) { checkNotNull(iterator); checkArgument(numberToAdvance >= 0, "numberToAdvance must be nonnegative"); int i; for (i = 0; i < numberToAdvance && iterator.hasNext(); i++) { iterator.next(); } return i; } /** * Returns a view containing the first {@code limitSize} elements of {@code iterator}. If {@code * iterator} contains fewer than {@code limitSize} elements, the returned view contains all of its * elements. 
The returned iterator supports {@code remove()} if {@code iterator} does. * * @param iterator the iterator to limit * @param limitSize the maximum number of elements in the returned iterator * @throws IllegalArgumentException if {@code limitSize} is negative * @since 3.0 */ public static <T extends @Nullable Object> Iterator<T> limit( Iterator<T> iterator, int limitSize) { checkNotNull(iterator); checkArgument(limitSize >= 0, "limit is negative"); return new Iterator<T>() { private int count; @Override public boolean hasNext() { return count < limitSize && iterator.hasNext(); } @Override @ParametricNullness public T next() { if (!hasNext()) { throw new NoSuchElementException(); } count++; return iterator.next(); } @Override public void remove() { iterator.remove(); } }; } /** * Returns a view of the supplied {@code iterator} that removes each element from the supplied * {@code iterator} as it is returned. * * <p>The provided iterator must support {@link Iterator#remove()} or else the returned iterator * will fail on the first call to {@code next}. The returned {@link Iterator} is also not * thread-safe. * * @param iterator the iterator to remove and return elements from * @return an iterator that removes and returns elements from the supplied iterator * @since 2.0 */ public static <T extends @Nullable Object> Iterator<T> consumingIterator(Iterator<T> iterator) { checkNotNull(iterator); return new UnmodifiableIterator<T>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override @ParametricNullness public T next() { T next = iterator.next(); iterator.remove(); return next; } @Override public String toString() { return "Iterators.consumingIterator(...)"; } }; } /** * Deletes and returns the next value from the iterator, or returns {@code null} if there is no * such value. 
*/ @CheckForNull static <T extends @Nullable Object> T pollNext(Iterator<T> iterator) { if (iterator.hasNext()) { T result = iterator.next(); iterator.remove(); return result; } else { return null; } } // Methods only in Iterators, not in Iterables /** Clears the iterator using its remove method. */ static void clear(Iterator<?> iterator) { checkNotNull(iterator); while (iterator.hasNext()) { iterator.next(); iterator.remove(); } } /** * Returns an iterator containing the elements of {@code array} in order. The returned iterator is * a view of the array; subsequent changes to the array will be reflected in the iterator. * * <p><b>Note:</b> It is often preferable to represent your data using a collection type, for * example using {@link Arrays#asList(Object[])}, making this method unnecessary. * * <p>The {@code Iterable} equivalent of this method is either {@link Arrays#asList(Object[])}, * {@link ImmutableList#copyOf(Object[])}}, or {@link ImmutableList#of}. */ @SafeVarargs public static <T extends @Nullable Object> UnmodifiableIterator<T> forArray(T... array) { return forArrayWithPosition(array, 0); } /** * Returns a list iterator containing the elements in the specified {@code array} in order, * starting at the specified {@code position}. * * <p>The {@code Iterable} equivalent of this method is {@code * Arrays.asList(array).listIterator(position)}. 
*/ static <T extends @Nullable Object> UnmodifiableListIterator<T> forArrayWithPosition( T[] array, int position) { if (array.length == 0) { Preconditions.checkPositionIndex(position, array.length); // otherwise checked in ArrayItr return emptyListIterator(); } return new ArrayItr<>(array, position); } private static final class ArrayItr<T extends @Nullable Object> extends AbstractIndexedListIterator<T> { static final UnmodifiableListIterator<Object> EMPTY = new ArrayItr<>(new Object[0], 0); private final T[] array; ArrayItr(T[] array, int position) { super(array.length, position); this.array = array; } @Override @ParametricNullness protected T get(int index) { return array[index]; } } /** * Returns an iterator containing only {@code value}. * * <p>The {@link Iterable} equivalent of this method is {@link Collections#singleton}. */ public static <T extends @Nullable Object> UnmodifiableIterator<T> singletonIterator( @ParametricNullness T value) { return new SingletonIterator<>(value); } private static final class SingletonIterator<T extends @Nullable Object> extends UnmodifiableIterator<T> { private static final Object SENTINEL = new Object(); private @Nullable Object valueOrSentinel; SingletonIterator(T value) { this.valueOrSentinel = value; } @Override public boolean hasNext() { return valueOrSentinel != SENTINEL; } @Override @ParametricNullness public T next() { if (valueOrSentinel == SENTINEL) { throw new NoSuchElementException(); } // The field held either a T or SENTINEL, and it turned out not to be SENTINEL. @SuppressWarnings("unchecked") T t = (T) valueOrSentinel; valueOrSentinel = SENTINEL; return t; } } /** * Adapts an {@code Enumeration} to the {@code Iterator} interface. * * <p>This method has no equivalent in {@link Iterables} because viewing an {@code Enumeration} as * an {@code Iterable} is impossible. However, the contents can be <i>copied</i> into a collection * using {@link Collections#list}. 
* * <p><b>Java 9 users:</b> use {@code enumeration.asIterator()} instead, unless it is important to * return an {@code UnmodifiableIterator} instead of a plain {@code Iterator}. */ public static <T extends @Nullable Object> UnmodifiableIterator<T> forEnumeration( Enumeration<T> enumeration) { checkNotNull(enumeration); return new UnmodifiableIterator<T>() { @Override public boolean hasNext() { return enumeration.hasMoreElements(); } @Override @ParametricNullness public T next() { return enumeration.nextElement(); } }; } /** * Adapts an {@code Iterator} to the {@code Enumeration} interface. * * <p>The {@code Iterable} equivalent of this method is either {@link Collections#enumeration} (if * you have a {@link Collection}), or {@code Iterators.asEnumeration(collection.iterator())}. */ public static <T extends @Nullable Object> Enumeration<T> asEnumeration(Iterator<T> iterator) { checkNotNull(iterator); return new Enumeration<T>() { @Override public boolean hasMoreElements() { return iterator.hasNext(); } @Override @ParametricNullness public T nextElement() { return iterator.next(); } }; } /** Implementation of PeekingIterator that avoids peeking unless necessary. */ private static class PeekingImpl<E extends @Nullable Object> implements PeekingIterator<E> { private final Iterator<? extends E> iterator; private boolean hasPeeked; @CheckForNull private E peekedElement; public PeekingImpl(Iterator<? extends E> iterator) { this.iterator = checkNotNull(iterator); } @Override public boolean hasNext() { return hasPeeked || iterator.hasNext(); } @Override @ParametricNullness public E next() { if (!hasPeeked) { return iterator.next(); } // The cast is safe because of the hasPeeked check. 
E result = uncheckedCastNullableTToT(peekedElement); hasPeeked = false; peekedElement = null; return result; } @Override public void remove() { checkState(!hasPeeked, "Can't remove after you've peeked at next"); iterator.remove(); } @Override @ParametricNullness public E peek() { if (!hasPeeked) { peekedElement = iterator.next(); hasPeeked = true; } // The cast is safe because of the hasPeeked check. return uncheckedCastNullableTToT(peekedElement); } } /** * Returns a {@code PeekingIterator} backed by the given iterator. * * <p>Calls to the {@code peek} method with no intervening calls to {@code next} do not affect the * iteration, and hence return the same object each time. A subsequent call to {@code next} is * guaranteed to return the same object again. For example: * * <pre>{@code * PeekingIterator<String> peekingIterator = * Iterators.peekingIterator(Iterators.forArray("a", "b")); * String a1 = peekingIterator.peek(); // returns "a" * String a2 = peekingIterator.peek(); // also returns "a" * String a3 = peekingIterator.next(); // also returns "a" * }</pre> * * <p>Any structural changes to the underlying iteration (aside from those performed by the * iterator's own {@link PeekingIterator#remove()} method) will leave the iterator in an undefined * state. * * <p>The returned iterator does not support removal after peeking, as explained by {@link * PeekingIterator#remove()}. * * <p>Note: If the given iterator is already a {@code PeekingIterator}, it <i>might</i> be * returned to the caller, although this is neither guaranteed to occur nor required to be * consistent. For example, this method <i>might</i> choose to pass through recognized * implementations of {@code PeekingIterator} when the behavior of the implementation is known to * meet the contract guaranteed by this method. * * <p>There is no {@link Iterable} equivalent to this method, so use this method to wrap each * individual iterator as it is generated. * * @param iterator the backing iterator. 
The {@link PeekingIterator} assumes ownership of this * iterator, so users should cease making direct calls to it after calling this method. * @return a peeking iterator backed by that iterator. Apart from the additional {@link * PeekingIterator#peek()} method, this iterator behaves exactly the same as {@code iterator}. */ public static <T extends @Nullable Object> PeekingIterator<T> peekingIterator( Iterator<? extends T> iterator) { if (iterator instanceof PeekingImpl) { // Safe to cast <? extends T> to <T> because PeekingImpl only uses T // covariantly (and cannot be subclassed to add non-covariant uses). @SuppressWarnings("unchecked") PeekingImpl<T> peeking = (PeekingImpl<T>) iterator; return peeking; } return new PeekingImpl<>(iterator); } /** * Simply returns its argument. * * @deprecated no need to use this * @since 10.0 */ @Deprecated public static <T extends @Nullable Object> PeekingIterator<T> peekingIterator( PeekingIterator<T> iterator) { return checkNotNull(iterator); } /** * Returns an iterator over the merged contents of all given {@code iterators}, traversing every * element of the input iterators. Equivalent entries will not be de-duplicated. * * <p>Callers must ensure that the source {@code iterators} are in non-descending order as this * method does not sort its input. * * <p>For any equivalent elements across all {@code iterators}, it is undefined which element is * returned first. * * @since 11.0 */ public static <T extends @Nullable Object> UnmodifiableIterator<T> mergeSorted( Iterable<? extends Iterator<? extends T>> iterators, Comparator<? super T> comparator) { checkNotNull(iterators, "iterators"); checkNotNull(comparator, "comparator"); return new MergingIterator<>(iterators, comparator); } /** * An iterator that performs a lazy N-way merge, calculating the next value each time the iterator * is polled. This amortizes the sorting cost over the iteration and requires less memory than * sorting all elements at once. 
* * <p>Retrieving a single element takes approximately O(log(M)) time, where M is the number of * iterators. (Retrieving all elements takes approximately O(N*log(M)) time, where N is the total * number of elements.) */ private static class MergingIterator<T extends @Nullable Object> extends UnmodifiableIterator<T> { final Queue<PeekingIterator<T>> queue; public MergingIterator( Iterable<? extends Iterator<? extends T>> iterators, Comparator<? super T> itemComparator) { // A comparator that's used by the heap, allowing the heap // to be sorted based on the top of each iterator. Comparator<PeekingIterator<T>> heapComparator = (PeekingIterator<T> o1, PeekingIterator<T> o2) -> itemComparator.compare(o1.peek(), o2.peek()); queue = new PriorityQueue<>(2, heapComparator); for (Iterator<? extends T> iterator : iterators) { if (iterator.hasNext()) { queue.add(Iterators.peekingIterator(iterator)); } } } @Override public boolean hasNext() { return !queue.isEmpty(); } @Override @ParametricNullness public T next() { PeekingIterator<T> nextIter = queue.remove(); T next = nextIter.next(); if (nextIter.hasNext()) { queue.add(nextIter); } return next; } } private static class ConcatenatedIterator<T extends @Nullable Object> implements Iterator<T> { /* The last iterator to return an element. Calls to remove() go to this iterator. */ @CheckForNull private Iterator<? extends T> toRemove; /* The iterator currently returning elements. */ private Iterator<? extends T> iterator; /* * We track the "meta iterators," the iterators-of-iterators, below. Usually, topMetaIterator * is the only one in use, but if we encounter nested concatenations, we start a deque of * meta-iterators rather than letting the nesting get arbitrarily deep. This keeps each * operation O(1). */ @CheckForNull private Iterator<? extends Iterator<? extends T>> topMetaIterator; // Only becomes nonnull if we encounter nested concatenations. @CheckForNull private Deque<Iterator<? extends Iterator<? 
extends T>>> metaIterators; ConcatenatedIterator(Iterator<? extends Iterator<? extends T>> metaIterator) { iterator = emptyIterator(); topMetaIterator = checkNotNull(metaIterator); } // Returns a nonempty meta-iterator or, if all meta-iterators are empty, null. @CheckForNull private Iterator<? extends Iterator<? extends T>> getTopMetaIterator() { while (topMetaIterator == null || !topMetaIterator.hasNext()) { if (metaIterators != null && !metaIterators.isEmpty()) { topMetaIterator = metaIterators.removeFirst(); } else { return null; } } return topMetaIterator; } @Override public boolean hasNext() { while (!checkNotNull(iterator).hasNext()) { // this weird checkNotNull positioning appears required by our tests, which expect // both hasNext and next to throw NPE if an input iterator is null. topMetaIterator = getTopMetaIterator(); if (topMetaIterator == null) { return false; } iterator = topMetaIterator.next(); if (iterator instanceof ConcatenatedIterator) { // Instead of taking linear time in the number of nested concatenations, unpack // them into the queue @SuppressWarnings("unchecked") ConcatenatedIterator<T> topConcat = (ConcatenatedIterator<T>) iterator; iterator = topConcat.iterator; // topConcat.topMetaIterator, then topConcat.metaIterators, then this.topMetaIterator, // then this.metaIterators if (this.metaIterators == null) { this.metaIterators = new ArrayDeque<>(); } this.metaIterators.addFirst(this.topMetaIterator); if (topConcat.metaIterators != null) { while (!topConcat.metaIterators.isEmpty()) { this.metaIterators.addFirst(topConcat.metaIterators.removeLast()); } } this.topMetaIterator = topConcat.topMetaIterator; } } return true; } @Override @ParametricNullness public T next() { if (hasNext()) { toRemove = iterator; return iterator.next(); } else { throw new NoSuchElementException(); } } @Override public void remove() { if (toRemove == null) { throw new IllegalStateException("no calls to next() since the last call to remove()"); } toRemove.remove(); 
toRemove = null; } } }
secureonelabs/guava
guava/src/com/google/common/collect/Iterators.java
900
/* * This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt). * * The MIT License * Copyright © 2014-2022 Ilkka Seppälä * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ import abstractextensions.CommanderExtension; import abstractextensions.SergeantExtension; import abstractextensions.SoldierExtension; import java.util.Optional; import java.util.function.Function; import org.slf4j.LoggerFactory; import units.CommanderUnit; import units.SergeantUnit; import units.SoldierUnit; import units.Unit; /** * Anticipate that an object’s interface needs to be extended in the future. Additional interfaces * are defined by extension objects. */ public class App { /** * Program entry point. 
* * @param args command line args */ public static void main(String[] args) { //Create 3 different units var soldierUnit = new SoldierUnit("SoldierUnit1"); var sergeantUnit = new SergeantUnit("SergeantUnit1"); var commanderUnit = new CommanderUnit("CommanderUnit1"); //check for each unit to have an extension checkExtensionsForUnit(soldierUnit); checkExtensionsForUnit(sergeantUnit); checkExtensionsForUnit(commanderUnit); } private static void checkExtensionsForUnit(Unit unit) { final var logger = LoggerFactory.getLogger(App.class); var name = unit.getName(); Function<String, Runnable> func = e -> () -> logger.info("{} without {}", name, e); var extension = "SoldierExtension"; Optional.ofNullable(unit.getUnitExtension(extension)) .map(e -> (SoldierExtension) e) .ifPresentOrElse(SoldierExtension::soldierReady, func.apply(extension)); extension = "SergeantExtension"; Optional.ofNullable(unit.getUnitExtension(extension)) .map(e -> (SergeantExtension) e) .ifPresentOrElse(SergeantExtension::sergeantReady, func.apply(extension)); extension = "CommanderExtension"; Optional.ofNullable(unit.getUnitExtension(extension)) .map(e -> (CommanderExtension) e) .ifPresentOrElse(CommanderExtension::commanderReady, func.apply(extension)); } }
rajprins/java-design-patterns
extension-objects/src/main/java/App.java
901
package mindustry.content; import arc.graphics.*; import arc.graphics.g2d.*; import arc.math.*; import arc.math.geom.*; import arc.struct.*; import arc.util.*; import mindustry.ai.*; import mindustry.ai.types.*; import mindustry.annotations.Annotations.*; import mindustry.entities.*; import mindustry.entities.abilities.*; import mindustry.entities.bullet.*; import mindustry.entities.effect.*; import mindustry.entities.part.*; import mindustry.entities.pattern.*; import mindustry.gen.*; import mindustry.graphics.*; import mindustry.type.*; import mindustry.type.ammo.*; import mindustry.type.unit.*; import mindustry.type.weapons.*; import mindustry.world.meta.*; import static arc.graphics.g2d.Draw.*; import static arc.graphics.g2d.Lines.*; import static arc.math.Angles.*; import static mindustry.Vars.*; public class UnitTypes{ //region standard //mech public static @EntityDef({Unitc.class, Mechc.class}) UnitType mace, dagger, crawler, fortress, scepter, reign, vela; //mech, legacy public static @EntityDef(value = {Unitc.class, Mechc.class}, legacy = true) UnitType nova, pulsar, quasar; //legs public static @EntityDef({Unitc.class, Legsc.class}) UnitType corvus, atrax, merui, cleroi, anthicus, tecta, collaris; //legs, legacy public static @EntityDef(value = {Unitc.class, Legsc.class}, legacy = true) UnitType spiroct, arkyid, toxopid; //hover public static @EntityDef({Unitc.class, ElevationMovec.class}) UnitType elude; //air public static @EntityDef({Unitc.class}) UnitType flare, eclipse, horizon, zenith, antumbra, avert, obviate; //air, legacy public static @EntityDef(value = {Unitc.class}, legacy = true) UnitType mono; //air, legacy public static @EntityDef(value = {Unitc.class}, legacy = true) UnitType poly; //air + payload public static @EntityDef({Unitc.class, Payloadc.class}) UnitType mega, evoke, incite, emanate, quell, disrupt; //air + payload, legacy public static @EntityDef(value = {Unitc.class, Payloadc.class}, legacy = true) UnitType quad; //air + payload + 
legacy (different branch) public static @EntityDef(value = {Unitc.class, Payloadc.class}, legacy = true) UnitType oct; //air, legacy public static @EntityDef(value = {Unitc.class}, legacy = true) UnitType alpha, beta, gamma; //naval public static @EntityDef({Unitc.class, WaterMovec.class}) UnitType risso, minke, bryde, sei, omura, retusa, oxynoe, cyerce, aegires, navanax; //special block unit type public static @EntityDef({Unitc.class, BlockUnitc.class}) UnitType block; //special building tethered (has payload capability, because it's necessary sometimes) public static @EntityDef({Unitc.class, BuildingTetherc.class, Payloadc.class}) UnitType manifold, assemblyDrone; //tank public static @EntityDef({Unitc.class, Tankc.class}) UnitType stell, locus, precept, vanquish, conquer; //endregion //missile definition, unused here but needed for codegen public static @EntityDef({Unitc.class, TimedKillc.class}) UnitType missile; //region neoplasm public static @EntityDef({Unitc.class, Crawlc.class}) UnitType latum, renale; //endregion public static void load(){ //region ground attack dagger = new UnitType("dagger"){{ speed = 0.5f; hitSize = 8f; health = 150; weapons.add(new Weapon("large-weapon"){{ reload = 13f; x = 4f; y = 2f; top = false; ejectEffect = Fx.casing1; bullet = new BasicBulletType(2.5f, 9){{ width = 7f; height = 9f; lifetime = 60f; }}; }}); }}; mace = new UnitType("mace"){{ speed = 0.5f; hitSize = 10f; health = 550; armor = 4f; ammoType = new ItemAmmoType(Items.coal); immunities.add(StatusEffects.burning); weapons.add(new Weapon("flamethrower"){{ top = false; shootSound = Sounds.flame; shootY = 2f; reload = 11f; recoil = 1f; ejectEffect = Fx.none; bullet = new BulletType(4.2f, 37f){{ ammoMultiplier = 3f; hitSize = 7f; lifetime = 13f; pierce = true; pierceBuilding = true; pierceCap = 2; statusDuration = 60f * 4; shootEffect = Fx.shootSmallFlame; hitEffect = Fx.hitFlameSmall; despawnEffect = Fx.none; status = StatusEffects.burning; keepVelocity = false; hittable = 
false; }}; }}); }}; fortress = new UnitType("fortress"){{ speed = 0.43f; hitSize = 13f; rotateSpeed = 3f; targetAir = false; health = 900; armor = 9f; mechFrontSway = 0.55f; ammoType = new ItemAmmoType(Items.graphite); weapons.add(new Weapon("artillery"){{ top = false; y = 1f; x = 9f; reload = 60f; recoil = 4f; shake = 2f; ejectEffect = Fx.casing2; shootSound = Sounds.artillery; bullet = new ArtilleryBulletType(2f, 20, "shell"){{ hitEffect = Fx.blastExplosion; knockback = 0.8f; lifetime = 120f; width = height = 14f; collides = true; collidesTiles = true; splashDamageRadius = 35f; splashDamage = 80f; backColor = Pal.bulletYellowBack; frontColor = Pal.bulletYellow; }}; }}); }}; scepter = new UnitType("scepter"){{ speed = 0.36f; hitSize = 22f; rotateSpeed = 2.1f; health = 9000; armor = 10f; mechFrontSway = 1f; ammoType = new ItemAmmoType(Items.thorium); mechStepParticles = true; stepShake = 0.15f; singleTarget = true; drownTimeMultiplier = 4f; abilities.add(new ShieldRegenFieldAbility(25f, 250f, 60f * 1, 60f)); BulletType smallBullet = new BasicBulletType(3f, 10){{ width = 7f; height = 9f; lifetime = 50f; }}; weapons.add( new Weapon("scepter-weapon"){{ top = false; y = 1f; x = 16f; shootY = 8f; reload = 45f; recoil = 5f; shake = 2f; ejectEffect = Fx.casing3; shootSound = Sounds.bang; inaccuracy = 3f; shoot.shots = 3; shoot.shotDelay = 4f; bullet = new BasicBulletType(8f, 80){{ width = 11f; height = 20f; lifetime = 27f; shootEffect = Fx.shootBig; lightning = 2; lightningLength = 6; lightningColor = Pal.surge; //standard bullet damage is far too much for lightning lightningDamage = 20; }}; }}, new Weapon("mount-weapon"){{ reload = 13f; x = 8.5f; y = 6f; rotate = true; ejectEffect = Fx.casing1; bullet = smallBullet; }}, new Weapon("mount-weapon"){{ reload = 16f; x = 8.5f; y = -7f; rotate = true; ejectEffect = Fx.casing1; bullet = smallBullet; }} ); }}; reign = new UnitType("reign"){{ speed = 0.4f; hitSize = 26f; rotateSpeed = 1.65f; health = 24000; armor = 18f; 
mechStepParticles = true; stepShake = 0.75f; drownTimeMultiplier = 6f; mechFrontSway = 1.9f; mechSideSway = 0.6f; ammoType = new ItemAmmoType(Items.thorium); weapons.add( new Weapon("reign-weapon"){{ top = false; y = 1f; x = 21.5f; shootY = 11f; reload = 9f; recoil = 5f; shake = 2f; ejectEffect = Fx.casing4; shootSound = Sounds.bang; bullet = new BasicBulletType(13f, 80){{ pierce = true; pierceCap = 10; width = 14f; height = 33f; lifetime = 15f; shootEffect = Fx.shootBig; fragVelocityMin = 0.4f; hitEffect = Fx.blastExplosion; splashDamage = 18f; splashDamageRadius = 13f; fragBullets = 3; fragLifeMin = 0f; fragRandomSpread = 30f; fragBullet = new BasicBulletType(9f, 20){{ width = 10f; height = 10f; pierce = true; pierceBuilding = true; pierceCap = 3; lifetime = 20f; hitEffect = Fx.flakExplosion; splashDamage = 15f; splashDamageRadius = 10f; }}; }}; }} ); }}; //endregion //region ground support nova = new UnitType("nova"){{ canBoost = true; boostMultiplier = 1.5f; speed = 0.55f; hitSize = 8f; health = 120f; buildSpeed = 0.8f; armor = 1f; abilities.add(new RepairFieldAbility(10f, 60f * 4, 60f)); ammoType = new PowerAmmoType(1000); weapons.add(new Weapon("heal-weapon"){{ top = false; shootY = 2f; reload = 24f; x = 4.5f; alternate = false; ejectEffect = Fx.none; recoil = 2f; shootSound = Sounds.lasershoot; bullet = new LaserBoltBulletType(5.2f, 13){{ lifetime = 30f; healPercent = 5f; collidesTeam = true; backColor = Pal.heal; frontColor = Color.white; }}; }}); }}; pulsar = new UnitType("pulsar"){{ canBoost = true; boostMultiplier = 1.6f; speed = 0.7f; hitSize = 11f; health = 320f; buildSpeed = 0.9f; armor = 4f; riseSpeed = 0.07f; mineTier = 2; mineSpeed = 5f; abilities.add(new ShieldRegenFieldAbility(20f, 40f, 60f * 5, 60f)); ammoType = new PowerAmmoType(1300); weapons.add(new Weapon("heal-shotgun-weapon"){{ top = false; x = 5f; shake = 2.2f; y = 0.5f; shootY = 2.5f; reload = 36f; inaccuracy = 35; shoot.shots = 3; shoot.shotDelay = 0.5f; ejectEffect = Fx.none; recoil = 
2.5f; shootSound = Sounds.spark; bullet = new LightningBulletType(){{ lightningColor = hitColor = Pal.heal; damage = 14f; lightningLength = 7; lightningLengthRand = 7; shootEffect = Fx.shootHeal; //Does not actually do anything; Just here to make stats work healPercent = 2f; lightningType = new BulletType(0.0001f, 0f){{ lifetime = Fx.lightning.lifetime; hitEffect = Fx.hitLancer; despawnEffect = Fx.none; status = StatusEffects.shocked; statusDuration = 10f; hittable = false; healPercent = 1.6f; collidesTeam = true; }}; }}; }}); }}; quasar = new UnitType("quasar"){{ mineTier = 3; boostMultiplier = 2f; health = 640f; buildSpeed = 1.7f; canBoost = true; armor = 9f; mechLandShake = 2f; riseSpeed = 0.05f; mechFrontSway = 0.55f; ammoType = new PowerAmmoType(1500); speed = 0.4f; hitSize = 13f; mineSpeed = 6f; drawShields = false; abilities.add(new ForceFieldAbility(60f, 0.3f, 400f, 60f * 6)); weapons.add(new Weapon("beam-weapon"){{ top = false; shake = 2f; shootY = 4f; x = 6.5f; reload = 55f; recoil = 4f; shootSound = Sounds.laser; bullet = new LaserBulletType(){{ damage = 45f; recoil = 1f; sideAngle = 45f; sideWidth = 1f; sideLength = 70f; healPercent = 10f; collidesTeam = true; length = 135f; colors = new Color[]{Pal.heal.cpy().a(0.4f), Pal.heal, Color.white}; }}; }}); }}; vela = new UnitType("vela"){{ hitSize = 24f; rotateSpeed = 1.8f; mechFrontSway = 1f; buildSpeed = 3f; mechStepParticles = true; stepShake = 0.15f; ammoType = new PowerAmmoType(2500); drownTimeMultiplier = 4f; speed = 0.44f; boostMultiplier = 2.4f; engineOffset = 12f; engineSize = 6f; lowAltitude = true; riseSpeed = 0.02f; health = 8200f; armor = 9f; canBoost = true; mechLandShake = 4f; immunities = ObjectSet.with(StatusEffects.burning); singleTarget = true; weapons.add(new Weapon("vela-weapon"){{ mirror = false; top = false; shake = 4f; shootY = 14f; x = y = 0f; shoot.firstShotDelay = Fx.greenLaserChargeSmall.lifetime - 1f; parentizeEffects = true; reload = 155f; recoil = 0f; chargeSound = 
Sounds.lasercharge2; shootSound = Sounds.beam; continuous = true; cooldownTime = 200f; bullet = new ContinuousLaserBulletType(){{ damage = 35f; length = 180f; hitEffect = Fx.hitMeltHeal; drawSize = 420f; lifetime = 160f; shake = 1f; despawnEffect = Fx.smokeCloud; smokeEffect = Fx.none; chargeEffect = Fx.greenLaserChargeSmall; incendChance = 0.1f; incendSpread = 5f; incendAmount = 1; //constant healing healPercent = 1f; collidesTeam = true; colors = new Color[]{Pal.heal.cpy().a(.2f), Pal.heal.cpy().a(.5f), Pal.heal.cpy().mul(1.2f), Color.white}; }}; shootStatus = StatusEffects.slow; shootStatusDuration = bullet.lifetime + shoot.firstShotDelay; }}); weapons.add(new RepairBeamWeapon("repair-beam-weapon-center-large"){{ x = 44 / 4f; y = -30f / 4f; shootY = 6f; beamWidth = 0.8f; repairSpeed = 1.4f; bullet = new BulletType(){{ maxRange = 120f; }}; }}); }}; corvus = new UnitType("corvus"){{ hitSize = 29f; health = 18000f; armor = 9f; stepShake = 1.5f; rotateSpeed = 1.5f; drownTimeMultiplier = 6f; legCount = 4; legLength = 14f; legBaseOffset = 11f; legMoveSpace = 1.5f; legForwardScl = 0.58f; hovering = true; shadowElevation = 0.2f; ammoType = new PowerAmmoType(4000); groundLayer = Layer.legUnit; speed = 0.3f; drawShields = false; weapons.add(new Weapon("corvus-weapon"){{ shootSound = Sounds.laserblast; chargeSound = Sounds.lasercharge; soundPitchMin = 1f; top = false; mirror = false; shake = 14f; shootY = 5f; x = y = 0; reload = 350f; recoil = 0f; cooldownTime = 350f; shootStatusDuration = 60f * 2f; shootStatus = StatusEffects.unmoving; shoot.firstShotDelay = Fx.greenLaserCharge.lifetime; parentizeEffects = true; bullet = new LaserBulletType(){{ length = 460f; damage = 560f; width = 75f; lifetime = 65f; lightningSpacing = 35f; lightningLength = 5; lightningDelay = 1.1f; lightningLengthRand = 15; lightningDamage = 50; lightningAngleRand = 40f; largeHit = true; lightColor = lightningColor = Pal.heal; chargeEffect = Fx.greenLaserCharge; healPercent = 25f; collidesTeam = true; 
sideAngle = 15f; sideWidth = 0f; sideLength = 0f; colors = new Color[]{Pal.heal.cpy().a(0.4f), Pal.heal, Color.white}; }}; }}); }}; //endregion //region ground legs crawler = new UnitType("crawler"){{ aiController = SuicideAI::new; speed = 1f; hitSize = 8f; health = 200; mechSideSway = 0.25f; range = 40f; ammoType = new ItemAmmoType(Items.coal); weapons.add(new Weapon(){{ shootOnDeath = true; reload = 24f; shootCone = 180f; ejectEffect = Fx.none; shootSound = Sounds.explosion; x = shootY = 0f; mirror = false; bullet = new BulletType(){{ collidesTiles = false; collides = false; hitSound = Sounds.explosion; rangeOverride = 30f; hitEffect = Fx.pulverize; speed = 0f; splashDamageRadius = 55f; instantDisappear = true; splashDamage = 90f; killShooter = true; hittable = false; collidesAir = true; }}; }}); }}; atrax = new UnitType("atrax"){{ speed = 0.6f; drag = 0.4f; hitSize = 13f; rotateSpeed = 3f; targetAir = false; health = 600; immunities = ObjectSet.with(StatusEffects.burning, StatusEffects.melting); legCount = 4; legLength = 9f; legForwardScl = 0.6f; legMoveSpace = 1.4f; hovering = true; armor = 3f; ammoType = new ItemAmmoType(Items.coal); shadowElevation = 0.2f; groundLayer = Layer.legUnit - 1f; weapons.add(new Weapon("atrax-weapon"){{ top = false; shootY = 3f; reload = 9f; ejectEffect = Fx.none; recoil = 1f; x = 7f; shootSound = Sounds.flame; bullet = new LiquidBulletType(Liquids.slag){{ damage = 13; speed = 2.5f; drag = 0.009f; shootEffect = Fx.shootSmall; lifetime = 57f; collidesAir = false; }}; }}); }}; spiroct = new UnitType("spiroct"){{ speed = 0.54f; drag = 0.4f; hitSize = 15f; rotateSpeed = 3f; health = 1000; legCount = 6; legLength = 13f; legForwardScl = 0.8f; legMoveSpace = 1.4f; legBaseOffset = 2f; hovering = true; armor = 5f; ammoType = new PowerAmmoType(1000); shadowElevation = 0.3f; groundLayer = Layer.legUnit; weapons.add(new Weapon("spiroct-weapon"){{ shootY = 4f; reload = 14f; ejectEffect = Fx.none; recoil = 2f; rotate = true; shootSound = 
Sounds.sap; x = 8.5f; y = -1.5f; bullet = new SapBulletType(){{ sapStrength = 0.5f; length = 75f; damage = 23; shootEffect = Fx.shootSmall; hitColor = color = Color.valueOf("bf92f9"); despawnEffect = Fx.none; width = 0.54f; lifetime = 35f; knockback = -1.24f; }}; }}); weapons.add(new Weapon("mount-purple-weapon"){{ reload = 18f; rotate = true; x = 4f; y = 3f; shootSound = Sounds.sap; bullet = new SapBulletType(){{ sapStrength = 0.8f; length = 40f; damage = 18; shootEffect = Fx.shootSmall; hitColor = color = Color.valueOf("bf92f9"); despawnEffect = Fx.none; width = 0.4f; lifetime = 25f; knockback = -0.65f; }}; }}); }}; arkyid = new UnitType("arkyid"){{ drag = 0.1f; speed = 0.62f; hitSize = 23f; health = 8000; armor = 6f; rotateSpeed = 2.7f; legCount = 6; legMoveSpace = 1f; legPairOffset = 3; legLength = 30f; legExtension = -15; legBaseOffset = 10f; stepShake = 1f; legLengthScl = 0.96f; rippleScale = 2f; legSpeed = 0.2f; ammoType = new PowerAmmoType(2000); legSplashDamage = 32; legSplashRange = 30; drownTimeMultiplier = 2f; hovering = true; shadowElevation = 0.65f; groundLayer = Layer.legUnit; BulletType sapper = new SapBulletType(){{ sapStrength = 0.85f; length = 55f; damage = 40; shootEffect = Fx.shootSmall; hitColor = color = Color.valueOf("bf92f9"); despawnEffect = Fx.none; width = 0.55f; lifetime = 30f; knockback = -1f; }}; weapons.add( new Weapon("spiroct-weapon"){{ reload = 9f; x = 4f; y = 8f; rotate = true; bullet = sapper; shootSound = Sounds.sap; }}, new Weapon("spiroct-weapon"){{ reload = 14f; x = 9f; y = 6f; rotate = true; bullet = sapper; shootSound = Sounds.sap; }}, new Weapon("spiroct-weapon"){{ reload = 22f; x = 14f; y = 0f; rotate = true; bullet = sapper; shootSound = Sounds.sap; }}, new Weapon("large-purple-mount"){{ y = -7f; x = 9f; shootY = 7f; reload = 45; shake = 3f; rotateSpeed = 2f; ejectEffect = Fx.casing1; shootSound = Sounds.artillery; rotate = true; shadow = 8f; recoil = 3f; bullet = new ArtilleryBulletType(2f, 12){{ hitEffect = 
Fx.sapExplosion; knockback = 0.8f; lifetime = 70f; width = height = 19f; collidesTiles = true; ammoMultiplier = 4f; splashDamageRadius = 70f; splashDamage = 65f; backColor = Pal.sapBulletBack; frontColor = lightningColor = Pal.sapBullet; lightning = 3; lightningLength = 10; smokeEffect = Fx.shootBigSmoke2; shake = 5f; status = StatusEffects.sapped; statusDuration = 60f * 10; }}; }}); }}; toxopid = new UnitType("toxopid"){{ drag = 0.1f; speed = 0.5f; hitSize = 26f; health = 22000; armor = 13f; lightRadius = 140f; rotateSpeed = 1.9f; drownTimeMultiplier = 3f; legCount = 8; legMoveSpace = 0.8f; legPairOffset = 3; legLength = 75f; legExtension = -20; legBaseOffset = 8f; stepShake = 1f; legLengthScl = 0.93f; rippleScale = 3f; legSpeed = 0.19f; ammoType = new ItemAmmoType(Items.graphite, 8); legSplashDamage = 80; legSplashRange = 60; hovering = true; shadowElevation = 0.95f; groundLayer = Layer.legUnit; weapons.add( new Weapon("large-purple-mount"){{ y = -5f; x = 11f; shootY = 7f; reload = 30; shake = 4f; rotateSpeed = 2f; ejectEffect = Fx.casing1; shootSound = Sounds.shootBig; rotate = true; shadow = 12f; recoil = 3f; shoot = new ShootSpread(2, 17f); bullet = new ShrapnelBulletType(){{ length = 90f; damage = 110f; width = 25f; serrationLenScl = 7f; serrationSpaceOffset = 60f; serrationFadeOffset = 0f; serrations = 10; serrationWidth = 6f; fromColor = Pal.sapBullet; toColor = Pal.sapBulletBack; shootEffect = smokeEffect = Fx.sparkShoot; }}; }}); weapons.add(new Weapon("toxopid-cannon"){{ y = -14f; x = 0f; shootY = 22f; mirror = false; reload = 210; shake = 10f; recoil = 10f; rotateSpeed = 1f; ejectEffect = Fx.casing3; shootSound = Sounds.artillery; rotate = true; shadow = 30f; rotationLimit = 80f; bullet = new ArtilleryBulletType(3f, 50){{ hitEffect = Fx.sapExplosion; knockback = 0.8f; lifetime = 80f; width = height = 25f; collidesTiles = collides = true; ammoMultiplier = 4f; splashDamageRadius = 80f; splashDamage = 75f; backColor = Pal.sapBulletBack; frontColor = 
lightningColor = Pal.sapBullet; lightning = 5; lightningLength = 20; smokeEffect = Fx.shootBigSmoke2; hitShake = 10f; lightRadius = 40f; lightColor = Pal.sap; lightOpacity = 0.6f; status = StatusEffects.sapped; statusDuration = 60f * 10; fragLifeMin = 0.3f; fragBullets = 9; fragBullet = new ArtilleryBulletType(2.3f, 30){{ hitEffect = Fx.sapExplosion; knockback = 0.8f; lifetime = 90f; width = height = 20f; collidesTiles = false; splashDamageRadius = 70f; splashDamage = 40f; backColor = Pal.sapBulletBack; frontColor = lightningColor = Pal.sapBullet; lightning = 2; lightningLength = 5; smokeEffect = Fx.shootBigSmoke2; hitShake = 5f; lightRadius = 30f; lightColor = Pal.sap; lightOpacity = 0.5f; status = StatusEffects.sapped; statusDuration = 60f * 10; }}; }}; }}); }}; //endregion //region air attack flare = new UnitType("flare"){{ speed = 2.7f; accel = 0.08f; drag = 0.04f; flying = true; health = 70; engineOffset = 5.75f; //TODO balance //targetAir = false; targetFlags = new BlockFlag[]{BlockFlag.generator, null}; hitSize = 9; itemCapacity = 10; weapons.add(new Weapon(){{ y = 0f; x = 2f; reload = 20f; ejectEffect = Fx.casing1; bullet = new BasicBulletType(2.5f, 9){{ width = 7f; height = 9f; lifetime = 45f; shootEffect = Fx.shootSmall; smokeEffect = Fx.shootSmallSmoke; ammoMultiplier = 2; }}; shootSound = Sounds.pew; }}); }}; horizon = new UnitType("horizon"){{ health = 340; speed = 1.65f; accel = 0.08f; drag = 0.016f; flying = true; hitSize = 10f; targetAir = false; engineOffset = 7.8f; range = 140f; faceTarget = false; armor = 3f; itemCapacity = 0; targetFlags = new BlockFlag[]{BlockFlag.factory, null}; circleTarget = true; ammoType = new ItemAmmoType(Items.graphite); weapons.add(new Weapon(){{ minShootVelocity = 0.75f; x = 3f; shootY = 0f; reload = 12f; shootCone = 180f; ejectEffect = Fx.none; inaccuracy = 15f; ignoreRotation = true; shootSound = Sounds.none; bullet = new BombBulletType(27f, 25f){{ width = 10f; height = 14f; hitEffect = Fx.flakExplosion; shootEffect 
= Fx.none; smokeEffect = Fx.none; status = StatusEffects.blasted; statusDuration = 60f; }}; }}); }}; zenith = new UnitType("zenith"){{ health = 700; speed = 1.7f; accel = 0.04f; drag = 0.016f; flying = true; range = 140f; hitSize = 20f; lowAltitude = true; forceMultiTarget = true; armor = 5f; targetFlags = new BlockFlag[]{BlockFlag.launchPad, BlockFlag.storage, BlockFlag.battery, null}; engineOffset = 12f; engineSize = 3f; ammoType = new ItemAmmoType(Items.graphite); weapons.add(new Weapon("zenith-missiles"){{ reload = 40f; x = 7f; rotate = true; shake = 1f; shoot.shots = 2; inaccuracy = 5f; velocityRnd = 0.2f; shootSound = Sounds.missile; bullet = new MissileBulletType(3f, 14){{ width = 8f; height = 8f; shrinkY = 0f; drag = -0.003f; homingRange = 60f; keepVelocity = false; splashDamageRadius = 25f; splashDamage = 15f; lifetime = 50f; trailColor = Pal.unitBack; backColor = Pal.unitBack; frontColor = Pal.unitFront; hitEffect = Fx.blastExplosion; despawnEffect = Fx.blastExplosion; weaveScale = 6f; weaveMag = 1f; }}; }}); }}; antumbra = new UnitType("antumbra"){{ speed = 0.8f; accel = 0.04f; drag = 0.04f; rotateSpeed = 1.9f; flying = true; lowAltitude = true; health = 7200; armor = 9f; engineOffset = 21; engineSize = 5.3f; hitSize = 46f; targetFlags = new BlockFlag[]{BlockFlag.generator, BlockFlag.core, null}; ammoType = new ItemAmmoType(Items.thorium); BulletType missiles = new MissileBulletType(2.7f, 18){{ width = 8f; height = 8f; shrinkY = 0f; drag = -0.01f; splashDamageRadius = 20f; splashDamage = 37f; ammoMultiplier = 4f; lifetime = 50f; hitEffect = Fx.blastExplosion; despawnEffect = Fx.blastExplosion; status = StatusEffects.blasted; statusDuration = 60f; }}; weapons.add( new Weapon("missiles-mount"){{ y = 8f; x = 17f; reload = 20f; ejectEffect = Fx.casing1; rotateSpeed = 8f; bullet = missiles; shootSound = Sounds.missile; rotate = true; shadow = 6f; }}, new Weapon("missiles-mount"){{ y = -8f; x = 17f; reload = 35; rotateSpeed = 8f; ejectEffect = Fx.casing1; 
bullet = missiles; shootSound = Sounds.missile; rotate = true; shadow = 6f; }}, new Weapon("large-bullet-mount"){{ y = 2f; x = 10f; shootY = 10f; reload = 12; shake = 1f; rotateSpeed = 2f; ejectEffect = Fx.casing1; shootSound = Sounds.shootBig; rotate = true; shadow = 8f; bullet = new BasicBulletType(7f, 55){{ width = 12f; height = 18f; lifetime = 25f; shootEffect = Fx.shootBig; }}; }} ); }}; eclipse = new UnitType("eclipse"){{ speed = 0.54f; accel = 0.04f; drag = 0.04f; rotateSpeed = 1f; flying = true; lowAltitude = true; health = 22000; engineOffset = 38; engineSize = 7.3f; hitSize = 58f; armor = 13f; targetFlags = new BlockFlag[]{BlockFlag.reactor, BlockFlag.battery, BlockFlag.core, null}; ammoType = new ItemAmmoType(Items.thorium); BulletType fragBullet = new FlakBulletType(4f, 15){{ shootEffect = Fx.shootBig; ammoMultiplier = 4f; splashDamage = 65f; splashDamageRadius = 25f; collidesGround = true; lifetime = 47f; status = StatusEffects.blasted; statusDuration = 60f; }}; weapons.add( new Weapon("large-laser-mount"){{ shake = 4f; shootY = 9f; x = 18f; y = 5f; rotateSpeed = 2f; reload = 45f; recoil = 4f; shootSound = Sounds.laser; shadow = 20f; rotate = true; bullet = new LaserBulletType(){{ damage = 115f; sideAngle = 20f; sideWidth = 1.5f; sideLength = 80f; width = 25f; length = 230f; shootEffect = Fx.shockwave; colors = new Color[]{Color.valueOf("ec7458aa"), Color.valueOf("ff9c5a"), Color.white}; }}; }}, new Weapon("large-artillery"){{ x = 11f; y = 27f; rotateSpeed = 2f; reload = 9f; shootSound = Sounds.shoot; shadow = 7f; rotate = true; recoil = 0.5f; shootY = 7.25f; bullet = fragBullet; }}, new Weapon("large-artillery"){{ y = -13f; x = 20f; reload = 12f; ejectEffect = Fx.casing1; rotateSpeed = 7f; shake = 1f; shootSound = Sounds.shoot; rotate = true; shadow = 12f; shootY = 7.25f; bullet = fragBullet; }}); }}; //endregion //region air support mono = new UnitType("mono"){{ //there's no reason to command monos anywhere. it's just annoying. 
controller = u -> new MinerAI(); defaultCommand = UnitCommand.mineCommand; flying = true; drag = 0.06f; accel = 0.12f; speed = 1.5f; health = 100; engineSize = 1.8f; engineOffset = 5.7f; range = 50f; isEnemy = false; ammoType = new PowerAmmoType(500); mineTier = 1; mineSpeed = 2.5f; }}; poly = new UnitType("poly"){{ defaultCommand = UnitCommand.rebuildCommand; flying = true; drag = 0.05f; speed = 2.6f; rotateSpeed = 15f; accel = 0.1f; range = 130f; health = 400; buildSpeed = 0.5f; engineOffset = 6.5f; hitSize = 9f; lowAltitude = true; ammoType = new PowerAmmoType(900); mineTier = 2; mineSpeed = 3.5f; abilities.add(new RepairFieldAbility(5f, 60f * 8, 50f)); weapons.add(new Weapon("poly-weapon"){{ top = false; y = -2.5f; x = 3.75f; reload = 30f; ejectEffect = Fx.none; recoil = 2f; shootSound = Sounds.missile; velocityRnd = 0.5f; inaccuracy = 15f; alternate = true; bullet = new MissileBulletType(4f, 12){{ homingPower = 0.08f; weaveMag = 4; weaveScale = 4; lifetime = 50f; keepVelocity = false; shootEffect = Fx.shootHeal; smokeEffect = Fx.hitLaser; hitEffect = despawnEffect = Fx.hitLaser; frontColor = Color.white; hitSound = Sounds.none; healPercent = 5.5f; collidesTeam = true; reflectable = false; backColor = Pal.heal; trailColor = Pal.heal; }}; }}); }}; mega = new UnitType("mega"){{ defaultCommand = UnitCommand.repairCommand; mineTier = 3; mineSpeed = 4f; health = 460; armor = 3f; speed = 2.5f; accel = 0.06f; drag = 0.017f; lowAltitude = true; flying = true; engineOffset = 10.5f; faceTarget = false; hitSize = 16.05f; engineSize = 3f; payloadCapacity = (2 * 2) * tilePayload; buildSpeed = 2.6f; isEnemy = false; ammoType = new PowerAmmoType(1100); weapons.add( new Weapon("heal-weapon-mount"){{ shootSound = Sounds.lasershoot; reload = 24f; x = 8f; y = -6f; rotate = true; bullet = new LaserBoltBulletType(5.2f, 10){{ lifetime = 35f; healPercent = 5.5f; collidesTeam = true; backColor = Pal.heal; frontColor = Color.white; }}; }}, new Weapon("heal-weapon-mount"){{ shootSound = 
Sounds.lasershoot; reload = 15f; x = 4f; y = 5f; rotate = true; bullet = new LaserBoltBulletType(5.2f, 8){{ lifetime = 35f; healPercent = 3f; collidesTeam = true; backColor = Pal.heal; frontColor = Color.white; }}; }}); }}; quad = new UnitType("quad"){{ armor = 8f; health = 6000; speed = 1.2f; rotateSpeed = 2f; accel = 0.05f; drag = 0.017f; lowAltitude = false; flying = true; circleTarget = true; engineOffset = 13f; engineSize = 7f; faceTarget = false; hitSize = 36f; payloadCapacity = (3 * 3) * tilePayload; buildSpeed = 2.5f; buildBeamOffset = 23; range = 140f; targetAir = false; targetFlags = new BlockFlag[]{BlockFlag.battery, BlockFlag.factory, null}; ammoType = new PowerAmmoType(3000); weapons.add( new Weapon(){{ x = y = 0f; mirror = false; reload = 55f; minShootVelocity = 0.01f; soundPitchMin = 1f; shootSound = Sounds.plasmadrop; bullet = new BasicBulletType(){{ sprite = "large-bomb"; width = height = 120/4f; maxRange = 30f; ignoreRotation = true; backColor = Pal.heal; frontColor = Color.white; mixColorTo = Color.white; hitSound = Sounds.plasmaboom; shootCone = 180f; ejectEffect = Fx.none; hitShake = 4f; collidesAir = false; lifetime = 70f; despawnEffect = Fx.greenBomb; hitEffect = Fx.massiveExplosion; keepVelocity = false; spin = 2f; shrinkX = shrinkY = 0.7f; speed = 0f; collides = false; healPercent = 15f; splashDamage = 220f; splashDamageRadius = 80f; }}; }}); }}; oct = new UnitType("oct"){{ aiController = DefenderAI::new; armor = 16f; health = 24000; speed = 0.8f; rotateSpeed = 1f; accel = 0.04f; drag = 0.018f; flying = true; engineOffset = 46f; engineSize = 7.8f; faceTarget = false; hitSize = 66f; payloadCapacity = (5.5f * 5.5f) * tilePayload; buildSpeed = 4f; drawShields = false; lowAltitude = true; buildBeamOffset = 43; ammoCapacity = 1; abilities.add(new ForceFieldAbility(140f, 4f, 7000f, 60f * 8, 8, 0f), new RepairFieldAbility(130f, 60f * 2, 140f)); }}; //endregion //region naval attack risso = new UnitType("risso"){{ speed = 1.1f; drag = 0.13f; 
hitSize = 10f; health = 280; accel = 0.4f; rotateSpeed = 3.3f; faceTarget = false; armor = 2f; weapons.add(new Weapon("mount-weapon"){{ reload = 13f; x = 4f; shootY = 4f; y = 1.5f; rotate = true; ejectEffect = Fx.casing1; bullet = new BasicBulletType(2.5f, 9){{ width = 7f; height = 9f; lifetime = 60f; ammoMultiplier = 2; }}; }}); weapons.add(new Weapon("missiles-mount"){{ mirror = false; reload = 25f; x = 0f; y = -5f; rotate = true; ejectEffect = Fx.casing1; shootSound = Sounds.missile; bullet = new MissileBulletType(2.7f, 12, "missile"){{ keepVelocity = true; width = 8f; height = 8f; shrinkY = 0f; drag = -0.003f; homingRange = 60f; splashDamageRadius = 25f; splashDamage = 10f; lifetime = 65f; trailColor = Color.gray; backColor = Pal.bulletYellowBack; frontColor = Pal.bulletYellow; hitEffect = Fx.blastExplosion; despawnEffect = Fx.blastExplosion; weaveScale = 8f; weaveMag = 2f; }}; }}); }}; minke = new UnitType("minke"){{ health = 600; speed = 0.9f; drag = 0.15f; hitSize = 13f; armor = 4f; accel = 0.3f; rotateSpeed = 2.6f; faceTarget = false; ammoType = new ItemAmmoType(Items.graphite); trailLength = 20; waveTrailX = 5.5f; waveTrailY = -4f; trailScl = 1.9f; weapons.add(new Weapon("mount-weapon"){{ reload = 10f; x = 5f; y = 3.5f; rotate = true; rotateSpeed = 5f; inaccuracy = 8f; ejectEffect = Fx.casing1; shootSound = Sounds.shoot; bullet = new FlakBulletType(4.2f, 3){{ lifetime = 60f; ammoMultiplier = 4f; shootEffect = Fx.shootSmall; width = 6f; height = 8f; hitEffect = Fx.flakExplosion; splashDamage = 27f * 1.5f; splashDamageRadius = 15f; }}; }}); weapons.add(new Weapon("artillery-mount"){{ reload = 30f; x = 5f; y = -5f; rotate = true; inaccuracy = 2f; rotateSpeed = 2f; shake = 1.5f; ejectEffect = Fx.casing2; shootSound = Sounds.bang; bullet = new ArtilleryBulletType(3f, 20, "shell"){{ hitEffect = Fx.flakExplosion; knockback = 0.8f; lifetime = 80f; width = height = 11f; collidesTiles = false; splashDamageRadius = 30f * 0.75f; splashDamage = 40f; }}; }}); }}; bryde 
= new UnitType("bryde"){{ health = 910; speed = 0.85f; accel = 0.2f; rotateSpeed = 1.8f; drag = 0.17f; hitSize = 20f; armor = 7f; faceTarget = false; ammoType = new ItemAmmoType(Items.graphite); trailLength = 22; waveTrailX = 7f; waveTrailY = -9f; trailScl = 1.5f; abilities.add(new ShieldRegenFieldAbility(20f, 40f, 60f * 4, 60f)); weapons.add(new Weapon("large-artillery"){{ reload = 65f; mirror = false; x = 0f; y = -3.5f; rotateSpeed = 1.7f; rotate = true; shootY = 7f; shake = 5f; recoil = 4f; shadow = 12f; inaccuracy = 3f; ejectEffect = Fx.casing3; shootSound = Sounds.artillery; bullet = new ArtilleryBulletType(3.2f, 15){{ trailMult = 0.8f; hitEffect = Fx.massiveExplosion; knockback = 1.5f; lifetime = 84f; height = 15.5f; width = 15f; collidesTiles = false; splashDamageRadius = 40f; splashDamage = 70f; backColor = Pal.missileYellowBack; frontColor = Pal.missileYellow; trailEffect = Fx.artilleryTrail; trailSize = 6f; hitShake = 4f; shootEffect = Fx.shootBig2; status = StatusEffects.blasted; statusDuration = 60f; }}; }}); weapons.add(new Weapon("missiles-mount"){{ reload = 20f; x = 8.5f; y = -9f; shadow = 6f; rotateSpeed = 4f; rotate = true; shoot.shots = 2; shoot.shotDelay = 3f; inaccuracy = 5f; velocityRnd = 0.1f; shootSound = Sounds.missile; ammoType = new ItemAmmoType(Items.thorium); ejectEffect = Fx.none; bullet = new MissileBulletType(2.7f, 12){{ width = 8f; height = 8f; shrinkY = 0f; drag = -0.003f; homingRange = 60f; keepVelocity = false; splashDamageRadius = 25f; splashDamage = 10f; lifetime = 70f; trailColor = Color.gray; backColor = Pal.bulletYellowBack; frontColor = Pal.bulletYellow; hitEffect = Fx.blastExplosion; despawnEffect = Fx.blastExplosion; weaveScale = 8f; weaveMag = 1f; }}; }}); }}; sei = new UnitType("sei"){{ health = 11000; armor = 12f; speed = 0.73f; drag = 0.17f; hitSize = 39f; accel = 0.2f; rotateSpeed = 1.3f; faceTarget = false; ammoType = new ItemAmmoType(Items.thorium); trailLength = 50; waveTrailX = 18f; waveTrailY = -21f; trailScl = 
// NOTE(review): this chunk is unit-content data from inside a large load()-style method
// (method header/footer are outside this view). Each statement assigns a static unit
// field via an anonymous subclass with an instance-initializer block ("double-brace"
// style), setting tuned stats, weapons, bullets, and visual effects.
// Formatting below restores one statement per line; code tokens are unchanged.
// Tail of a unit definition that begins before this chunk: a 3-barrel alternating
// homing-missile launcher plus a rotating large bullet mount.
3f; weapons.add(new Weapon("sei-launcher"){{ x = 0f; y = 0f; rotate = true; rotateSpeed = 4f; mirror = false; shadow = 20f; shootY = 4.5f; recoil = 4f; reload = 45f; velocityRnd = 0.4f; inaccuracy = 7f; ejectEffect = Fx.none; shake = 1f; shootSound = Sounds.missile; shoot = new ShootAlternate(){{ shots = 6; shotDelay = 1.5f; spread = 4f; barrels = 3; }}; bullet = new MissileBulletType(4.2f, 42){{ homingPower = 0.12f; width = 8f; height = 8f; shrinkX = shrinkY = 0f; drag = -0.003f; homingRange = 80f; keepVelocity = false; splashDamageRadius = 35f; splashDamage = 45f; lifetime = 62f; trailColor = Pal.bulletYellowBack; backColor = Pal.bulletYellowBack; frontColor = Pal.bulletYellow; hitEffect = Fx.blastExplosion; despawnEffect = Fx.blastExplosion; weaveScale = 8f; weaveMag = 2f; }}; }}); weapons.add(new Weapon("large-bullet-mount"){{ reload = 60f; cooldownTime = 90f; x = 70f/4f; y = -66f/4f; rotateSpeed = 4f; rotate = true; shootY = 7f; shake = 2f; recoil = 3f; shadow = 12f; ejectEffect = Fx.casing3; shootSound = Sounds.shootBig; shoot.shots = 3; shoot.shotDelay = 4f; inaccuracy = 1f; bullet = new BasicBulletType(7f, 57){{ width = 13f; height = 19f; shootEffect = Fx.shootBig; lifetime = 35f; }}; }}); }};
// omura: heavy naval unit (22000 health) with a single long-range railgun cannon
// and two abilities that periodically spawn `flare` units.
omura = new UnitType("omura"){{ health = 22000; speed = 0.62f; drag = 0.18f; hitSize = 58f; armor = 16f; accel = 0.19f; rotateSpeed = 0.9f; faceTarget = false; ammoType = new PowerAmmoType(4000); float spawnTime = 60f * 15f; abilities.add(new UnitSpawnAbility(flare, spawnTime, 19.25f, -31.75f), new UnitSpawnAbility(flare, spawnTime, -19.25f, -31.75f)); trailLength = 70; waveTrailX = 23f; waveTrailY = -32f; trailScl = 3.5f; weapons.add(new Weapon("omura-cannon"){{ reload = 110f; cooldownTime = 90f; mirror = false; x = 0f; y = -3.5f; rotateSpeed = 1.4f; rotate = true; shootY = 23f; shake = 6f; recoil = 10.5f; shadow = 50f; shootSound = Sounds.railgun; ejectEffect = Fx.none; bullet = new RailBulletType(){{ shootEffect = Fx.railShoot; length = 500; pointEffectSpace = 60f; pierceEffect = Fx.railHit; pointEffect = Fx.railTrail; hitEffect = Fx.massiveExplosion; smokeEffect = Fx.shootBig2; damage = 1250; pierceDamageFactor = 0.5f; }}; }}); }};
//endregion
//region naval support
// retusa: small support boat with a repair beam and a mine-dropping weapon whose
// mines heal allied targets (healPercent) and damage ground enemies on contact.
retusa = new UnitType("retusa"){{ speed = 0.9f; targetAir = false; drag = 0.14f; hitSize = 11f; health = 270; accel = 0.4f; rotateSpeed = 5f; trailLength = 20; waveTrailX = 5f; trailScl = 1.3f; faceTarget = false; range = 100f; ammoType = new PowerAmmoType(900); armor = 3f; buildSpeed = 1.5f; rotateToBuilding = false; weapons.add(new RepairBeamWeapon("repair-beam-weapon-center"){{ x = 0f; y = -5.5f; shootY = 6f; beamWidth = 0.8f; mirror = false; repairSpeed = 0.75f; bullet = new BulletType(){{ maxRange = 120f; }}; }}); weapons.add(new Weapon(){{ mirror = false; rotate = true; reload = 90f; x = y = shootX = shootY = 0f; shootSound = Sounds.mineDeploy; rotateSpeed = 180f; targetAir = false; shoot.shots = 3; shoot.shotDelay = 7f; bullet = new BasicBulletType(){{ sprite = "mine-bullet"; width = height = 8f; layer = Layer.scorch; shootEffect = smokeEffect = Fx.none; maxRange = 50f; ignoreRotation = true; healPercent = 4f; backColor = Pal.heal; frontColor = Color.white; mixColorTo = Color.white; hitSound = Sounds.plasmaboom; ejectEffect = Fx.none; hitSize = 22f; collidesAir = false; lifetime = 87f; hitEffect = new MultiEffect(Fx.blastExplosion, Fx.greenCloud); keepVelocity = false; shrinkX = shrinkY = 0f; inaccuracy = 2f; weaveMag = 5f; weaveScale = 4f; speed = 0.7f; drag = -0.017f; homingPower = 0.05f; collideFloor = true; trailColor = Pal.heal; trailWidth = 3f; trailLength = 8; splashDamage = 33f; splashDamageRadius = 32f; }}; }}); }};
// oxynoe: coal-fueled support boat with a piercing plasma-flame weapon that also
// heals allied buildings (collidesTeam + healPercent), a point-defense mount, and
// a periodic overclock status field.
oxynoe = new UnitType("oxynoe"){{ health = 560; speed = 0.83f; drag = 0.14f; hitSize = 14f; armor = 4f; accel = 0.4f; rotateSpeed = 4f; faceTarget = false; trailLength = 22; waveTrailX = 5.5f; waveTrailY = -4f; trailScl = 1.9f; ammoType = new ItemAmmoType(Items.coal); abilities.add(new StatusFieldAbility(StatusEffects.overclock, 60f * 6, 60f * 6f, 60f)); buildSpeed = 2f; rotateToBuilding = false; weapons.add(new Weapon("plasma-mount-weapon"){{ reload = 5f; x = 4.5f; y = 6.5f; rotate = true; rotateSpeed = 5f; inaccuracy = 10f; ejectEffect = Fx.casing1; shootSound = Sounds.flame; shootCone = 30f; bullet = new BulletType(3.4f, 23f){{ healPercent = 1.5f; collidesTeam = true; ammoMultiplier = 3f; hitSize = 7f; lifetime = 18f; pierce = true; collidesAir = false; statusDuration = 60f * 4; hitEffect = Fx.hitFlamePlasma; ejectEffect = Fx.none; despawnEffect = Fx.none; status = StatusEffects.burning; keepVelocity = false; hittable = false; shootEffect = new Effect(32f, 80f, e -> { color(Color.white, Pal.heal, Color.gray, e.fin()); randLenVectors(e.id, 8, e.finpow() * 60f, e.rotation, 10f, (x, y) -> { Fill.circle(e.x + x, e.y + y, 0.65f + e.fout() * 1.5f); Drawf.light(e.x + x, e.y + y, 16f * e.fout(), Pal.heal, 0.6f); }); }); }}; }}); weapons.add(new PointDefenseWeapon("point-defense-mount"){{ mirror = false; x = 0f; y = 1f; reload = 9f; targetInterval = 10f; targetSwitchInterval = 15f; bullet = new BulletType(){{ shootEffect = Fx.sparkShoot; hitEffect = Fx.pointHit; maxRange = 100f; damage = 17f; }}; }}); }};
// cyerce: graphite-fueled support boat with a repair beam and a homing flak-missile
// weapon; missiles fragment into smaller healing (collidesTeam) homing missiles.
cyerce = new UnitType("cyerce"){{ health = 870; speed = 0.86f; accel = 0.22f; rotateSpeed = 2.6f; drag = 0.16f; hitSize = 20f; armor = 6f; faceTarget = false; ammoType = new ItemAmmoType(Items.graphite); trailLength = 23; waveTrailX = 9f; waveTrailY = -9f; trailScl = 2f; buildSpeed = 2f; rotateToBuilding = false; weapons.add(new RepairBeamWeapon("repair-beam-weapon-center"){{ x = 11f; y = -10f; shootY = 6f; beamWidth = 0.8f; repairSpeed = 0.7f; bullet = new BulletType(){{ maxRange = 130f; }}; }}); weapons.add(new Weapon("plasma-missile-mount"){{ reload = 60f; x = 9f; y = 3f; shadow = 5f; rotateSpeed = 4f; rotate = true; inaccuracy = 1f; velocityRnd = 0.1f; shootSound = Sounds.missile; ejectEffect = Fx.none; bullet = new FlakBulletType(2.5f, 25){{ sprite = "missile-large"; //for targeting
collidesGround = collidesAir = true; explodeRange = 40f; width = height = 12f; shrinkY = 0f; drag = -0.003f; homingRange = 60f; keepVelocity = false; lightRadius = 60f; lightOpacity = 0.7f; lightColor = Pal.heal; splashDamageRadius = 30f; splashDamage = 25f; lifetime = 80f; backColor = Pal.heal; frontColor = Color.white; hitEffect = new ExplosionEffect(){{ lifetime = 28f; waveStroke = 6f; waveLife = 10f; waveRadBase = 7f; waveColor = Pal.heal; waveRad = 30f; smokes = 6; smokeColor = Color.white; sparkColor = Pal.heal; sparks = 6; sparkRad = 35f; sparkStroke = 1.5f; sparkLen = 4f; }}; weaveScale = 8f; weaveMag = 1f; trailColor = Pal.heal; trailWidth = 4.5f; trailLength = 29; fragBullets = 7; fragVelocityMin = 0.3f; fragBullet = new MissileBulletType(3.9f, 11){{ homingPower = 0.2f; weaveMag = 4; weaveScale = 4; lifetime = 60f; keepVelocity = false; shootEffect = Fx.shootHeal; smokeEffect = Fx.hitLaser; splashDamage = 13f; splashDamageRadius = 20f; frontColor = Color.white; hitSound = Sounds.none; lightColor = Pal.heal; lightRadius = 40f; lightOpacity = 0.7f; trailColor = Pal.heal; trailWidth = 2.5f; trailLength = 20; trailChance = -1f; healPercent = 2.8f; collidesTeam = true; backColor = Pal.heal; despawnEffect = Fx.none; hitEffect = new ExplosionEffect(){{ lifetime = 20f; waveStroke = 2f; waveColor = Pal.heal; waveRad = 12f; smokeSize = 0f; smokeSizeBase = 0f; sparkColor = Pal.heal; sparks = 9; sparkRad = 35f; sparkLen = 4f; sparkStroke = 1.5f; }}; }}; }}; }}); }};
// aegires: large support ship built around an EnergyFieldAbility (multi-target
// zap/heal field) plus two mirrored point-defense mounts placed via the loop below.
aegires = new UnitType("aegires"){{ health = 12000; armor = 12f; speed = 0.7f; drag = 0.17f; hitSize = 44f; accel = 0.2f; rotateSpeed = 1.4f; faceTarget = false; ammoType = new PowerAmmoType(3500); ammoCapacity = 40; //clip size is massive due to energy field
clipSize = 250f; trailLength = 50; waveTrailX = 18f; waveTrailY = -17f; trailScl = 3.2f; buildSpeed = 3f; rotateToBuilding = false; abilities.add(new EnergyFieldAbility(40f, 65f, 180f){{ statusDuration = 60f * 6f; maxTargets = 25; healPercent = 1.5f; sameTypeHealMult = 0.5f; }}); for(float mountY : new float[]{-18f, 14}){ weapons.add(new PointDefenseWeapon("point-defense-mount"){{ x = 12.5f; y = mountY; reload = 4f; targetInterval = 8f; targetSwitchInterval = 8f; bullet = new BulletType(){{ shootEffect = Fx.sparkShoot; hitEffect = Fx.pointHit; maxRange = 180f; damage = 30f; }}; }}); } }};
// navanax: flagship support ship; four auto-targeting continuous plasma lasers
// (placed by the nested mountY/sign loops), a suppression field ability, and a
// rotating EMP cannon whose bullet heals/energizes allies and zaps enemies in a
// 100-radius blast (custom trail/hit effects drawn inline).
navanax = new UnitType("navanax"){{ health = 20000; speed = 0.65f; drag = 0.17f; hitSize = 58f; armor = 16f; accel = 0.2f; rotateSpeed = 1.1f; faceTarget = false; ammoType = new PowerAmmoType(4500); trailLength = 70; waveTrailX = 23f; waveTrailY = -32f; trailScl = 3.5f; buildSpeed = 3.5f; rotateToBuilding = false; for(float mountY : new float[]{-117/4f, 50/4f}){ for(float sign : Mathf.signs){ weapons.add(new Weapon("plasma-laser-mount"){{ shadow = 20f; controllable = false; autoTarget = true; mirror = false; shake = 3f; shootY = 7f; rotate = true; x = 84f/4f * sign; y = mountY; targetInterval = 20f; targetSwitchInterval = 35f; rotateSpeed = 3.5f; reload = 170f; recoil = 1f; shootSound = Sounds.beam; continuous = true; cooldownTime = reload; immunities.add(StatusEffects.burning); bullet = new ContinuousLaserBulletType(){{ maxRange = 90f; damage = 27f; length = 95f; hitEffect = Fx.hitMeltHeal; drawSize = 200f; lifetime = 155f; shake = 1f; shootEffect = Fx.shootHeal; smokeEffect = Fx.none; width = 4f; largeHit = false; incendChance = 0.03f; incendSpread = 5f; incendAmount = 1; healPercent = 0.4f; collidesTeam = true; colors = new Color[]{Pal.heal.cpy().a(.2f), Pal.heal.cpy().a(.5f), Pal.heal.cpy().mul(1.2f), Color.white}; }}; }}); } } abilities.add(new SuppressionFieldAbility(){{ orbRadius = 5; particleSize = 3; y = -10f; particles = 10; color = particleColor = effectColor = Pal.heal; }}); weapons.add(new Weapon("emp-cannon-mount"){{ rotate = true; x = 70f/4f; y = -26f/4f; reload = 65f; shake = 3f; rotateSpeed = 2f; shadow = 30f; shootY = 7f; recoil = 4f; cooldownTime = reload - 10f; //TODO better sound
shootSound = Sounds.laser; bullet = new EmpBulletType(){{ float rad = 100f; scaleLife = true; lightOpacity = 0.7f; unitDamageScl = 0.8f; healPercent = 20f; timeIncrease = 3f; timeDuration = 60f * 20f; powerDamageScl = 3f; damage = 60; hitColor = lightColor = Pal.heal; lightRadius = 70f; clipSize = 250f; shootEffect = Fx.hitEmpSpark; smokeEffect = Fx.shootBigSmoke2; lifetime = 60f; sprite = "circle-bullet"; backColor = Pal.heal; frontColor = Color.white; width = height = 12f; shrinkY = 0f; speed = 5f; trailLength = 20; trailWidth = 6f; trailColor = Pal.heal; trailInterval = 3f; splashDamage = 70f; splashDamageRadius = rad; hitShake = 4f; trailRotation = true; status = StatusEffects.electrified; hitSound = Sounds.plasmaboom; trailEffect = new Effect(16f, e -> { color(Pal.heal); for(int s : Mathf.signs){ Drawf.tri(e.x, e.y, 4f, 30f * e.fslope(), e.rotation + 90f*s); } }); hitEffect = new Effect(50f, 100f, e -> { e.scaled(7f, b -> { color(Pal.heal, b.fout()); Fill.circle(e.x, e.y, rad); }); color(Pal.heal); stroke(e.fout() * 3f); Lines.circle(e.x, e.y, rad); int points = 10; float offset = Mathf.randomSeed(e.id, 360f); for(int i = 0; i < points; i++){ float angle = i* 360f / points + offset;
//for(int s : Mathf.zeroOne){
Drawf.tri(e.x + Angles.trnsx(angle, rad), e.y + Angles.trnsy(angle, rad), 6f, 50f * e.fout(), angle/* + s*180f*/);
//}
} Fill.circle(e.x, e.y, 12f * e.fout()); color(); Fill.circle(e.x, e.y, 6f * e.fout()); Drawf.light(e.x, e.y, rad * 1.6f, Pal.heal, e.fout()); }); }}; }}); }};
//endregion
//region core
// alpha/beta/gamma: core (starter) units — non-enemy flying builders/miners driven
// by BuilderAI, each tier with slightly better stats and a light weapon whose
// bullets do almost no building damage (buildingDamageMultiplier = 0.01).
alpha = new UnitType("alpha"){{ aiController = BuilderAI::new; isEnemy = false; lowAltitude = true; flying = true; mineSpeed = 6.5f; mineTier = 1; buildSpeed = 0.5f; drag = 0.05f; speed = 3f; rotateSpeed = 15f; accel = 0.1f; fogRadius = 0f; itemCapacity = 30; health = 150f; engineOffset = 6f; hitSize = 8f; alwaysUnlocked = true; weapons.add(new Weapon("small-basic-weapon"){{ reload = 17f; x = 2.75f; y = 1f; top = false; ejectEffect = Fx.casing1; bullet = new BasicBulletType(2.5f, 11){{ width = 7f; height = 9f; lifetime = 60f; shootEffect = Fx.shootSmall; smokeEffect = Fx.shootSmallSmoke; buildingDamageMultiplier = 0.01f; }}; }}); }};
beta = new UnitType("beta"){{ aiController = BuilderAI::new; isEnemy = false; flying = true; mineSpeed = 7f; mineTier = 1; buildSpeed = 0.75f; drag = 0.05f; speed = 3.3f; rotateSpeed = 17f; accel = 0.1f; fogRadius = 0f; itemCapacity = 50; health = 170f; engineOffset = 6f; hitSize = 9f; faceTarget = false; lowAltitude = true; weapons.add(new Weapon("small-mount-weapon"){{ top = false; reload = 20f; x = 3f; y = 0.5f; rotate = true; shoot.shots = 2; shoot.shotDelay = 4f; ejectEffect = Fx.casing1; bullet = new BasicBulletType(3f, 11){{ width = 7f; height = 9f; lifetime = 60f; shootEffect = Fx.shootSmall; smokeEffect = Fx.shootSmallSmoke; buildingDamageMultiplier = 0.01f; }}; }}); }};
gamma = new UnitType("gamma"){{ aiController = BuilderAI::new; isEnemy = false; lowAltitude = true; flying = true; mineSpeed = 8f; mineTier = 2; buildSpeed = 1f; drag = 0.05f; speed = 3.55f; rotateSpeed = 19f; accel = 0.11f; fogRadius = 0f; itemCapacity = 70; health = 220f; engineOffset = 6f; hitSize = 11f; weapons.add(new Weapon("small-mount-weapon"){{ top = false; reload = 15f; x = 1f; y = 2f; shoot = new ShootSpread(){{ shots = 2; shotDelay = 3f; spread = 2f; }}; inaccuracy = 3f; ejectEffect = Fx.casing1; bullet = new BasicBulletType(3.5f, 11){{ width = 6.5f; height = 11f; lifetime = 70f; shootEffect = Fx.shootSmall; smokeEffect = Fx.shootSmallSmoke; buildingDamageMultiplier = 0.01f; homingPower = 0.04f; }}; }}); }};
//endregion
//region erekir - tank
// stell: tier-1 tank with a single rotating turret firing a trailed missile-sprite shell.
stell = new TankUnitType("stell"){{ hitSize = 12f; treadPullOffset = 3; speed = 0.75f; rotateSpeed = 3.5f; health = 850; armor = 6f; itemCapacity = 0; treadRects = new Rect[]{new Rect(12 - 32f, 7 - 32f, 14, 51)}; researchCostMultiplier = 0f; weapons.add(new Weapon("stell-weapon"){{ layerOffset = 0.0001f; reload = 50f; shootY = 4.5f; recoil = 1f; rotate = true; rotateSpeed = 2.2f; mirror = false; x = 0f; y = -0.75f; heatColor = Color.valueOf("f9350f"); cooldownTime = 30f; bullet = new BasicBulletType(4f, 40){{ sprite = "missile-large"; smokeEffect = Fx.shootBigSmoke; shootEffect = Fx.shootBigColor; width = 5f; height = 7f; lifetime = 40f; hitSize = 4f; hitColor = backColor = trailColor = Color.valueOf("feb380"); frontColor = Color.white; trailWidth = 1.7f; trailLength = 5; despawnEffect = hitEffect = Fx.hitBulletColor; }}; }}); }};
// locus: tank with an alternating rapid-fire railgun; shoot/end/line effects are
// custom-drawn inline (the lineEffect reads its endpoint from e.data as a Vec2).
locus = new TankUnitType("locus"){{ hitSize = 18f; treadPullOffset = 5; speed = 0.7f; rotateSpeed = 2.6f; health = 2100; armor = 8f; itemCapacity = 0; treadRects = new Rect[]{new Rect(17 - 96f/2f, 10 - 96f/2f, 19, 76)}; researchCostMultiplier = 0f; weapons.add(new Weapon("locus-weapon"){{ shootSound = Sounds.bolt; layerOffset = 0.0001f; reload = 18f; shootY = 10f; recoil = 1f; rotate = true; rotateSpeed = 1.4f; mirror = false; shootCone = 2f; x = 0f; y = 0f; heatColor = Color.valueOf("f9350f"); cooldownTime = 30f; shoot = new ShootAlternate(3.5f); bullet = new RailBulletType(){{ length = 160f; damage = 48f; hitColor = Color.valueOf("feb380"); hitEffect = endEffect = Fx.hitBulletColor; pierceDamageFactor = 0.8f; smokeEffect = Fx.colorSpark; endEffect = new Effect(14f, e -> { color(e.color); Drawf.tri(e.x, e.y, e.fout() * 1.5f, 5f, e.rotation); }); shootEffect = new Effect(10, e -> { color(e.color); float w = 1.2f + 7 * e.fout(); Drawf.tri(e.x, e.y, w, 30f * e.fout(), e.rotation); color(e.color); for(int i : Mathf.signs){ Drawf.tri(e.x, e.y, w * 0.9f, 18f * e.fout(), e.rotation + i * 90f); } Drawf.tri(e.x, e.y, w, 4f * e.fout(), e.rotation + 180f); }); lineEffect = new Effect(20f, e -> { if(!(e.data instanceof Vec2 v)) return; color(e.color); stroke(e.fout() * 0.9f + 0.6f); Fx.rand.setSeed(e.id); for(int i = 0; i < 7; i++){ Fx.v.trns(e.rotation, Fx.rand.random(8f, v.dst(e.x, e.y) - 8f)); Lines.lineAngleCenter(e.x + Fx.v.x, e.y + Fx.v.y, e.rotation + e.finpow(), e.foutpowdown() * 20f * Fx.rand.random(0.5f, 1f) + 0.3f); } e.scaled(14f, b -> { stroke(b.fout() * 1.5f); color(e.color); Lines.line(e.x, e.y, v.x, v.y); }); }); }}; }}); }};
// precept: mid-tier tank; cannon shell pierces up to 2 targets and splits into 4
// smaller piercing frag bullets.
precept = new TankUnitType("precept"){{ hitSize = 26f; treadPullOffset = 5; speed = 0.64f; rotateSpeed = 1.5f; health = 5000; armor = 11f; itemCapacity = 0; treadRects = new Rect[]{new Rect(16 - 60f, 48 - 70f, 30, 75), new Rect(44 - 60f, 17 - 70f, 17, 60)}; researchCostMultiplier = 0f; weapons.add(new Weapon("precept-weapon"){{ shootSound = Sounds.dullExplosion; layerOffset = 0.0001f; reload = 80f; shootY = 16f; recoil = 3f; rotate = true; rotateSpeed = 1.3f; mirror = false; shootCone = 2f; x = 0f; y = -1f; heatColor = Color.valueOf("f9350f"); cooldownTime = 30f; bullet = new BasicBulletType(7f, 120){{ sprite = "missile-large"; width = 7.5f; height = 13f; lifetime = 28f; hitSize = 6f; pierceCap = 2; pierce = true; pierceBuilding = true; hitColor = backColor = trailColor = Color.valueOf("feb380"); frontColor = Color.white; trailWidth = 2.8f; trailLength = 8; hitEffect = despawnEffect = Fx.blastExplosion; shootEffect = Fx.shootTitan; smokeEffect = Fx.shootSmokeTitan; splashDamageRadius = 20f; splashDamage = 50f; trailEffect = Fx.hitSquaresColor; trailRotation = true; trailInterval = 3f; fragBullets = 4; fragBullet = new BasicBulletType(5f, 35){{ sprite = "missile-large"; width = 5f; height = 7f; lifetime = 15f; hitSize = 4f; pierceCap = 3; pierce = true; pierceBuilding = true; hitColor = backColor = trailColor = Color.valueOf("feb380"); frontColor = Color.white; trailWidth = 1.7f; trailLength = 3; drag = 0.01f; despawnEffect = hitEffect = Fx.hitBulletColor; }}; }}; }}); }};
// vanquish: heavy tank; main cannon fires a piercing shell with a forward frag
// spread, plus two secondary point weapons added in the loop (fi staggers their reload).
vanquish = new TankUnitType("vanquish"){{ hitSize = 28f; treadPullOffset = 4; speed = 0.63f; health = 11000; armor = 20f; itemCapacity = 0; crushDamage = 13f / 5f; treadRects = new Rect[]{new Rect(22 - 154f/2f, 16 - 154f/2f, 28, 130)}; weapons.add(new Weapon("vanquish-weapon"){{ shootSound = Sounds.mediumCannon; layerOffset = 0.0001f; reload = 70f; shootY = 71f / 4f; shake = 5f; recoil = 4f; rotate = true; rotateSpeed = 1f; mirror = false; x = 0f; y = 0; shadow = 28f; heatColor = Color.valueOf("f9350f"); cooldownTime = 80f; bullet = new BasicBulletType(8f, 190){{ sprite = "missile-large"; width = 9.5f; height = 13f; lifetime = 18f; hitSize = 6f; shootEffect = Fx.shootTitan; smokeEffect = Fx.shootSmokeTitan; pierceCap = 2; pierce = true; pierceBuilding = true; hitColor = backColor = trailColor = Color.valueOf("feb380"); frontColor = Color.white; trailWidth = 3.1f; trailLength = 8; hitEffect = despawnEffect = Fx.blastExplosion; splashDamageRadius = 20f; splashDamage = 50f; fragOnHit = false; fragRandomSpread = 0f; fragSpread = 10f; fragBullets = 5; fragVelocityMin = 1f; despawnSound = Sounds.dullExplosion; fragBullet = new BasicBulletType(8f, 35){{ sprite = "missile-large"; width = 8f; height = 12f; lifetime = 15f; hitSize = 4f; hitColor = backColor = trailColor = Color.valueOf("feb380"); frontColor = Color.white; trailWidth = 2.8f; trailLength = 6; hitEffect = despawnEffect = Fx.blastExplosion; splashDamageRadius = 10f; splashDamage = 20f; }}; }}; }}); int i = 0; for(float f : new float[]{34f / 4f, -36f / 4f}){ int fi = i ++; weapons.add(new Weapon("vanquish-point-weapon"){{ reload = 35f + fi * 5; x = 48f / 4f; y = f; shootY = 5.5f; recoil = 2f; rotate = true; rotateSpeed = 2f; bullet = new BasicBulletType(4.5f, 25){{ width = 6.5f; height = 11f; shootEffect = Fx.sparkShoot; smokeEffect = Fx.shootBigSmoke; hitColor = backColor = trailColor = Color.valueOf("feb380"); frontColor = Color.white; trailWidth = 1.5f; trailLength = 4; hitEffect = despawnEffect = Fx.hitBulletColor; }}; }}); } }};
// conquer: top-tier tank; animated turret parts (glow/sides/sinks/blades driven by
// warmup/reload progress), and a main shell that also launches 12 weaving
// side-bullets (6 per sign) whose speed/lifetime scale with their index.
conquer = new TankUnitType("conquer"){{ hitSize = 46f; treadPullOffset = 1; speed = 0.48f; health = 22000; armor = 26f; crushDamage = 25f / 5f; rotateSpeed = 0.8f; float xo = 231f/2f, yo = 231f/2f; treadRects = new Rect[]{new Rect(27 - xo, 152 - yo, 56, 73), new Rect(24 - xo, 51 - 9 - yo, 29, 17), new Rect(59 - xo, 18 - 9 - yo, 39, 19)}; weapons.add(new Weapon("conquer-weapon"){{ shootSound = Sounds.largeCannon; layerOffset = 0.1f; reload = 100f; shootY = 32.5f; shake = 5f; recoil = 5f; rotate = true; rotateSpeed = 0.6f; mirror = false; x = 0f; y = -2f; shadow = 50f; heatColor = Color.valueOf("f9350f"); shootWarmupSpeed = 0.06f; cooldownTime = 110f; heatColor = Color.valueOf("f9350f"); minWarmup = 0.9f; parts.addAll( new RegionPart("-glow"){{ color = Color.red; blending = Blending.additive; outline = mirror = false; }}, new RegionPart("-sides"){{ progress = PartProgress.warmup; mirror = true; under = true; moveX = 0.75f; moveY = 0.75f; moveRot = 82f; x = 37 / 4f; y = 8 / 4f; }}, new RegionPart("-sinks"){{ progress = PartProgress.warmup; mirror = true; under = true; heatColor = new Color(1f, 0.1f, 0.1f); moveX = 17f / 4f; moveY = -15f / 4f; x = 32 / 4f; y = -34 / 4f; }}, new RegionPart("-sinks-heat"){{ blending = Blending.additive; progress = PartProgress.warmup; mirror = true; outline = false; colorTo = new Color(1f, 0f, 0f, 0.5f); color = colorTo.cpy().a(0f); moveX = 17f / 4f; moveY = -15f / 4f; x = 32 / 4f; y = -34 / 4f; }} ); for(int i = 1; i <= 3; i++){ int fi = i; parts.add(new RegionPart("-blade"){{ progress = PartProgress.warmup.delay((3 - fi) * 0.3f).blend(PartProgress.reload, 0.3f); heatProgress = PartProgress.heat.add(0.3f).min(PartProgress.warmup); heatColor = new Color(1f, 0.1f, 0.1f); mirror = true; under = true; moveRot = -40f * fi; moveX = 3f; layerOffset = -0.002f; x = 11 / 4f; }}); } bullet = new BasicBulletType(8f, 360f){{ sprite = "missile-large"; width = 12f; height = 20f; lifetime = 35f; hitSize = 6f; smokeEffect = Fx.shootSmokeTitan; pierceCap = 3; pierce = true; pierceBuilding = true; hitColor = backColor = trailColor = Color.valueOf("feb380"); frontColor = Color.white; trailWidth = 4f; trailLength = 9; hitEffect = despawnEffect = Fx.massiveExplosion; shootEffect = new ExplosionEffect(){{ lifetime = 40f; waveStroke = 4f; waveColor = sparkColor = trailColor; waveRad = 15f; smokeSize = 5f; smokes = 8; smokeSizeBase = 0f; smokeColor = trailColor; sparks = 8; sparkRad = 40f; sparkLen = 4f; sparkStroke = 3f; }}; int count = 6; for(int j = 0; j < count; j++){ int s = j; for(int i : Mathf.signs){ float fin = 0.05f + (j + 1) / (float)count; float spd = speed; float life = lifetime / Mathf.lerp(fin, 1f, 0.5f); spawnBullets.add(new BasicBulletType(spd * fin, 60){{ drag = 0.002f; width = 12f; height = 11f; lifetime = life + 5f; weaveRandom = false; hitSize = 5f; pierceCap = 2; pierce = true; pierceBuilding = true; hitColor = backColor = trailColor = Color.valueOf("feb380"); frontColor = Color.white; trailWidth = 2.5f; trailLength = 7; weaveScale = (3f + s/2f) / 1.2f; weaveMag = i * (4f - fin * 2f); splashDamage = 65f; splashDamageRadius = 30f; despawnEffect = new ExplosionEffect(){{ lifetime = 50f; waveStroke = 4f; waveColor = sparkColor = trailColor; waveRad = 30f; smokeSize = 7f; smokes = 6; smokeSizeBase = 0f; smokeColor = trailColor; sparks = 5; sparkRad = 30f; sparkLen = 3f; sparkStroke = 1.5f; }}; }}); } } }}; }}); parts.add(new RegionPart("-glow"){{ color = Color.red; blending = Blending.additive; layer = -1f; outline = false; }}); }};
//endregion
//region erekir - mech
// merui: small hovering leg unit; single artillery weapon with a splash shell and
// custom square-flash shoot/hit effects; cannot target air.
merui = new ErekirUnitType("merui"){{ speed = 0.72f; drag = 0.11f; hitSize = 9f; rotateSpeed = 3f; health = 680; armor = 4f; legStraightness = 0.3f; stepShake = 0f; legCount = 6; legLength = 8f; lockLegBase = true; legContinuousMove = true; legExtension = -2f; legBaseOffset = 3f; legMaxLength = 1.1f; legMinLength = 0.2f; legLengthScl = 0.96f; legForwardScl = 1.1f; legGroupSize = 3; rippleScale = 0.2f; legMoveSpace = 1f; allowLegStep = true; hovering = true; legPhysicsLayer = false; shadowElevation = 0.1f; groundLayer = Layer.legUnit - 1f; targetAir = false; researchCostMultiplier = 0f; weapons.add(new Weapon("merui-weapon"){{ shootSound = Sounds.missile; mirror = false; showStatSprite = false; x = 0f; y = 1f; shootY = 4f; reload = 60f; cooldownTime = 42f; heatColor = Pal.turretHeat; bullet = new ArtilleryBulletType(3f, 40){{ shootEffect = new MultiEffect(Fx.shootSmallColor, new Effect(9, e -> { color(Color.white, e.color, e.fin()); stroke(0.7f + e.fout()); Lines.square(e.x, e.y, e.fin() * 5f, e.rotation + 45f); Drawf.light(e.x, e.y, 23f, e.color, e.fout() * 0.7f); })); collidesTiles = true; backColor = hitColor = Pal.techBlue; frontColor = Color.white; knockback = 0.8f; lifetime = 50f; width = height = 9f; splashDamageRadius = 19f; splashDamage = 30f; trailLength = 27; trailWidth = 2.5f; trailEffect = Fx.none; trailColor = backColor; trailInterp = Interp.slope; shrinkX = 0.6f; shrinkY = 0.2f; hitEffect = despawnEffect = new MultiEffect(Fx.hitSquaresColor, new WaveEffect(){{ colorFrom = colorTo = Pal.techBlue; sizeTo = splashDamageRadius + 2f; lifetime = 9f; strokeFrom = 2f; }}); }}; }}); }};
// cleroi: hovering leg unit with five animated spine parts (reload-driven), a
// splash blaster, and a dedicated point-defense mount.
cleroi = new ErekirUnitType("cleroi"){{ speed = 0.7f; drag = 0.1f; hitSize = 14f; rotateSpeed = 3f; health = 1100; armor = 5f; stepShake = 0f; legCount = 4; legLength = 14f; lockLegBase = true; legContinuousMove = true; legExtension = -3f; legBaseOffset = 5f; legMaxLength = 1.1f; legMinLength = 0.2f; legLengthScl = 0.95f; legForwardScl = 0.7f; legMoveSpace = 1f; hovering = true; shadowElevation = 0.2f; groundLayer = Layer.legUnit - 1f; for(int i = 0; i < 5; i++){ int fi = i; parts.add(new RegionPart("-spine"){{ y = 21f / 4f - 45f / 4f * fi / 4f; moveX = 21f / 4f + Mathf.slope(fi / 4f) * 1.25f; moveRot = 10f - fi * 14f; float fin = fi / 4f; progress = PartProgress.reload.inv().mul(1.3f).add(0.1f).sustain(fin * 0.34f, 0.14f, 0.14f); layerOffset = -0.001f; mirror = true; }}); } weapons.add(new Weapon("cleroi-weapon"){{ shootSound = Sounds.blaster; x = 14f / 4f; y = 33f / 4f; reload = 30f; layerOffset = -0.002f; alternate = false; heatColor = Color.red; cooldownTime = 25f; smoothReloadSpeed = 0.15f; recoil = 2f; bullet = new BasicBulletType(3.5f, 30){{ backColor = trailColor = hitColor = Pal.techBlue; frontColor = Color.white; width = 7.5f; height = 10f; lifetime = 40f; trailWidth = 2f; trailLength = 4; shake = 1f; trailEffect = Fx.missileTrail; trailParam = 1.8f; trailInterval = 6f; splashDamageRadius = 30f; splashDamage = 43f; hitEffect = despawnEffect = new MultiEffect(Fx.hitBulletColor, new WaveEffect(){{ colorFrom = colorTo = Pal.techBlue; sizeTo = splashDamageRadius + 3f; lifetime = 9f; strokeFrom = 3f; }}); shootEffect = new MultiEffect(Fx.shootBigColor, new Effect(9, e -> { color(Color.white, e.color, e.fin()); stroke(0.7f + e.fout()); Lines.square(e.x, e.y, e.fin() * 5f, e.rotation + 45f); Drawf.light(e.x, e.y, 23f, e.color, e.fout() * 0.7f); })); smokeEffect = Fx.shootSmokeSquare; ammoMultiplier = 2; }}; }}); weapons.add(new PointDefenseWeapon("cleroi-point-defense"){{ x = 16f / 4f; y = -20f / 4f; reload = 9f; targetInterval = 9f; targetSwitchInterval = 12f; recoil = 0.5f; bullet = new BulletType(){{ shootSound = Sounds.lasershoot; shootEffect = Fx.sparkShoot; hitEffect = Fx.pointHit; maxRange = 100f; damage = 38f; }}; }}); }};
// anthicus: leg unit whose weapon fires zero-speed bullets that spawn a guided
// "anthicus-missile" MissileUnitType; the missile explodes on death via a
// shootOnDeath weapon carrying an ExplosionBulletType.
anthicus = new ErekirUnitType("anthicus"){{ speed = 0.65f; drag = 0.1f; hitSize = 21f; rotateSpeed = 3f; health = 2900; armor = 7f; fogRadius = 40f; stepShake = 0f; legCount = 6; legLength = 18f; legGroupSize = 3; lockLegBase = true; legContinuousMove = true; legExtension = -3f; legBaseOffset = 7f; legMaxLength = 1.1f; legMinLength = 0.2f; legLengthScl = 0.95f; legForwardScl = 0.9f; legMoveSpace = 1f; hovering = true; shadowElevation = 0.2f; groundLayer = Layer.legUnit - 1f; for(int j = 0; j < 3; j++){ int i = j; parts.add(new RegionPart("-blade"){{ layerOffset = -0.01f; heatLayerOffset = 0.005f; x = 2f; moveX = 6f + i * 1.9f; moveY = 8f + -4f * i; moveRot = 40f - i * 25f; mirror = true; progress = PartProgress.warmup.delay(i * 0.2f); heatProgress = p -> Mathf.absin(Time.time + i * 14f, 7f, 1f); heatColor = Pal.techBlue; }}); } weapons.add(new Weapon("anthicus-weapon"){{ shootSound = Sounds.missileLarge; x = 29f / 4f; y = -11f / 4f; shootY = 1.5f; showStatSprite = false; reload = 130f; layerOffset = 0.01f; heatColor = Color.red; cooldownTime = 60f; smoothReloadSpeed = 0.15f; shootWarmupSpeed = 0.05f; minWarmup = 0.9f; rotationLimit = 70f; rotateSpeed = 2f; inaccuracy = 20f; shootStatus = StatusEffects.slow; alwaysShootWhenMoving = true; rotate = true; shoot = new ShootPattern(){{ shots = 2; shotDelay = 6f; }}; parts.add(new RegionPart("-blade"){{ mirror = true; moveRot = -25f; under = true; moves.add(new PartMove(PartProgress.reload, 1f, 0f, 0f)); heatColor = Color.red; cooldownTime = 60f; }}); parts.add(new RegionPart("-blade"){{ mirror = true; moveRot = -50f; moveY = -2f; moves.add(new PartMove(PartProgress.reload.shorten(0.5f), 1f, 0f, -15f)); under = true; heatColor = Color.red; cooldownTime = 60f; }}); bullet = new BulletType(){{ shootEffect = new MultiEffect(Fx.shootBigColor, new Effect(9, e -> { color(Color.white, e.color, e.fin()); stroke(0.7f + e.fout()); Lines.square(e.x, e.y, e.fin() * 5f, e.rotation + 45f); Drawf.light(e.x, e.y, 23f, e.color, e.fout() * 0.7f); }), new WaveEffect(){{ colorFrom = colorTo = Pal.techBlue; sizeTo = 15f; lifetime = 12f; strokeFrom = 3f; }}); smokeEffect = Fx.shootBigSmoke2; shake = 2f; speed = 0f; keepVelocity = false; inaccuracy = 2f; spawnUnit = new MissileUnitType("anthicus-missile"){{ trailColor = engineColor = Pal.techBlue; engineSize = 1.75f; engineLayer = Layer.effect; speed = 3.7f; maxRange = 6f; lifetime = 60f * 1.5f; outlineColor = Pal.darkOutline; health = 55; lowAltitude = true; parts.add(new FlarePart(){{ progress = PartProgress.life.slope().curve(Interp.pow2In); radius = 0f; radiusTo = 35f; stroke = 3f; rotation = 45f; y = -5f; followRotation = true; }}); weapons.add(new Weapon(){{ shootCone = 360f; mirror = false; reload = 1f; shootOnDeath = true; bullet = new ExplosionBulletType(140f, 25f){{ shootEffect = new MultiEffect(Fx.massiveExplosion, new WrapEffect(Fx.dynamicSpikes, Pal.techBlue, 24f), new WaveEffect(){{ colorFrom = colorTo = Pal.techBlue; sizeTo = 40f; lifetime = 12f; strokeFrom = 4f; }}); }}; }}); }}; }}; }}); }};
// tecta: leg unit with a frontal arc shield ability, leg stomp splash damage, and
// mirrored triple-shot homing missile weapons.
tecta = new ErekirUnitType("tecta"){{ drag = 0.1f; speed = 0.6f; hitSize = 23f; health = 7300; armor = 5f; lockLegBase = true; legContinuousMove = true; legGroupSize = 3; legStraightness = 0.4f; baseLegStraightness = 0.5f; legMaxLength = 1.3f; researchCostMultiplier = 0f; abilities.add(new ShieldArcAbility(){{ region = "tecta-shield"; radius = 36f; angle = 82f; regen = 0.6f; cooldown = 60f * 8f; max = 2000f; y = -20f; width = 6f; whenShooting = false; }}); rotateSpeed = 2.1f; legCount = 6; legLength = 15f; legForwardScl = 0.45f; legMoveSpace = 1.4f; rippleScale = 2f; stepShake = 0.5f; legExtension = -5f; legBaseOffset = 5f; ammoType = new PowerAmmoType(2000); legSplashDamage = 32; legSplashRange = 30; drownTimeMultiplier = 2f; hovering = true; shadowElevation = 0.4f; groundLayer = Layer.legUnit; weapons.add(new Weapon("tecta-weapon"){{ shootSound = Sounds.malignShoot; mirror = true; top = false; x = 62/4f; y = 1f; shootY = 47 / 4f; recoil = 3f; reload = 40f; shake = 3f; cooldownTime = 40f; shoot.shots = 3; inaccuracy = 3f; velocityRnd = 0.33f; heatColor = Color.red; bullet = new MissileBulletType(4.2f, 60){{ homingPower = 0.2f; weaveMag = 4; weaveScale = 4; lifetime = 55f; shootEffect = Fx.shootBig2; smokeEffect = Fx.shootSmokeTitan; splashDamage = 70f; splashDamageRadius = 30f; frontColor = Color.white; hitSound = Sounds.none; width = height = 10f; lightColor = trailColor = backColor = Pal.techBlue; lightRadius = 40f; lightOpacity = 0.7f; trailWidth = 2.8f; trailLength = 20; trailChance = -1f; despawnSound = Sounds.dullExplosion; despawnEffect = Fx.none; hitEffect = new ExplosionEffect(){{ lifetime = 20f; waveStroke = 2f; waveColor = sparkColor = trailColor; waveRad = 12f; smokeSize = 0f; smokeSizeBase = 0f; sparks = 10; sparkRad = 35f; sparkLen = 4f; sparkStroke = 1.5f; }}; }}; }}); }};
// collaris: top-tier leg unit; mirrored artillery cannons with five animated blade
// parts each; shells burst into 15 homing frag bullets. Cannot target air.
collaris = new ErekirUnitType("collaris"){{ drag = 0.1f; speed = 1.1f; hitSize = 44f; health = 18000; armor = 9f; rotateSpeed = 1.6f; lockLegBase = true; legContinuousMove = true; legStraightness = 0.6f; baseLegStraightness = 0.5f; legCount = 8; legLength = 30f; legForwardScl = 2.1f; legMoveSpace = 1.05f; rippleScale = 1.2f; stepShake = 0.5f; legGroupSize = 2; legExtension = -6f; legBaseOffset = 19f; legStraightLength = 0.9f; legMaxLength = 1.2f; ammoType = new PowerAmmoType(2000); legSplashDamage = 32; legSplashRange = 32; drownTimeMultiplier = 2f; hovering = true; shadowElevation = 0.4f; groundLayer = Layer.legUnit; targetAir = false; alwaysShootWhenMoving = true; weapons.add(new Weapon("collaris-weapon"){{ shootSound = Sounds.pulseBlast; mirror = true; rotationLimit = 30f; rotateSpeed = 0.4f; rotate = true; x = 48 / 4f; y = -28f / 4f; shootY = 64f / 4f; recoil = 4f; reload = 130f; cooldownTime = reload * 1.2f; shake = 7f; layerOffset = 0.02f; shadow = 10f; shootStatus = StatusEffects.slow; shootStatusDuration = reload + 1f; shoot.shots = 1; heatColor = Color.red; for(int i = 0; i < 5; i++){ int fi = i; parts.add(new RegionPart("-blade"){{ under = true; layerOffset = -0.001f; heatColor = Pal.techBlue; heatProgress = PartProgress.heat.add(0.2f).min(PartProgress.warmup); progress = PartProgress.warmup.blend(PartProgress.reload, 0.1f); x = 13.5f / 4f; y = 10f / 4f - fi * 2f; moveY = 1f - fi * 1f; moveX = fi * 0.3f; moveRot = -45f - fi * 17f; moves.add(new PartMove(PartProgress.reload.inv().mul(1.8f).inv().curve(fi / 5f, 0.2f), 0f, 0f, 36f)); }}); } bullet = new ArtilleryBulletType(5.5f, 260){{ collidesTiles = collides = true; lifetime = 70f; shootEffect = Fx.shootBigColor; smokeEffect = Fx.shootSmokeSquareBig; frontColor = Color.white; trailEffect = new MultiEffect(Fx.artilleryTrail, Fx.artilleryTrailSmoke); hitSound = Sounds.none; width = 18f; height = 24f; lightColor = trailColor = hitColor = backColor = Pal.techBlue; lightRadius = 40f; lightOpacity = 0.7f; trailWidth = 4.5f; trailLength = 19; trailChance = -1f; despawnEffect = Fx.none; despawnSound = Sounds.dullExplosion; hitEffect = despawnEffect = new ExplosionEffect(){{ lifetime = 34f; waveStroke = 4f; waveColor = sparkColor = trailColor; waveRad = 25f; smokeSize = 0f; smokeSizeBase = 0f; sparks = 10; sparkRad = 25f; sparkLen = 8f; sparkStroke = 3f; }}; splashDamage = 85f; splashDamageRadius = 20f; fragBullets = 15; fragVelocityMin = 0.5f; fragRandomSpread = 130f; fragLifeMin = 0.3f; despawnShake = 5f; fragBullet = new BasicBulletType(5.5f, 50){{ pierceCap = 2; pierceBuilding = true; homingPower = 0.09f; homingRange = 150f; lifetime = 50f; shootEffect = Fx.shootBigColor; smokeEffect = Fx.shootSmokeSquareBig; frontColor = Color.white; hitSound = Sounds.none; width = 12f; height = 20f; lightColor = trailColor = hitColor = backColor = Pal.techBlue; lightRadius = 40f; lightOpacity = 0.7f; trailWidth = 2.2f; trailLength = 7; trailChance = -1f; collidesAir = false; despawnEffect = Fx.none; splashDamage = 46f; splashDamageRadius = 30f; hitEffect = despawnEffect = new MultiEffect(new ExplosionEffect(){{ lifetime = 30f; waveStroke = 2f; waveColor = sparkColor = trailColor; waveRad = 5f; smokeSize = 0f; smokeSizeBase = 0f; sparks = 5; sparkRad = 20f; sparkLen = 6f; sparkStroke = 2f; }}, Fx.blastExplosion); }}; }}; }}); }};
//endregion
//region erekir - flying
// elude: hovering scout with decorative HoverParts, a move-trail ability, and a
// mirrored two-shot spread weapon firing homing bullets.
elude = new ErekirUnitType("elude"){{ hovering = true; shadowElevation = 0.1f; drag = 0.07f; speed = 1.8f; rotateSpeed = 5f; accel = 0.09f; health = 600f; armor = 1f; hitSize = 11f; engineOffset = 7f; engineSize = 2f; itemCapacity = 0; useEngineElevation = false; researchCostMultiplier = 0f; abilities.add(new MoveEffectAbility(0f, -7f, Pal.sapBulletBack, Fx.missileTrailShort, 4f){{ teamColor = true; }}); for(float f : new float[]{-3f, 3f}){ parts.add(new HoverPart(){{ x = 3.9f; y = f; mirror = true; radius = 6f; phase = 90f; stroke = 2f; layerOffset = -0.001f; color = Color.valueOf("bf92f9"); }}); } weapons.add(new Weapon("elude-weapon"){{ shootSound = Sounds.blaster; y = -2f; x = 4f; top = true; mirror = true; reload = 40f; baseRotation = -35f; shootCone = 360f; shoot = new ShootSpread(2, 11f); bullet = new BasicBulletType(5f, 16){{ homingPower = 0.19f; homingDelay = 4f; width = 7f; height = 12f; lifetime = 30f; shootEffect = Fx.sparkShoot; smokeEffect = Fx.shootBigSmoke; hitColor = backColor = trailColor = Pal.suppress; frontColor = Color.white; trailWidth = 1.5f; trailLength = 5; hitEffect = despawnEffect = Fx.hitBulletColor; }}; }}); }};
// avert: flying unit with mirrored engines and a fixed forward helix-pattern blaster.
avert = new ErekirUnitType("avert"){{ lowAltitude = false; flying = true; drag = 0.08f; speed = 2f; rotateSpeed = 4f; accel = 0.09f; health = 1100f; armor = 3f; hitSize = 12f; engineSize = 0; fogRadius = 25; itemCapacity = 0; setEnginesMirror( new UnitEngine(35 / 4f, -38 / 4f, 3f, 315f), new UnitEngine(39 / 4f, -16 / 4f, 3f, 315f) ); weapons.add(new Weapon("avert-weapon"){{ shootSound = Sounds.blaster; reload = 35f; x = 0f; y = 6.5f; shootY = 5f; recoil = 1f; top = false; layerOffset = -0.01f; rotate = false; mirror = false; shoot = new ShootHelix(); bullet = new BasicBulletType(5f, 34){{ width = 7f; height = 12f; lifetime = 18f; shootEffect = Fx.sparkShoot; smokeEffect = Fx.shootBigSmoke; hitColor = backColor = trailColor = Pal.suppress; frontColor = Color.white; trailWidth = 1.5f; trailLength = 5; hitEffect = despawnEffect = Fx.hitBulletColor; }}; }}); }};
// obviate: flying unit firing a large helix-moving orb with a lightning interval
// bullet. NOTE: this definition continues past the end of this chunk.
obviate = new ErekirUnitType("obviate"){{ flying = true; drag = 0.08f; speed = 1.8f; rotateSpeed = 2.5f; accel = 0.09f; health = 2300f; armor = 6f; hitSize = 25f; engineSize = 4.3f; engineOffset = 54f / 4f; fogRadius = 25; itemCapacity = 0; lowAltitude = true; setEnginesMirror( new UnitEngine(38 / 4f, -46 / 4f, 3.1f, 315f) ); parts.add( new RegionPart("-blade"){{ moveRot = -10f; moveX = -1f; moves.add(new PartMove(PartProgress.reload, 2f, 1f, -5f)); progress = PartProgress.warmup; mirror = true; children.add(new RegionPart("-side"){{ moveX = 2f; moveY = -2f; progress = PartProgress.warmup; under = true; mirror = true; moves.add(new PartMove(PartProgress.reload, -2f, 2f, 0f)); }}); }}); weapons.add(new Weapon(){{ shootSound = Sounds.shockBlast; x = 0f; y = -2f; shootY = 0f; reload = 140f; mirror = false; minWarmup = 0.95f; shake = 3f; cooldownTime = reload - 10f; bullet = new BasicBulletType(){{ shoot = new ShootHelix(){{ mag = 1f; scl = 5f; }}; shootEffect = new MultiEffect(Fx.shootTitan, new WaveEffect(){{ colorTo = Pal.sapBulletBack; sizeTo = 26f; lifetime = 14f; strokeFrom = 4f; }}); smokeEffect = Fx.shootSmokeTitan; hitColor = Pal.sapBullet; despawnSound = Sounds.spark; sprite = "large-orb"; trailEffect = Fx.missileTrail; trailInterval = 3f; trailParam = 4f; speed = 3f; damage = 75f; lifetime = 60f; width = height = 15f; backColor = Pal.sapBulletBack; frontColor = Pal.sapBullet; shrinkX = shrinkY = 0f; trailColor = Pal.sapBulletBack; trailLength = 12; trailWidth = 2.2f; despawnEffect = hitEffect = new ExplosionEffect(){{ waveColor = Pal.sapBullet; smokeColor = Color.gray; sparkColor = Pal.sap; waveStroke = 4f; waveRad = 40f; }}; intervalBullet = new LightningBulletType(){{ damage = 16; collidesAir = false; ammoMultiplier = 1f; lightningColor = Pal.sapBullet; lightningLength = 3; lightningLengthRand = 6; //for visual stats only.
buildingDamageMultiplier = 0.25f; lightningType = new BulletType(0.0001f, 0f){{ lifetime = Fx.lightning.lifetime; hitEffect = Fx.hitLancer; despawnEffect = Fx.none; status = StatusEffects.shocked; statusDuration = 10f; hittable = false; lightColor = Color.white; buildingDamageMultiplier = 0.25f; }}; }}; bulletInterval = 4f; lightningColor = Pal.sapBullet; lightningDamage = 17; lightning = 8; lightningLength = 2; lightningLengthRand = 8; }}; }}); }}; quell = new ErekirUnitType("quell"){{ aiController = FlyingFollowAI::new; envDisabled = 0; lowAltitude = false; flying = true; drag = 0.06f; speed = 1.1f; rotateSpeed = 3.2f; accel = 0.1f; health = 6000f; armor = 4f; hitSize = 36f; payloadCapacity = Mathf.sqr(3f) * tilePayload; researchCostMultiplier = 0f; targetAir = false; engineSize = 4.8f; engineOffset = 61 / 4f; abilities.add(new SuppressionFieldAbility(){{ orbRadius = 5.3f; y = 1f; }}); weapons.add(new Weapon("quell-weapon"){{ shootSound = Sounds.missileSmall; x = 51 / 4f; y = 5 / 4f; rotate = true; rotateSpeed = 2f; reload = 55f; layerOffset = -0.001f; recoil = 1f; rotationLimit = 60f; bullet = new BulletType(){{ shootEffect = Fx.shootBig; smokeEffect = Fx.shootBigSmoke2; shake = 1f; speed = 0f; keepVelocity = false; collidesAir = false; spawnUnit = new MissileUnitType("quell-missile"){{ targetAir = false; speed = 4.3f; maxRange = 6f; lifetime = 60f * 1.4f; outlineColor = Pal.darkOutline; engineColor = trailColor = Pal.sapBulletBack; engineLayer = Layer.effect; health = 45; loopSoundVolume = 0.1f; weapons.add(new Weapon(){{ shootCone = 360f; mirror = false; reload = 1f; shootOnDeath = true; bullet = new ExplosionBulletType(110f, 25f){{ shootEffect = Fx.massiveExplosion; collidesAir = false; }}; }}); }}; }}; }}); setEnginesMirror( new UnitEngine(62 / 4f, -60 / 4f, 3.9f, 315f), new UnitEngine(72 / 4f, -29 / 4f, 3f, 315f) ); }}; disrupt = new ErekirUnitType("disrupt"){{ aiController = FlyingFollowAI::new; envDisabled = 0; lowAltitude = false; flying = true; drag = 
0.07f; speed = 1f; rotateSpeed = 2f; accel = 0.1f; health = 12000f; armor = 9f; hitSize = 46f; payloadCapacity = Mathf.sqr(6f) * tilePayload; targetAir = false; engineSize = 6f; engineOffset = 25.25f; float orbRad = 5f, partRad = 3f; int parts = 10; abilities.add(new SuppressionFieldAbility(){{ orbRadius = orbRad; particleSize = partRad; y = 10f; particles = parts; }}); for(int i : Mathf.signs){ abilities.add(new SuppressionFieldAbility(){{ orbRadius = orbRad; particleSize = partRad; y = -32f / 4f; x = 43f * i / 4f; particles = parts; //visual only, the middle one does the actual suppressing active = false; }}); } weapons.add(new Weapon("disrupt-weapon"){{ shootSound = Sounds.missileLarge; x = 78f / 4f; y = -10f / 4f; mirror = true; rotate = true; rotateSpeed = 0.4f; reload = 70f; layerOffset = -20f; recoil = 1f; rotationLimit = 22f; minWarmup = 0.95f; shootWarmupSpeed = 0.1f; shootY = 2f; shootCone = 40f; shoot.shots = 3; shoot.shotDelay = 5f; inaccuracy = 28f; parts.add(new RegionPart("-blade"){{ heatProgress = PartProgress.warmup; progress = PartProgress.warmup.blend(PartProgress.reload, 0.15f); heatColor = Color.valueOf("9c50ff"); x = 5 / 4f; y = 0f; moveRot = -33f; moveY = -1f; moveX = -1f; under = true; mirror = true; }}); bullet = new BulletType(){{ shootEffect = Fx.sparkShoot; smokeEffect = Fx.shootSmokeTitan; hitColor = Pal.suppress; shake = 1f; speed = 0f; keepVelocity = false; collidesAir = false; spawnUnit = new MissileUnitType("disrupt-missile"){{ targetAir = false; speed = 4.6f; maxRange = 5f; outlineColor = Pal.darkOutline; health = 70; homingDelay = 10f; lowAltitude = true; engineSize = 3f; engineColor = trailColor = Pal.sapBulletBack; engineLayer = Layer.effect; deathExplosionEffect = Fx.none; loopSoundVolume = 0.1f; parts.add(new ShapePart(){{ layer = Layer.effect; circle = true; y = -0.25f; radius = 1.5f; color = Pal.suppress; colorTo = Color.white; progress = PartProgress.life.curve(Interp.pow5In); }}); parts.add(new RegionPart("-fin"){{ mirror 
= true; progress = PartProgress.life.mul(3f).curve(Interp.pow5In); moveRot = 32f; rotation = -6f; moveY = 1.5f; x = 3f / 4f; y = -6f / 4f; }}); weapons.add(new Weapon(){{ shootCone = 360f; mirror = false; reload = 1f; shootOnDeath = true; bullet = new ExplosionBulletType(140f, 25f){{ collidesAir = false; suppressionRange = 140f; shootEffect = new ExplosionEffect(){{ lifetime = 50f; waveStroke = 5f; waveLife = 8f; waveColor = Color.white; sparkColor = smokeColor = Pal.suppress; waveRad = 40f; smokeSize = 4f; smokes = 7; smokeSizeBase = 0f; sparks = 10; sparkRad = 40f; sparkLen = 6f; sparkStroke = 2f; }}; }}; }}); }}; }}; }}); setEnginesMirror( new UnitEngine(95 / 4f, -56 / 4f, 5f, 330f), new UnitEngine(89 / 4f, -95 / 4f, 4f, 315f) ); }}; //endregion //region erekir - neoplasm renale = new NeoplasmUnitType("renale"){{ health = 500; armor = 2; hitSize = 9f; omniMovement = false; rotateSpeed = 2.5f; drownTimeMultiplier = 2f; segments = 3; drawBody = false; hidden = true; crushDamage = 0.5f; aiController = HugAI::new; targetAir = false; segmentScl = 3f; segmentPhase = 5f; segmentMag = 0.5f; speed = 1.2f; }}; latum = new NeoplasmUnitType("latum"){{ health = 20000; armor = 12; hitSize = 48f; omniMovement = false; rotateSpeed = 1.7f; drownTimeMultiplier = 4f; segments = 4; drawBody = false; hidden = true; crushDamage = 2f; aiController = HugAI::new; targetAir = false; segmentScl = 4f; segmentPhase = 5f; speed = 1f; abilities.add(new SpawnDeathAbility(renale, 5, 11f)); }}; //endregion //region erekir - core float coreFleeRange = 500f; evoke = new ErekirUnitType("evoke"){{ coreUnitDock = true; controller = u -> new BuilderAI(true, coreFleeRange); isEnemy = false; envDisabled = 0; range = 60f; faceTarget = true; targetPriority = -2; lowAltitude = false; mineWalls = true; mineFloor = false; mineHardnessScaling = false; flying = true; mineSpeed = 6f; mineTier = 3; buildSpeed = 1.2f; drag = 0.08f; speed = 5.6f; rotateSpeed = 7f; accel = 0.09f; itemCapacity = 60; health = 300f; 
armor = 1f; hitSize = 9f; engineSize = 0; payloadCapacity = 2f * 2f * tilesize * tilesize; pickupUnits = false; vulnerableWithPayloads = true; fogRadius = 0f; targetable = false; hittable = false; setEnginesMirror( new UnitEngine(21 / 4f, 19 / 4f, 2.2f, 45f), new UnitEngine(23 / 4f, -22 / 4f, 2.2f, 315f) ); weapons.add(new RepairBeamWeapon(){{ widthSinMag = 0.11f; reload = 20f; x = 0f; y = 6.5f; rotate = false; shootY = 0f; beamWidth = 0.7f; repairSpeed = 3.1f; fractionRepairSpeed = 0.06f; aimDst = 0f; shootCone = 15f; mirror = false; targetUnits = false; targetBuildings = true; autoTarget = false; controllable = true; laserColor = Pal.accent; healColor = Pal.accent; bullet = new BulletType(){{ maxRange = 60f; }}; }}); }}; incite = new ErekirUnitType("incite"){{ coreUnitDock = true; controller = u -> new BuilderAI(true, coreFleeRange); isEnemy = false; envDisabled = 0; range = 60f; targetPriority = -2; lowAltitude = false; faceTarget = true; mineWalls = true; mineFloor = false; mineHardnessScaling = false; flying = true; mineSpeed = 8f; mineTier = 3; buildSpeed = 1.4f; drag = 0.08f; speed = 7f; rotateSpeed = 8f; accel = 0.09f; itemCapacity = 90; health = 500f; armor = 2f; hitSize = 11f; payloadCapacity = 2f * 2f * tilesize * tilesize; pickupUnits = false; vulnerableWithPayloads = true; fogRadius = 0f; targetable = false; hittable = false; engineOffset = 7.2f; engineSize = 3.1f; setEnginesMirror( new UnitEngine(25 / 4f, -1 / 4f, 2.4f, 300f) ); weapons.add(new RepairBeamWeapon(){{ widthSinMag = 0.11f; reload = 20f; x = 0f; y = 7.5f; rotate = false; shootY = 0f; beamWidth = 0.7f; aimDst = 0f; shootCone = 15f; mirror = false; repairSpeed = 3.3f; fractionRepairSpeed = 0.06f; targetUnits = false; targetBuildings = true; autoTarget = false; controllable = true; laserColor = Pal.accent; healColor = Pal.accent; bullet = new BulletType(){{ maxRange = 60f; }}; }}); drawBuildBeam = false; weapons.add(new BuildWeapon("build-weapon"){{ rotate = true; rotateSpeed = 7f; x = 14/4f; 
y = 15/4f; layerOffset = -0.001f; shootY = 3f; }}); }}; emanate = new ErekirUnitType("emanate"){{ coreUnitDock = true; controller = u -> new BuilderAI(true, coreFleeRange); isEnemy = false; envDisabled = 0; range = 65f; faceTarget = true; targetPriority = -2; lowAltitude = false; mineWalls = true; mineFloor = false; mineHardnessScaling = false; flying = true; mineSpeed = 9f; mineTier = 3; buildSpeed = 1.5f; drag = 0.08f; speed = 7.5f; rotateSpeed = 8f; accel = 0.08f; itemCapacity = 110; health = 700f; armor = 3f; hitSize = 12f; buildBeamOffset = 8f; payloadCapacity = 2f * 2f * tilesize * tilesize; pickupUnits = false; vulnerableWithPayloads = true; fogRadius = 0f; targetable = false; hittable = false; engineOffset = 7.5f; engineSize = 3.4f; setEnginesMirror( new UnitEngine(35 / 4f, -13 / 4f, 2.7f, 315f), new UnitEngine(28 / 4f, -35 / 4f, 2.7f, 315f) ); weapons.add(new RepairBeamWeapon(){{ widthSinMag = 0.11f; reload = 20f; x = 19f/4f; y = 19f/4f; rotate = false; shootY = 0f; beamWidth = 0.7f; aimDst = 0f; shootCone = 40f; mirror = true; repairSpeed = 3.6f / 2f; fractionRepairSpeed = 0.03f; targetUnits = false; targetBuildings = true; autoTarget = false; controllable = true; laserColor = Pal.accent; healColor = Pal.accent; bullet = new BulletType(){{ maxRange = 65f; }}; }}); }}; //endregion //region internal + special block = new UnitType("block"){{ speed = 0f; hitSize = 0f; health = 1; rotateSpeed = 360f; itemCapacity = 0; hidden = true; internal = true; }}; manifold = new ErekirUnitType("manifold"){{ controller = u -> new CargoAI(); isEnemy = false; allowedInPayloads = false; logicControllable = false; playerControllable = false; envDisabled = 0; payloadCapacity = 0f; lowAltitude = false; flying = true; drag = 0.06f; speed = 3.5f; rotateSpeed = 9f; accel = 0.1f; itemCapacity = 100; health = 200f; hitSize = 11f; engineSize = 2.3f; engineOffset = 6.5f; hidden = true; setEnginesMirror( new UnitEngine(24 / 4f, -24 / 4f, 2.3f, 315f) ); }}; assemblyDrone = new 
ErekirUnitType("assembly-drone"){{ controller = u -> new AssemblerAI(); flying = true; drag = 0.06f; accel = 0.11f; speed = 1.3f; health = 90; engineSize = 2f; engineOffset = 6.5f; payloadCapacity = 0f; targetable = false; bounded = false; outlineColor = Pal.darkOutline; isEnemy = false; hidden = true; useUnitCap = false; logicControllable = false; playerControllable = false; allowedInPayloads = false; createWreck = false; envEnabled = Env.any; envDisabled = Env.none; }}; //endregion } }
Anuken/Mindustry
core/src/mindustry/content/UnitTypes.java
902
/*
 * Copyright (C) 2008 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect.testing;

import com.google.common.annotations.GwtCompatible;
import java.util.Map;
import org.checkerframework.checker.nullness.qual.Nullable;

/**
 * Creates maps, containing sample elements, to be tested.
 *
 * @author George van den Driessche
 */
@GwtCompatible
@ElementTypesAreNonnullByDefault
public interface TestMapGenerator<K extends @Nullable Object, V extends @Nullable Object>
    extends TestContainerGenerator<Map<K, V>, Map.Entry<K, V>> {
  /**
   * Creates a new array to hold map keys of this generator's key type.
   *
   * @param length the desired length of the returned array
   * @return an empty array of length {@code length} whose runtime component type is {@code K}
   */
  K[] createKeyArray(int length);

  /**
   * Creates a new array to hold map values of this generator's value type.
   *
   * @param length the desired length of the returned array
   * @return an empty array of length {@code length} whose runtime component type is {@code V}
   */
  V[] createValueArray(int length);
}
google/guava
android/guava-testlib/src/com/google/common/collect/testing/TestMapGenerator.java
903
package jadx.api;

import java.util.List;

import jadx.api.metadata.ICodeAnnotation;
import jadx.api.metadata.ICodeNodeRef;

/**
 * Common API for nodes of decompiled Java code (classes, methods, fields, ...).
 */
public interface JavaNode {

	/** Reference to this node's position in the decompiled code metadata. */
	ICodeNodeRef getCodeNodeRef();

	/** Short (simple) name of this node. */
	String getName();

	/** Fully qualified name of this node. */
	String getFullName();

	/** Class that directly declares this node. */
	JavaClass getDeclaringClass();

	/** Outermost enclosing class of this node (top-level class in the source file). */
	JavaClass getTopParentClass();

	/** Position of this node's definition in the decompiled code. */
	int getDefPos();

	/** All nodes where this node is used. */
	List<JavaNode> getUseIn();

	/** Removes any alias (rename) applied to this node. */
	void removeAlias();

	/**
	 * Checks whether the given code annotation belongs to this node.
	 * NOTE(review): exact matching semantics are defined by implementations — verify against them.
	 */
	boolean isOwnCodeAnnotation(ICodeAnnotation ann);
}
skylot/jadx
jadx-core/src/main/java/jadx/api/JavaNode.java
904
// Companion Code to the paper "Generative Trees: Adversarial and Copycat" by R. Nock and M. // Guillame-Bert, in ICML'22 import java.io.*; import java.util.*; class Algorithm implements Debuggable { // General variables public static int MAX_DEPTH_DT = 10000; public static int MAX_DEPTH_GT = 10000; public static int FEAT_SIZE = 0; public static int FEAT_SIZE_2 = 0; public static String IRRELEVANT_STRING = "IRRELEVANT"; public static int IRRELEVANT_INT = -1; public static boolean IRRELEVANT_BOOLEAN = false; public static int IRRELEVANT_MAX_INT = 10001; public static String STRATEGY_DT_GROW_ONE_LEAF_HEAVIEST = "STRATEGY_DT_GROW_ONE_LEAF_HEAVIEST"; // STRATEGY_DT_GROW_ONE_LEAF_HEAVIEST: DT | leaf | picks the heaviest leaf wrt current examples public static String[] ALL_STRATEGIES_DT_GROW = {STRATEGY_DT_GROW_ONE_LEAF_HEAVIEST}; public static int STRATEGY_DT_GROW(String s) { int i = 0; do { if (ALL_STRATEGIES_DT_GROW[i].equals(s)) return i; i++; } while (i < ALL_STRATEGIES_DT_GROW.length); return -1; } public static void CHECK_STRATEGY_DT_GROW_CONTAINS(String s) { if (STRATEGY_DT_GROW(s) == -1) Dataset.perror("Algorithm.class :: no such STRATEGY_DT_GROW as " + s); } public static String NO_DT_SPLIT_MAX_SIZE = "NO_DT_SPLIT_MAX_SIZE", NO_DT_SPLIT_MAX_DEPTH = "NO_DT_SPLIT_MAX_DEPTH", NO_DT_SPLIT_NO_SPLITTABLE_LEAF_FOUND = "NO_DT_SPLIT_NO_SPLITTABLE_LEAF_FOUND", DT_SPLIT_OK = "DT_SPLIT_OK"; public static String GT_SPLIT_OK = "GT_SPLIT_OK"; public static String NOW; Vector<Boost> all_algorithms; Domain myDomain; double alpha; public static String[] MONTHS = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" }; public static Random R = new Random(); Algorithm(Domain dom) { all_algorithms = new Vector<>(); myDomain = dom; } public static double RANDOM_P_NOT_HALF() { double vv; do { vv = R.nextDouble(); } while (vv == 0.5); return vv; } public static double RANDOM_P_NOT(double p) { double vv; do { vv = R.nextDouble(); } while (vv == p); return 
vv; } public static void INIT() { Calendar cal = Calendar.getInstance(); NOW = Algorithm.MONTHS[cal.get(Calendar.MONTH)] + "_" + cal.get(Calendar.DAY_OF_MONTH) + "th__" + cal.get(Calendar.HOUR_OF_DAY) + "h_" + cal.get(Calendar.MINUTE) + "m_" + cal.get(Calendar.SECOND) + "s"; } public void addAlgorithm(Vector all_params) { String strategy_dt_grow_one_leaf; int gaming_iters; int i = 0; String n = (String) all_params.elementAt(i); // 0 i++; double alpha = Double.parseDouble((String) all_params.elementAt(i)); // 1 i++; String strategy_game = (String) all_params.elementAt(i); // 2 i++; strategy_dt_grow_one_leaf = Algorithm.STRATEGY_DT_GROW_ONE_LEAF_HEAVIEST; gaming_iters = Integer.parseInt((String) all_params.elementAt(i)); // 3 i++; Boost.COPYCAT_GENERATE_WITH_WHOLE_GT = !Boolean.parseBoolean((String) all_params.elementAt(i)); // 4 i++; all_algorithms.addElement( new Boost( myDomain, n, // keep alpha, // keep strategy_game, // keep strategy_dt_grow_one_leaf, // keep gaming_iters)); // keep } public Generator_Tree simple_go() { return ((Boost) all_algorithms.elementAt(0)).simple_boost(0); } }
google-research/google-research
generative_trees/src/Algorithm.java
905
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.search;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.NestedLookup;
import org.elasticsearch.index.mapper.NestedObjectMapper;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

/**
 * Manages loading information about nested documents
 */
public class NestedDocuments {

    // Per nested path: BitSetProducer matching that path's *parent* documents.
    private final Map<String, BitSetProducer> parentObjectFilters = new HashMap<>();
    // Per nested path: lazily-built Weight matching that path's *child* documents.
    // A null value marks a known path whose Weight has not been built yet.
    private final Map<String, Weight> childObjectFilters = new HashMap<>();
    // Filter matching root (non-nested) documents; null when the index has no nested mappers.
    private final BitSetProducer parentDocumentFilter;
    private final NestedLookup nestedLookup;

    /**
     * Create a new NestedDocuments object for an index
     * @param mappingLookup the index's mapping
     * @param filterProducer a function to build BitSetProducers from filter queries
     * @param indexVersionCreated the index creation version
     */
    public NestedDocuments(MappingLookup mappingLookup, Function<Query, BitSetProducer> filterProducer, IndexVersion indexVersionCreated) {
        this.nestedLookup = mappingLookup.nestedLookup();
        if (this.nestedLookup == NestedLookup.EMPTY) {
            // No nested mappers at all: nothing to build, and getLeafNestedDocuments()
            // will short-circuit on this null.
            this.parentDocumentFilter = null;
        } else {
            this.parentDocumentFilter = filterProducer.apply(Queries.newNonNestedFilter(indexVersionCreated));
            nestedLookup.getNestedParentFilters().forEach((k, v) -> parentObjectFilters.put(k, filterProducer.apply(v)));
            for (String nestedPath : nestedLookup.getNestedMappers().keySet()) {
                // Register the path now; the Weight itself is built on demand
                // in getNestedChildWeight().
                childObjectFilters.put(nestedPath, null);
            }
        }
    }

    /**
     * Returns a LeafNestedDocuments for an index segment
     */
    public LeafNestedDocuments getLeafNestedDocuments(LeafReaderContext ctx) throws IOException {
        if (parentDocumentFilter == null) {
            return LeafNestedDocuments.NO_NESTED_MAPPERS;
        }
        return new HasNestedDocuments(ctx);
    }

    /**
     * Lazily builds (and caches) the Weight matching the child documents of {@code path}.
     * Throws if {@code path} was not registered in the constructor.
     */
    private Weight getNestedChildWeight(LeafReaderContext ctx, String path) throws IOException {
        if (childObjectFilters.containsKey(path) == false) {
            throw new IllegalStateException("Cannot find object mapper for path " + path);
        }
        if (childObjectFilters.get(path) == null) {
            // Build against the top-level context so the cached Weight is valid for any segment.
            IndexSearcher searcher = new IndexSearcher(ReaderUtil.getTopLevelContext(ctx));
            NestedObjectMapper childMapper = nestedLookup.getNestedMappers().get(path);
            childObjectFilters.put(
                path,
                searcher.createWeight(searcher.rewrite(childMapper.nestedTypeFilter()), ScoreMode.COMPLETE_NO_SCORES, 1)
            );
        }
        return childObjectFilters.get(path);
    }

    /** Per-segment view over the nested-document structure. */
    private class HasNestedDocuments implements LeafNestedDocuments {

        final LeafReaderContext ctx;
        // Bits set on root (non-nested) documents of this segment.
        final BitSet parentFilter;
        // Parent filters per nested path, materialised for this segment.
        final Map<String, BitSet> objectFilters = new HashMap<>();
        // Child matchers per nested path, materialised for this segment.
        final Map<String, Scorer> childScorers = new HashMap<>();

        // Current doc id; -1 means unpositioned (advance() not yet called).
        int doc = -1;
        // Root doc id owning the current doc.
        int rootDoc = -1;
        // Identity chain of the current doc, or null when it is a root doc.
        SearchHit.NestedIdentity nestedIdentity = null;

        private HasNestedDocuments(LeafReaderContext ctx) throws IOException {
            this.ctx = ctx;
            this.parentFilter = parentDocumentFilter.getBitSet(ctx);
            for (Map.Entry<String, BitSetProducer> filter : parentObjectFilters.entrySet()) {
                BitSet bits = filter.getValue().getBitSet(ctx);
                if (bits != null) {
                    objectFilters.put(filter.getKey(), bits);
                }
            }
            for (Map.Entry<String, Weight> childFilter : childObjectFilters.entrySet()) {
                // scorer() may return null when no document in this segment matches the path.
                Scorer scorer = getNestedChildWeight(ctx, childFilter.getKey()).scorer(ctx);
                if (scorer != null) {
                    childScorers.put(childFilter.getKey(), scorer);
                }
            }
        }

        @Override
        public SearchHit.NestedIdentity advance(int doc) throws IOException {
            assert doc >= 0 && doc < ctx.reader().maxDoc();
            if (parentFilter.get(doc)) {
                // parent doc, no nested identity
                this.nestedIdentity = null;
                this.doc = doc;
                this.rootDoc = doc;
                return null;
            } else {
                // Nested doc: its root is the next set bit at or after it in the parent filter
                // (nested children are indexed before their parent).
                this.doc = doc;
                this.rootDoc = parentFilter.nextSetBit(doc);
                return this.nestedIdentity = loadNestedIdentity();
            }
        }

        @Override
        public int doc() {
            assert doc != -1 : "Called doc() when unpositioned";
            return doc;
        }

        @Override
        public int rootDoc() {
            assert doc != -1 : "Called rootDoc() when unpositioned";
            return rootDoc;
        }

        @Override
        public SearchHit.NestedIdentity nestedIdentity() {
            assert doc != -1 : "Called nestedIdentity() when unpositioned";
            return nestedIdentity;
        }

        /**
         * Finds the nested path whose child filter matches {@code doc}; when several match,
         * the shortest path wins. Throws if no path matches.
         */
        private String findObjectPath(int doc) throws IOException {
            String path = null;
            for (Map.Entry<String, Scorer> objectFilter : childScorers.entrySet()) {
                DocIdSetIterator it = objectFilter.getValue().iterator();
                // Only advance() when the iterator is still behind doc — Lucene iterators
                // move forward only, and these scorers are shared across calls.
                if (it.docID() == doc || it.docID() < doc && it.advance(doc) == doc) {
                    if (path == null || path.length() > objectFilter.getKey().length()) {
                        path = objectFilter.getKey();
                    }
                }
            }
            if (path == null) {
                throw new IllegalStateException("Cannot find object path for document " + doc);
            }
            return path;
        }

        /**
         * Builds the NestedIdentity chain for the current doc by walking up the nested
         * hierarchy, computing at each level the doc's offset among its siblings.
         */
        private SearchHit.NestedIdentity loadNestedIdentity() throws IOException {
            SearchHit.NestedIdentity ni = null;
            int currentLevelDoc = doc;
            int parentNameLength;
            String path = findObjectPath(doc);
            while (path != null) {
                String parent = nestedLookup.getNestedParent(path);
                // We have to pull a new scorer for each document here, because we advance from
                // the last parent which will be behind the doc
                Scorer childScorer = getNestedChildWeight(ctx, path).scorer(ctx);
                if (childScorer == null) {
                    throw new IllegalStateException("Cannot find object mapper for path " + path + " in doc " + doc);
                }
                BitSet parentBitSet;
                if (parent == null) {
                    // Top level: siblings are counted relative to root documents,
                    // and the whole path is used as the identity field name.
                    parentBitSet = parentFilter;
                    parentNameLength = 0;
                } else {
                    if (objectFilters.containsKey(parent) == false) {
                        throw new IllegalStateException(
                            "Cannot find parent mapper " + parent + " for path " + path + " in doc " + doc + " - known parents are " + objectFilters.keySet()
                        );
                    }
                    parentBitSet = objectFilters.get(parent);
                    // +1 skips the '.' separator when deriving the child's relative field name.
                    parentNameLength = parent.length() + 1;
                }
                // Count children of this path between the previous parent and the current doc;
                // that count is the doc's offset within its parent.
                int lastParent = parentBitSet.prevSetBit(currentLevelDoc);
                int offset = 0;
                DocIdSetIterator childIt = childScorer.iterator();
                for (int i = childIt.advance(lastParent + 1); i < currentLevelDoc; i = childIt.nextDoc()) {
                    offset++;
                }
                // Prepend this level to the identity chain and move one level up.
                ni = new SearchHit.NestedIdentity(path.substring(parentNameLength), offset, ni);
                path = parent;
                currentLevelDoc = parentBitSet.nextSetBit(currentLevelDoc);
            }
            return ni;
        }
    }
}
elastic/elasticsearch
server/src/main/java/org/elasticsearch/search/NestedDocuments.java
906
/* * Copyright 2002-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.util; import java.io.Serializable; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Spliterator; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Stream; import java.util.stream.StreamSupport; import org.springframework.lang.Nullable; /** * Unmodifiable wrapper for {@link MultiValueMap}. * * @author Arjen Poutsma * @since 6.0 * @param <K> the key type * @param <V> the value element type */ final class UnmodifiableMultiValueMap<K,V> implements MultiValueMap<K,V>, Serializable { private static final long serialVersionUID = -8697084563854098920L; private final MultiValueMap<K, V> delegate; @Nullable private transient Set<K> keySet; @Nullable private transient Set<Entry<K, List<V>>> entrySet; @Nullable private transient Collection<List<V>> values; @SuppressWarnings("unchecked") public UnmodifiableMultiValueMap(MultiValueMap<? extends K, ? 
extends V> delegate) { Assert.notNull(delegate, "Delegate must not be null"); this.delegate = (MultiValueMap<K, V>) delegate; } // delegation @Override public int size() { return this.delegate.size(); } @Override public boolean isEmpty() { return this.delegate.isEmpty(); } @Override public boolean containsKey(Object key) { return this.delegate.containsKey(key); } @Override public boolean containsValue(Object value) { return this.delegate.containsValue(value); } @Override @Nullable public List<V> get(Object key) { List<V> result = this.delegate.get(key); return (result != null ? Collections.unmodifiableList(result) : null); } @Override @Nullable public V getFirst(K key) { return this.delegate.getFirst(key); } @Override public List<V> getOrDefault(Object key, List<V> defaultValue) { List<V> result = this.delegate.getOrDefault(key, defaultValue); if (result != defaultValue) { result = Collections.unmodifiableList(result); } return result; } @Override public void forEach(BiConsumer<? super K, ? 
super List<V>> action) { this.delegate.forEach((k, vs) -> action.accept(k, Collections.unmodifiableList(vs))); } @Override public Map<K, V> toSingleValueMap() { return this.delegate.toSingleValueMap(); } @Override public boolean equals(@Nullable Object other) { return (this == other || this.delegate.equals(other)); } @Override public int hashCode() { return this.delegate.hashCode(); } @Override public String toString() { return this.delegate.toString(); } // lazy init @Override public Set<K> keySet() { if (this.keySet == null) { this.keySet = Collections.unmodifiableSet(this.delegate.keySet()); } return this.keySet; } @Override public Set<Entry<K, List<V>>> entrySet() { if (this.entrySet == null) { this.entrySet = new UnmodifiableEntrySet<>(this.delegate.entrySet()); } return this.entrySet; } @Override public Collection<List<V>> values() { if (this.values == null) { this.values = new UnmodifiableValueCollection<>(this.delegate.values()); } return this.values; } // unsupported @Nullable @Override public List<V> put(K key, List<V> value) { throw new UnsupportedOperationException(); } @Override public List<V> putIfAbsent(K key, List<V> value) { throw new UnsupportedOperationException(); } @Override public void putAll(Map<? extends K, ? extends List<V>> m) { throw new UnsupportedOperationException(); } @Override public List<V> remove(Object key) { throw new UnsupportedOperationException(); } @Override public void add(K key, @Nullable V value) { throw new UnsupportedOperationException(); } @Override public void addAll(K key, List<? 
extends V> values) {
    throw new UnsupportedOperationException();
}

// NOTE(review): every mutating operation of this wrapper below deliberately throws
// UnsupportedOperationException — the same contract as java.util.Collections.unmodifiableMap.

@Override
public void addAll(MultiValueMap<K, V> values) {
    throw new UnsupportedOperationException();
}

@Override
public void addIfAbsent(K key, @Nullable V value) {
    throw new UnsupportedOperationException();
}

@Override
public void set(K key, @Nullable V value) {
    throw new UnsupportedOperationException();
}

@Override
public void setAll(Map<K, V> values) {
    throw new UnsupportedOperationException();
}

@Override
public void replaceAll(BiFunction<? super K, ? super List<V>, ? extends List<V>> function) {
    throw new UnsupportedOperationException();
}

@Override
public boolean remove(Object key, Object value) {
    throw new UnsupportedOperationException();
}

@Override
public boolean replace(K key, List<V> oldValue, List<V> newValue) {
    throw new UnsupportedOperationException();
}

@Override
public List<V> replace(K key, List<V> value) {
    throw new UnsupportedOperationException();
}

@Override
public List<V> computeIfAbsent(K key, Function<? super K, ? extends List<V>> mappingFunction) {
    throw new UnsupportedOperationException();
}

@Override
public List<V> computeIfPresent(K key, BiFunction<? super K, ? super List<V>, ? extends List<V>> remappingFunction) {
    throw new UnsupportedOperationException();
}

@Override
public List<V> compute(K key, BiFunction<? super K, ? super List<V>, ? extends List<V>> remappingFunction) {
    throw new UnsupportedOperationException();
}

@Override
public List<V> merge(K key, List<V> value, BiFunction<? super List<V>, ? super List<V>, ? extends List<V>> remappingFunction) {
    throw new UnsupportedOperationException();
}

@Override
public void clear() {
    throw new UnsupportedOperationException();
}

/**
 * Unmodifiable view over the underlying entry set: reads are delegated, and every
 * entry handed out (iterator, arrays, streams, forEach, spliterator) is wrapped in
 * {@code UnmodifiableEntry} so its value list cannot be mutated either.
 */
private static class UnmodifiableEntrySet<K,V> implements Set<Map.Entry<K, List<V>>>, Serializable {

    private static final long serialVersionUID = 2407578793783925203L;

    private final Set<Entry<K, List<V>>> delegate;

    @SuppressWarnings("unchecked")
    public UnmodifiableEntrySet(Set<? extends Entry<? extends K, ? extends List<? extends V>>> delegate) {
        this.delegate = (Set<Entry<K, List<V>>>) delegate;
    }

    // delegation

    @Override
    public int size() {
        return this.delegate.size();
    }

    @Override
    public boolean isEmpty() {
        return this.delegate.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        return this.delegate.contains(o);
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return this.delegate.containsAll(c);
    }

    @Override
    public Iterator<Entry<K, List<V>>> iterator() {
        Iterator<? extends Entry<? extends K, ? extends List<? extends V>>> iterator = this.delegate.iterator();
        // Wrap each entry on the way out so callers cannot mutate through it.
        return new Iterator<>() {
            @Override
            public boolean hasNext() {
                return iterator.hasNext();
            }
            @Override
            public Entry<K, List<V>> next() {
                return new UnmodifiableEntry<>(iterator.next());
            }
        };
    }

    @Override
    public Object[] toArray() {
        Object[] result = this.delegate.toArray();
        filterArray(result);
        return result;
    }

    @Override
    public <T> T[] toArray(T[] a) {
        T[] result = this.delegate.toArray(a);
        filterArray(result);
        return result;
    }

    // Replaces raw entries in the snapshot array with unmodifiable wrappers, in place.
    @SuppressWarnings("unchecked")
    private void filterArray(Object[] result) {
        for (int i = 0; i < result.length; i++) {
            if (result[i] instanceof Map.Entry<?,?> entry) {
                result[i] = new UnmodifiableEntry<>((Entry<K, List<V>>) entry);
            }
        }
    }

    @Override
    public void forEach(Consumer<? super Entry<K, List<V>>> action) {
        this.delegate.forEach(e -> action.accept(new UnmodifiableEntry<>(e)));
    }

    @Override
    public Stream<Entry<K, List<V>>> stream() {
        return StreamSupport.stream(spliterator(), false);
    }

    @Override
    public Stream<Entry<K, List<V>>> parallelStream() {
        return StreamSupport.stream(spliterator(), true);
    }

    @Override
    public Spliterator<Entry<K, List<V>>> spliterator() {
        return new UnmodifiableEntrySpliterator<>(this.delegate.spliterator());
    }

    @Override
    public boolean equals(@Nullable Object other) {
        return (this == other || other instanceof Set<?> that && size() == that.size() && containsAll(that));
    }

    @Override
    public int hashCode() {
        return this.delegate.hashCode();
    }

    @Override
    public String toString() {
        return this.delegate.toString();
    }

    // unsupported

    @Override
    public boolean add(Entry<K, List<V>> kListEntry) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean remove(Object o) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeIf(Predicate<? super Entry<K, List<V>>> filter) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean addAll(Collection<? extends Entry<K, List<V>>> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void clear() {
        throw new UnsupportedOperationException();
    }

    // Spliterator view that wraps every advanced entry in UnmodifiableEntry.
    private static class UnmodifiableEntrySpliterator<K,V> implements Spliterator<Entry<K,List<V>>> {

        private final Spliterator<Entry<K, List<V>>> delegate;

        @SuppressWarnings("unchecked")
        public UnmodifiableEntrySpliterator(
                Spliterator<? extends Entry<? extends K, ? extends List<? extends V>>> delegate) {
            this.delegate = (Spliterator<Entry<K, List<V>>>) delegate;
        }

        @Override
        public boolean tryAdvance(Consumer<? super Entry<K, List<V>>> action) {
            return this.delegate.tryAdvance(entry -> action.accept(new UnmodifiableEntry<>(entry)));
        }

        @Override
        public void forEachRemaining(Consumer<? super Entry<K, List<V>>> action) {
            this.delegate.forEachRemaining(entry -> action.accept(new UnmodifiableEntry<>(entry)));
        }

        @Override
        @Nullable
        public Spliterator<Entry<K, List<V>>> trySplit() {
            Spliterator<? extends Entry<? extends K, ? extends List<? extends V>>> split = this.delegate.trySplit();
            if (split != null) {
                return new UnmodifiableEntrySpliterator<>(split);
            }
            else {
                return null;
            }
        }

        @Override
        public long estimateSize() {
            return this.delegate.estimateSize();
        }

        @Override
        public long getExactSizeIfKnown() {
            return this.delegate.getExactSizeIfKnown();
        }

        @Override
        public int characteristics() {
            return this.delegate.characteristics();
        }

        @Override
        public boolean hasCharacteristics(int characteristics) {
            return this.delegate.hasCharacteristics(characteristics);
        }

        @Override
        public Comparator<? super Entry<K, List<V>>> getComparator() {
            return this.delegate.getComparator();
        }
    }

    // Map.Entry wrapper: the key passes through, the value list is wrapped
    // unmodifiable on every read, and setValue throws.
    private static class UnmodifiableEntry<K,V> implements Map.Entry<K,List<V>> {

        private final Entry<K, List<V>> delegate;

        @SuppressWarnings("unchecked")
        public UnmodifiableEntry(Entry<? extends K, ? extends List<? extends V>> delegate) {
            Assert.notNull(delegate, "Delegate must not be null");
            this.delegate = (Entry<K, List<V>>) delegate;
        }

        @Override
        public K getKey() {
            return this.delegate.getKey();
        }

        @Override
        public List<V> getValue() {
            return Collections.unmodifiableList(this.delegate.getValue());
        }

        @Override
        public List<V> setValue(List<V> value) {
            throw new UnsupportedOperationException();
        }

        @Override
        public boolean equals(@Nullable Object other) {
            return (this == other || (other instanceof Map.Entry<?, ?> that && getKey().equals(that.getKey()) && getValue().equals(that.getValue())));
        }

        @Override
        public int hashCode() {
            return this.delegate.hashCode();
        }

        @Override
        public String toString() {
            return this.delegate.toString();
        }
    }
}

/**
 * Unmodifiable view over values(): every {@code List} handed out (iterator, arrays,
 * streams, forEach, spliterator) is wrapped via Collections.unmodifiableList.
 */
private static class UnmodifiableValueCollection<V> implements Collection<List<V>>, Serializable {

    private static final long serialVersionUID = 5518377583904339588L;

    private final Collection<List<V>> delegate;

    public UnmodifiableValueCollection(Collection<List<V>> delegate) {
        this.delegate = delegate;
    }

    // delegation

    @Override
    public int size() {
        return this.delegate.size();
    }

    @Override
    public boolean isEmpty() {
        return this.delegate.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        return this.delegate.contains(o);
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return this.delegate.containsAll(c);
    }

    @Override
    public Object[] toArray() {
        Object[] result = this.delegate.toArray();
        filterArray(result);
        return result;
    }

    @Override
    public <T> T[] toArray(T[] a) {
        T[] result = this.delegate.toArray(a);
        filterArray(result);
        return result;
    }

    // Replaces each list in the snapshot array with an unmodifiable wrapper, in place.
    private void filterArray(Object[] array) {
        for (int i = 0; i < array.length; i++) {
            if (array[i] instanceof List<?> list) {
                array[i] = Collections.unmodifiableList(list);
            }
        }
    }

    @Override
    public Iterator<List<V>> iterator() {
        Iterator<List<V>> iterator = this.delegate.iterator();
        return new Iterator<>() {
            @Override
            public boolean hasNext() {
                return iterator.hasNext();
            }
            @Override
            public List<V> next() {
                return Collections.unmodifiableList(iterator.next());
            }
        };
    }

    @Override
    public void forEach(Consumer<? super List<V>> action) {
        this.delegate.forEach(list -> action.accept(Collections.unmodifiableList(list)));
    }

    @Override
    public Spliterator<List<V>> spliterator() {
        return new UnmodifiableValueSpliterator<>(this.delegate.spliterator());
    }

    @Override
    public Stream<List<V>> stream() {
        return StreamSupport.stream(spliterator(), false);
    }

    @Override
    public Stream<List<V>> parallelStream() {
        return StreamSupport.stream(spliterator(), true);
    }

    @Override
    public boolean equals(@Nullable Object other) {
        return (this == other || this.delegate.equals(other));
    }

    @Override
    public int hashCode() {
        return this.delegate.hashCode();
    }

    @Override
    public String toString() {
        return this.delegate.toString();
    }

    // unsupported

    @Override
    public boolean add(List<V> ts) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean remove(Object o) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean addAll(Collection<? extends List<V>> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeIf(Predicate<? super List<V>> filter) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void clear() {
        throw new UnsupportedOperationException();
    }

    // Spliterator view that wraps each advanced list unmodifiable.
    private static class UnmodifiableValueSpliterator<T> implements Spliterator<List<T>> {

        private final Spliterator<List<T>> delegate;

        public UnmodifiableValueSpliterator(Spliterator<List<T>> delegate) {
            this.delegate = delegate;
        }

        @Override
        public boolean tryAdvance(Consumer<? super List<T>> action) {
            return this.delegate.tryAdvance(l -> action.accept(Collections.unmodifiableList(l)));
        }

        @Override
        public void forEachRemaining(Consumer<? super List<T>> action) {
            this.delegate.forEachRemaining(l -> action.accept(Collections.unmodifiableList(l)));
        }

        @Override
        @Nullable
        public Spliterator<List<T>> trySplit() {
            Spliterator<List<T>> split = this.delegate.trySplit();
            if (split != null) {
                return new UnmodifiableValueSpliterator<>(split);
            }
            else {
                return null;
            }
        }

        @Override
        public long estimateSize() {
            return this.delegate.estimateSize();
        }

        @Override
        public long getExactSizeIfKnown() {
            return this.delegate.getExactSizeIfKnown();
        }

        @Override
        public int characteristics() {
            return this.delegate.characteristics();
        }

        @Override
        public boolean hasCharacteristics(int characteristics) {
            return this.delegate.hasCharacteristics(characteristics);
        }

        @Override
        public Comparator<? super List<T>> getComparator() {
            return this.delegate.getComparator();
        }
    }
}
spring-projects/spring-framework
spring-core/src/main/java/org/springframework/util/UnmodifiableMultiValueMap.java
907
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.unsafe.map; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; import java.util.Iterator; import java.util.LinkedList; import com.google.common.annotations.VisibleForTesting; import com.google.common.io.Closeables; import org.apache.spark.SparkEnv; import org.apache.spark.executor.ShuffleWriteMetrics; import org.apache.spark.internal.LogKeys; import org.apache.spark.internal.SparkLogger; import org.apache.spark.internal.SparkLoggerFactory; import org.apache.spark.internal.MDC; import org.apache.spark.memory.MemoryConsumer; import org.apache.spark.memory.SparkOutOfMemoryError; import org.apache.spark.memory.TaskMemoryManager; import org.apache.spark.serializer.SerializerManager; import org.apache.spark.storage.BlockManager; import org.apache.spark.unsafe.Platform; import org.apache.spark.unsafe.UnsafeAlignedOffset; import org.apache.spark.unsafe.array.ByteArrayMethods; import org.apache.spark.unsafe.array.LongArray; import org.apache.spark.unsafe.hash.Murmur3_x86_32; import org.apache.spark.unsafe.memory.MemoryBlock; import org.apache.spark.util.collection.unsafe.sort.UnsafeSorterSpillReader; import 
org.apache.spark.util.collection.unsafe.sort.UnsafeSorterSpillWriter;

/**
 * An append-only hash map where keys and values are contiguous regions of bytes.
 *
 * This is backed by a power-of-2-sized hash table, using quadratic probing with triangular numbers,
 * which is guaranteed to exhaust the space.
 *
 * The map can support up to 2^29 keys. If the key cardinality is higher than this, you should
 * probably be using sorting instead of hashing for better cache locality.
 *
 * The key and values under the hood are stored together, in the following format:
 * First uaoSize bytes: len(k) (key length in bytes) + len(v) (value length in bytes) + uaoSize
 * Next uaoSize bytes: len(k)
 * Next len(k) bytes: key data
 * Next len(v) bytes: value data
 * Last 8 bytes: pointer to next pair
 *
 * It means first uaoSize bytes store the entire record (key + value + uaoSize) length. This format
 * is compatible with {@link org.apache.spark.util.collection.unsafe.sort.UnsafeExternalSorter},
 * so we can pass records from this map directly into the sorter to sort records in place.
 */
public final class BytesToBytesMap extends MemoryConsumer {

  private static final SparkLogger logger = SparkLoggerFactory.getLogger(BytesToBytesMap.class);

  private static final HashMapGrowthStrategy growthStrategy = HashMapGrowthStrategy.DOUBLING;

  private final TaskMemoryManager taskMemoryManager;

  /**
   * A linked list for tracking all allocated data pages so that we can free all of our memory.
   */
  private final LinkedList<MemoryBlock> dataPages = new LinkedList<>();

  /**
   * The data page that will be used to store keys and values for new hashtable entries. When this
   * page becomes full, a new page will be allocated and this pointer will change to point to that
   * new page.
   */
  private MemoryBlock currentPage = null;

  /**
   * Offset into `currentPage` that points to the location where new data can be inserted into
   * the page. This does not incorporate the page's base offset.
   */
  private long pageCursor = 0;

  /**
   * The maximum number of keys that BytesToBytesMap supports. The hash table has to be
   * power-of-2-sized and its backing Java array can contain at most (1 &lt;&lt; 30) elements,
   * since that's the largest power-of-2 that's less than Integer.MAX_VALUE. We need two long array
   * entries per key, giving us a maximum capacity of (1 &lt;&lt; 29).
   */
  public static final int MAX_CAPACITY = (1 << 29);

  // This choice of page table size and page size means that we can address up to 500 gigabytes
  // of memory.

  /**
   * A single array to store the key and value.
   *
   * Position {@code 2 * i} in the array is used to track a pointer to the key at index {@code i},
   * while position {@code 2 * i + 1} in the array holds key's full 32-bit hashcode.
   */
  @Nullable private LongArray longArray;
  // TODO: we're wasting 32 bits of space here; we can probably store fewer bits of the hashcode
  // and exploit word-alignment to use fewer bits to hold the address.  This might let us store
  // only one long per map entry, increasing the chance that this array will fit in cache at the
  // expense of maybe performing more lookups if we have hash collisions.  Say that we stored only
  // 27 bits of the hashcode and 37 bits of the address.  37 bits is enough to address 1 terabyte
  // of RAM given word-alignment.  If we use 13 bits of this for our page table, that gives us a
  // maximum page size of 2^24 * 8 = ~134 megabytes per page. This change will require us to store
  // full base addresses in the page table for off-heap mode so that we can reconstruct the full
  // absolute memory addresses.

  /**
   * Whether or not the longArray can grow. We will not insert more elements if it's false.
   */
  private boolean canGrowArray = true;

  private final double loadFactor;

  /**
   * The size of the data pages that hold key and value data. Map entries cannot span multiple
   * pages, so this limits the maximum entry size.
   */
  private final long pageSizeBytes;

  /**
   * Number of keys defined in the map.
   */
  private int numKeys;

  /**
   * Number of values defined in the map. A key could have multiple values.
   */
  private int numValues;

  /**
   * The map will be expanded once the number of keys exceeds this threshold.
   */
  private int growthThreshold;

  /**
   * Mask for truncating hashcodes so that they do not exceed the long array's size.
   * This is a strength reduction optimization; we're essentially performing a modulus operation,
   * but doing so with a bitmask because this is a power-of-2-sized hash map.
   */
  private int mask;

  /**
   * Return value of {@link BytesToBytesMap#lookup(Object, long, int)}.
   */
  private final Location loc;

  // Profiling counters: total probe steps and total lookups issued against this map.
  private long numProbes = 0L;
  private long numKeyLookups = 0L;

  private long peakMemoryUsedBytes = 0L;

  private final int initialCapacity;

  private final BlockManager blockManager;
  private final SerializerManager serializerManager;
  private volatile MapIterator destructiveIterator = null;
  private LinkedList<UnsafeSorterSpillWriter> spillWriters = new LinkedList<>();

  /**
   * Creates a new map.
   *
   * @param initialCapacity must be in (0, MAX_CAPACITY]
   * @param loadFactor fraction of capacity at which the map grows
   * @param pageSizeBytes size of each data page; bounds the maximum record size
   */
  public BytesToBytesMap(
      TaskMemoryManager taskMemoryManager,
      BlockManager blockManager,
      SerializerManager serializerManager,
      int initialCapacity,
      double loadFactor,
      long pageSizeBytes) {
    super(taskMemoryManager, pageSizeBytes, taskMemoryManager.getTungstenMemoryMode());
    this.taskMemoryManager = taskMemoryManager;
    this.blockManager = blockManager;
    this.serializerManager = serializerManager;
    this.loadFactor = loadFactor;
    this.loc = new Location();
    this.pageSizeBytes = pageSizeBytes;
    if (initialCapacity <= 0) {
      throw new IllegalArgumentException("Initial capacity must be greater than 0");
    }
    if (initialCapacity > MAX_CAPACITY) {
      throw new IllegalArgumentException(
        "Initial capacity " + initialCapacity + " exceeds maximum capacity of " + MAX_CAPACITY);
    }
    if (pageSizeBytes > TaskMemoryManager.MAXIMUM_PAGE_SIZE_BYTES) {
      throw new IllegalArgumentException("Page size " + pageSizeBytes + " cannot exceed " +
        TaskMemoryManager.MAXIMUM_PAGE_SIZE_BYTES);
    }
    this.initialCapacity = initialCapacity;
    allocate(initialCapacity);
  }

  // Convenience constructor: pulls block/serializer managers from the live SparkEnv (null in
  // tests without an env) and fixes loadFactor at 0.5 so longArray can be reused for sorting.
  public BytesToBytesMap(
      TaskMemoryManager taskMemoryManager,
      int initialCapacity,
      long pageSizeBytes) {
    this(
      taskMemoryManager,
      SparkEnv.get() != null ? SparkEnv.get().blockManager() : null,
      SparkEnv.get() != null ? SparkEnv.get().serializerManager() : null,
      initialCapacity,
      // In order to re-use the longArray for sorting, the load factor cannot be larger than 0.5.
      0.5,
      pageSizeBytes);
  }

  /**
   * Returns the number of keys defined in the map.
   */
  public int numKeys() {
    return numKeys;
  }

  /**
   * Returns the number of values defined in the map. A key could have multiple values.
   */
  public int numValues() {
    return numValues;
  }

  /**
   * Iterator over the map's records in page order; optionally destructive (frees each
   * page after it has been consumed) and able to read back previously spilled records.
   */
  public final class MapIterator implements Iterator<Location> {

    private int numRecords;
    private final Location loc;

    private MemoryBlock currentPage = null;
    private int recordsInPage = 0;
    private Object pageBaseObject;
    private long offsetInPage;

    // If this iterator destructive or not. When it is true, it frees each page as it moves onto
    // next one.
    private boolean destructive = false;
    private UnsafeSorterSpillReader reader = null;

    private MapIterator(int numRecords, Location loc, boolean destructive) {
      this.numRecords = numRecords;
      this.loc = loc;
      this.destructive = destructive;
      if (destructive) {
        destructiveIterator = this;
        // longArray will not be used anymore if destructive is true, release it now.
        if (longArray != null) {
          freeArray(longArray);
          longArray = null;
        }
      }
    }

    private void advanceToNextPage() {
      // SPARK-26265: We will first lock this `MapIterator` and then `TaskMemoryManager` when going
      // to free a memory page by calling `freePage`. At the same time, it is possibly that another
      // memory consumer first locks `TaskMemoryManager` and then this `MapIterator` when it
      // acquires memory and causes spilling on this `MapIterator`. To avoid deadlock here, we keep
      // reference to the page to free and free it after releasing the lock of `MapIterator`.
MemoryBlock pageToFree = null;
      try {
        synchronized (this) {
          int nextIdx = dataPages.indexOf(currentPage) + 1;
          if (destructive && currentPage != null) {
            dataPages.remove(currentPage);
            pageToFree = currentPage;
            nextIdx--;
          }
          if (dataPages.size() > nextIdx) {
            // More in-memory pages remain: position the cursor at the next page's first record.
            currentPage = dataPages.get(nextIdx);
            pageBaseObject = currentPage.getBaseObject();
            offsetInPage = currentPage.getBaseOffset();
            // The first uaoSize bytes of a page hold its record count.
            recordsInPage = UnsafeAlignedOffset.getSize(pageBaseObject, offsetInPage);
            offsetInPage += UnsafeAlignedOffset.getUaoSize();
          } else {
            // No pages left: fall back to reading records previously spilled to disk.
            currentPage = null;
            if (reader != null) {
              handleFailedDelete();
            }
            try {
              Closeables.close(reader, /* swallowIOException = */ false);
              reader = spillWriters.getFirst().getReader(serializerManager);
              recordsInPage = -1;
            } catch (IOException e) {
              // Scala iterator does not handle exception
              Platform.throwException(e);
            }
          }
        }
      } finally {
        // Free outside the synchronized block — see the SPARK-26265 deadlock note above.
        if (pageToFree != null) {
          freePage(pageToFree);
        }
      }
    }

    @Override
    public boolean hasNext() {
      if (numRecords == 0) {
        // Exhausted: clean up any remaining spill file before reporting end-of-iteration.
        if (reader != null) {
          handleFailedDelete();
        }
      }
      return numRecords > 0;
    }

    @Override
    public Location next() {
      if (recordsInPage == 0) {
        advanceToNextPage();
      }
      numRecords--;
      if (currentPage != null) {
        int totalLength = UnsafeAlignedOffset.getSize(pageBaseObject, offsetInPage);
        loc.with(currentPage, offsetInPage);
        // [total size] [key size] [key] [value] [pointer to next]
        offsetInPage += UnsafeAlignedOffset.getUaoSize() + totalLength + 8;
        recordsInPage --;
        return loc;
      } else {
        // Reading from a spill file instead of an in-memory page.
        assert(reader != null);
        if (!reader.hasNext()) {
          advanceToNextPage();
        }
        try {
          reader.loadNext();
        } catch (IOException e) {
          try {
            reader.close();
          } catch(IOException e2) {
            logger.error("Error while closing spill reader", e2);
          }
          // Scala iterator does not handle exception
          Platform.throwException(e);
        }
        loc.with(reader.getBaseObject(), reader.getBaseOffset(), reader.getRecordLength());
        return loc;
      }
    }

    /**
     * Spills fully-consumed-later pages of a destructive iterator to disk to release memory.
     *
     * @param numBytes target number of bytes to release
     * @return number of bytes actually released (0 if not destructive or only one page remains)
     */
    public synchronized long spill(long numBytes) throws IOException {
      if (!destructive || dataPages.size() == 1) {
        return 0L;
      }
      updatePeakMemoryUsed();
      // TODO: use existing ShuffleWriteMetrics
      ShuffleWriteMetrics writeMetrics = new ShuffleWriteMetrics();
      long released = 0L;
      while (dataPages.size() > 0) {
        MemoryBlock block = dataPages.getLast();
        // The currentPage is used, cannot be released
        if (block == currentPage) {
          break;
        }
        Object base = block.getBaseObject();
        long offset = block.getBaseOffset();
        int numRecords = UnsafeAlignedOffset.getSize(base, offset);
        int uaoSize = UnsafeAlignedOffset.getUaoSize();
        offset += uaoSize;
        final UnsafeSorterSpillWriter writer =
          new UnsafeSorterSpillWriter(blockManager, 32 * 1024, writeMetrics, numRecords);
        while (numRecords > 0) {
          int length = UnsafeAlignedOffset.getSize(base, offset);
          writer.write(base, offset + uaoSize, length, 0);
          offset += uaoSize + length + 8;
          numRecords--;
        }
        writer.close();
        spillWriters.add(writer);
        dataPages.removeLast();
        released += block.size();
        freePage(block);
        if (released >= numBytes) {
          break;
        }
      }
      return released;
    }

    // Best-effort removal of the current spill file from disk; logs (does not throw) on failure.
    private void handleFailedDelete() {
      if (spillWriters.size() > 0) {
        // remove the spill file from disk
        File file = spillWriters.removeFirst().getFile();
        if (file != null && file.exists() && !file.delete()) {
          logger.error("Was unable to delete spill file {}",
            MDC.of(LogKeys.PATH$.MODULE$, file.getAbsolutePath()));
        }
      }
    }
  }

  /**
   * Returns an iterator for iterating over the entries of this map.
   *
   * For efficiency, all calls to `next()` will return the same {@link Location} object.
   *
   * The returned iterator is thread-safe. However if the map is modified while iterating over it,
   * the behavior of the returned iterator is undefined.
   */
  public MapIterator iterator() {
    return new MapIterator(numValues, new Location(), false);
  }

  /**
   * Returns a destructive iterator for iterating over the entries of this map. It frees each page
   * as it moves onto next one. Notice: it is illegal to call any method on the map after
   * `destructiveIterator()` has been called.
   *
   * For efficiency, all calls to `next()` will return the same {@link Location} object.
   *
   * The returned iterator is thread-safe. However if the map is modified while iterating over it,
   * the behavior of the returned iterator is undefined.
   */
  public MapIterator destructiveIterator() {
    updatePeakMemoryUsed();
    return new MapIterator(numValues, new Location(), true);
  }

  /**
   * Iterator for the entries of this map. This is to first iterate over key indices in
   * `longArray` then accessing values in `dataPages`. NOTE: this is different from `MapIterator`
   * in the sense that key index is preserved here
   * (See `UnsafeHashedRelation` for example of usage).
   */
  public final class MapIteratorWithKeyIndex implements Iterator<Location> {

    /**
     * The index in `longArray` where the key is stored.
     */
    private int keyIndex = 0;

    private int numRecords;
    private final Location loc;

    private MapIteratorWithKeyIndex() {
      this.numRecords = numValues;
      this.loc = new Location();
    }

    @Override
    public boolean hasNext() {
      return numRecords > 0;
    }

    @Override
    public Location next() {
      if (!loc.isDefined() || !loc.nextValue()) {
        // Skip empty slots (a zero key pointer marks an unused bucket).
        while (longArray.get(keyIndex * 2) == 0) {
          keyIndex++;
        }
        loc.with(keyIndex, 0, true);
        keyIndex++;
      }
      numRecords--;
      return loc;
    }
  }

  /**
   * Returns an iterator for iterating over the entries of this map,
   * by first iterating over the key index inside hash map's `longArray`.
   *
   * For efficiency, all calls to `next()` will return the same {@link Location} object.
   *
   * The returned iterator is NOT thread-safe. If the map is modified while iterating over it,
   * the behavior of the returned iterator is undefined.
   */
  public MapIteratorWithKeyIndex iteratorWithKeyIndex() {
    return new MapIteratorWithKeyIndex();
  }

  /**
   * The maximum number of allowed keys index.
   *
   * The value of allowed keys index is in the range of [0, maxNumKeysIndex - 1].
   */
  public int maxNumKeysIndex() {
    return (int) (longArray.size() / 2);
  }

  /**
   * Looks up a key, and return a {@link Location} handle that can be used to test existence
   * and read/write values.
   *
   * This function always returns the same {@link Location} instance to avoid object allocation.
   * This function is not thread-safe.
   */
  public Location lookup(Object keyBase, long keyOffset, int keyLength) {
    safeLookup(keyBase, keyOffset, keyLength, loc,
      Murmur3_x86_32.hashUnsafeWords(keyBase, keyOffset, keyLength, 42));
    return loc;
  }

  /**
   * Looks up a key, and return a {@link Location} handle that can be used to test existence
   * and read/write values.
   *
   * This function always returns the same {@link Location} instance to avoid object allocation.
   * This function is not thread-safe.
   */
  public Location lookup(Object keyBase, long keyOffset, int keyLength, int hash) {
    safeLookup(keyBase, keyOffset, keyLength, loc, hash);
    return loc;
  }

  /**
   * Looks up a key, and saves the result in provided `loc`.
   *
   * This is a thread-safe version of `lookup`, could be used by multiple threads.
   */
  public void safeLookup(Object keyBase, long keyOffset, int keyLength, Location loc, int hash) {
    assert(longArray != null);
    numKeyLookups++;
    int pos = hash & mask;
    int step = 1;
    while (true) {
      numProbes++;
      if (longArray.get(pos * 2) == 0) {
        // This is a new key.
        loc.with(pos, hash, false);
        return;
      } else {
        long stored = longArray.get(pos * 2 + 1);
        if ((int) (stored) == hash) {
          // Full hash code matches. Let's compare the keys for equality.
          loc.with(pos, hash, true);
          if (loc.getKeyLength() == keyLength) {
            final boolean areEqual = ByteArrayMethods.arrayEquals(
              keyBase,
              keyOffset,
              loc.getKeyBase(),
              loc.getKeyOffset(),
              keyLength
            );
            if (areEqual) {
              return;
            }
          }
        }
      }
      // Quadratic probing with triangular numbers: step grows by 1 each probe,
      // which (per the class javadoc) is guaranteed to exhaust a power-of-2 table.
      pos = (pos + step) & mask;
      step++;
    }
  }

  /**
   * Handle returned by {@link BytesToBytesMap#lookup(Object, long, int)} function.
*/
public final class Location {
    /** An index into the hash map's Long array */
    private int pos;
    /** True if this location points to a position where a key is defined, false otherwise */
    private boolean isDefined;
    /**
     * The hashcode of the most recent key passed to
     * {@link BytesToBytesMap#lookup(Object, long, int, int)}. Caching this hashcode here allows us
     * to avoid re-hashing the key when storing a value for that key.
     */
    private int keyHashcode;
    private Object baseObject;  // the base object for key and value
    private long keyOffset;
    private int keyLength;
    private long valueOffset;
    private int valueLength;

    /**
     * Memory page containing the record. Only set if created by {@link BytesToBytesMap#iterator()}.
     */
    @Nullable private MemoryBlock memoryPage;

    private void updateAddressesAndSizes(long fullKeyAddress) {
      updateAddressesAndSizes(
        taskMemoryManager.getPage(fullKeyAddress),
        taskMemoryManager.getOffsetInPage(fullKeyAddress));
    }

    // Decodes the record layout described in the class javadoc:
    // [total size][key size][key][value][pointer to next].
    private void updateAddressesAndSizes(final Object base, long offset) {
      baseObject = base;
      final int totalLength = UnsafeAlignedOffset.getSize(base, offset);
      int uaoSize = UnsafeAlignedOffset.getUaoSize();
      offset += uaoSize;
      keyLength = UnsafeAlignedOffset.getSize(base, offset);
      offset += uaoSize;
      keyOffset = offset;
      valueOffset = offset + keyLength;
      valueLength = totalLength - keyLength - uaoSize;
    }

    private Location with(int pos, int keyHashcode, boolean isDefined) {
      assert(longArray != null);
      this.pos = pos;
      this.isDefined = isDefined;
      this.keyHashcode = keyHashcode;
      if (isDefined) {
        final long fullKeyAddress = longArray.get(pos * 2);
        updateAddressesAndSizes(fullKeyAddress);
      }
      return this;
    }

    private Location with(MemoryBlock page, long offsetInPage) {
      this.isDefined = true;
      this.memoryPage = page;
      updateAddressesAndSizes(page.getBaseObject(), offsetInPage);
      return this;
    }

    /**
     * This is only used for spilling
     */
    private Location with(Object base, long offset, int length) {
      this.isDefined = true;
      this.memoryPage = null;
      baseObject = base;
      int uaoSize = UnsafeAlignedOffset.getUaoSize();
      keyOffset = offset + uaoSize;
      keyLength = UnsafeAlignedOffset.getSize(base, offset);
      valueOffset = offset + uaoSize + keyLength;
      valueLength = length - uaoSize - keyLength;
      return this;
    }

    /**
     * Find the next pair that has the same key as current one.
     */
    public boolean nextValue() {
      assert isDefined;
      // The 8 bytes immediately after the value hold the pointer to the next pair (0 = none).
      long nextAddr = Platform.getLong(baseObject, valueOffset + valueLength);
      if (nextAddr == 0) {
        return false;
      } else {
        updateAddressesAndSizes(nextAddr);
        return true;
      }
    }

    /**
     * Returns the memory page that contains the current record.
     * This is only valid if this is returned by {@link BytesToBytesMap#iterator()}.
     */
    public MemoryBlock getMemoryPage() {
      return this.memoryPage;
    }

    /**
     * Returns true if the key is defined at this position, and false otherwise.
     */
    public boolean isDefined() {
      return isDefined;
    }

    /**
     * Returns index for key.
     */
    public int getKeyIndex() {
      assert (isDefined);
      return pos;
    }

    /**
     * Returns the base object for key.
     */
    public Object getKeyBase() {
      assert (isDefined);
      return baseObject;
    }

    /**
     * Returns the offset for key.
     */
    public long getKeyOffset() {
      assert (isDefined);
      return keyOffset;
    }

    /**
     * Returns the base object for value.
     */
    public Object getValueBase() {
      assert (isDefined);
      return baseObject;
    }

    /**
     * Returns the offset for value.
     */
    public long getValueOffset() {
      assert (isDefined);
      return valueOffset;
    }

    /**
     * Returns the length of the key defined at this position.
     * Unspecified behavior if the key is not defined.
     */
    public int getKeyLength() {
      assert (isDefined);
      return keyLength;
    }

    /**
     * Returns the length of the value defined at this position.
     * Unspecified behavior if the key is not defined.
     */
    public int getValueLength() {
      assert (isDefined);
      return valueLength;
    }

    /**
     * Append a new value for the key. This method could be called multiple times for a given key.
     * The return value indicates whether the put succeeded or whether it failed because additional
     * memory could not be acquired.
     * <p>
     * It is only valid to call this method immediately after calling `lookup()` using the same key.
     * </p>
     * <p>
     * The key and value must be word-aligned (that is, their sizes must be a multiple of 8).
     * </p>
     * <p>
     * After calling this method, calls to `get[Key|Value]Address()` and `get[Key|Value]Length`
     * will return information on the data stored by this `append` call.
     * </p>
     * <p>
     * As an example usage, here's the proper way to store a new key:
     * </p>
     * <pre>
     *   Location loc = map.lookup(keyBase, keyOffset, keyLength);
     *   if (!loc.isDefined()) {
     *     if (!loc.append(keyBase, keyOffset, keyLength, ...)) {
     *       // handle failure to grow map (by spilling, for example)
     *     }
     *   }
     * </pre>
     * <p>
     * Unspecified behavior if the key is not defined.
     * </p>
     *
     * @return true if the put() was successful and false if the put() failed because memory could
     *         not be acquired.
     */
    public boolean append(Object kbase, long koff, int klen, Object vbase, long voff, int vlen) {
      assert (klen % 8 == 0);
      assert (vlen % 8 == 0);
      assert (longArray != null);

      // We should not increase number of keys to be MAX_CAPACITY. The usage pattern of this map is
      // lookup + append. If we append key until the number of keys to be MAX_CAPACITY, next time
      // the call of lookup will hang forever because it cannot find an empty slot.
      if (numKeys == MAX_CAPACITY - 1
        // The map could be reused from last spill (because of no enough memory to grow),
        // then we don't try to grow again if hit the `growthThreshold`.
        || !canGrowArray && numKeys >= growthThreshold) {
        return false;
      }

      // Here, we'll copy the data into our data pages. Because we only store a relative offset from
      // the key address instead of storing the absolute address of the value, the key and value
      // must be stored in the same memory page.
      // (total length) (key length) (key) (value) (8 byte pointer to next value)
      int uaoSize = UnsafeAlignedOffset.getUaoSize();
      final long recordLength = (2L * uaoSize) + klen + vlen + 8;
      if (currentPage == null || currentPage.size() - pageCursor < recordLength) {
        if (!acquireNewPage(recordLength + uaoSize)) {
          return false;
        }
      }

      // --- Append the key and value data to the current data page --------------------------------
      final Object base = currentPage.getBaseObject();
      long offset = currentPage.getBaseOffset() + pageCursor;
      final long recordOffset = offset;
      UnsafeAlignedOffset.putSize(base, offset, klen + vlen + uaoSize);
      UnsafeAlignedOffset.putSize(base, offset + uaoSize, klen);
      offset += (2L * uaoSize);
      Platform.copyMemory(kbase, koff, base, offset, klen);
      offset += klen;
      Platform.copyMemory(vbase, voff, base, offset, vlen);
      offset += vlen;
      // put this value at the beginning of the list
      Platform.putLong(base, offset, isDefined ? longArray.get(pos * 2) : 0);

      // --- Update bookkeeping data structures ----------------------------------------------------
      offset = currentPage.getBaseOffset();
      // Bump the page's record count (stored in the page's first uaoSize bytes).
      UnsafeAlignedOffset.putSize(base, offset, UnsafeAlignedOffset.getSize(base, offset) + 1);
      pageCursor += recordLength;
      final long storedKeyAddress = taskMemoryManager.encodePageNumberAndOffset(
        currentPage, recordOffset);
      longArray.set(pos * 2, storedKeyAddress);
      updateAddressesAndSizes(storedKeyAddress);
      numValues++;
      if (!isDefined) {
        numKeys++;
        longArray.set(pos * 2 + 1, keyHashcode);
        isDefined = true;

        // If the map has reached its growth threshold, try to grow it.
        if (numKeys >= growthThreshold) {
          // We use two array entries per key, so the array size is twice the capacity.
          // We should compare the current capacity of the array, instead of its size.
          if (longArray.size() / 2 < MAX_CAPACITY) {
            try {
              growAndRehash();
            } catch (SparkOutOfMemoryError oom) {
              canGrowArray = false;
            }
          } else {
            // The map is already at MAX_CAPACITY and cannot grow. Instead, we prevent it from
            // accepting any more new elements to make sure we don't exceed the load factor. If we
            // need to spill later, this allows UnsafeKVExternalSorter to reuse the array for
            // sorting.
            canGrowArray = false;
          }
        }
      }
      return true;
    }
  }

  /**
   * Acquire a new page from the memory manager.
   * @return whether there is enough space to allocate the new page.
   */
  private boolean acquireNewPage(long required) {
    try {
      currentPage = allocatePage(required);
    } catch (SparkOutOfMemoryError e) {
      return false;
    }
    dataPages.add(currentPage);
    // Initialize the page's record count (first uaoSize bytes) to zero.
    UnsafeAlignedOffset.putSize(currentPage.getBaseObject(), currentPage.getBaseOffset(), 0);
    pageCursor = UnsafeAlignedOffset.getUaoSize();
    return true;
  }

  @Override
  public long spill(long size, MemoryConsumer trigger) throws IOException {
    // Only self-spill through an active destructive iterator; never spill on behalf of
    // another consumer's request targeting this map directly.
    if (trigger != this && destructiveIterator != null) {
      return destructiveIterator.spill(size);
    }
    return 0L;
  }

  /**
   * Allocate new data structures for this map. When calling this outside of the constructor,
   * make sure to keep references to the old data structures so that you can free them.
   *
   * @param capacity the new map capacity
   */
  private void allocate(int capacity) {
    assert (capacity >= 0);
    // Rounded up to the next power of 2, clamped to [64, MAX_CAPACITY].
    capacity = Math.max((int) Math.min(MAX_CAPACITY, ByteArrayMethods.nextPowerOf2(capacity)), 64);
    assert (capacity <= MAX_CAPACITY);
    longArray = allocateArray(capacity * 2L);
    longArray.zeroOut();

    this.growthThreshold = (int) (capacity * loadFactor);
    this.mask = capacity - 1;
  }

  /**
   * Free all allocated memory associated with this map, including the storage for keys and values
   * as well as the hash map array itself.
   *
   * This method is idempotent and can be called multiple times.
*/ public void free() { updatePeakMemoryUsed(); if (longArray != null) { freeArray(longArray); longArray = null; } Iterator<MemoryBlock> dataPagesIterator = dataPages.iterator(); while (dataPagesIterator.hasNext()) { MemoryBlock dataPage = dataPagesIterator.next(); dataPagesIterator.remove(); freePage(dataPage); } assert(dataPages.isEmpty()); while (!spillWriters.isEmpty()) { File file = spillWriters.removeFirst().getFile(); if (file != null && file.exists()) { if (!file.delete()) { logger.error("Was unable to delete spill file {}", MDC.of(LogKeys.PATH$.MODULE$, file.getAbsolutePath())); } } } } public TaskMemoryManager getTaskMemoryManager() { return taskMemoryManager; } public long getPageSizeBytes() { return pageSizeBytes; } /** * Returns the total amount of memory, in bytes, consumed by this map's managed structures. */ public long getTotalMemoryConsumption() { long totalDataPagesSize = 0L; for (MemoryBlock dataPage : dataPages) { totalDataPagesSize += dataPage.size(); } return totalDataPagesSize + ((longArray != null) ? longArray.memoryBlock().size() : 0L); } private void updatePeakMemoryUsed() { long mem = getTotalMemoryConsumption(); if (mem > peakMemoryUsedBytes) { peakMemoryUsedBytes = mem; } } /** * Return the peak memory used so far, in bytes. */ public long getPeakMemoryUsedBytes() { updatePeakMemoryUsed(); return peakMemoryUsedBytes; } /** * Returns the average number of probes per key lookup. */ public double getAvgHashProbesPerKey() { return (1.0 * numProbes) / numKeyLookups; } @VisibleForTesting public int getNumDataPages() { return dataPages.size(); } /** * Returns the underline long[] of longArray. */ public LongArray getArray() { assert(longArray != null); return longArray; } /** * Reset this map to initialized state. 
*/ public void reset() { updatePeakMemoryUsed(); numKeys = 0; numValues = 0; freeArray(longArray); longArray = null; while (dataPages.size() > 0) { MemoryBlock dataPage = dataPages.removeLast(); freePage(dataPage); } allocate(initialCapacity); canGrowArray = true; currentPage = null; pageCursor = 0; } /** * Grows the size of the hash table and re-hash everything. */ @VisibleForTesting void growAndRehash() { assert(longArray != null); // Store references to the old data structures to be used when we re-hash final LongArray oldLongArray = longArray; final int oldCapacity = (int) oldLongArray.size() / 2; // Allocate the new data structures allocate(Math.min(growthStrategy.nextCapacity(oldCapacity), MAX_CAPACITY)); // Re-mask (we don't recompute the hashcode because we stored all 32 bits of it) for (int i = 0; i < oldLongArray.size(); i += 2) { final long keyPointer = oldLongArray.get(i); if (keyPointer == 0) { continue; } final int hashcode = (int) oldLongArray.get(i + 1); int newPos = hashcode & mask; int step = 1; while (longArray.get(newPos * 2) != 0) { newPos = (newPos + step) & mask; step++; } longArray.set(newPos * 2, keyPointer); longArray.set(newPos * 2 + 1, hashcode); } freeArray(oldLongArray); } }
apache/spark
core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
908
package com.baeldung.falsesharing; import sun.misc.Unsafe; import java.lang.reflect.Field; import java.util.function.LongBinaryOperator; import java.util.function.DoubleBinaryOperator; import java.util.concurrent.ThreadLocalRandom; /** * Copy-pasted from {@code java.util.concurrent.atomic.Striped64} class. * * A package-local class holding common representation and mechanics * for classes supporting dynamic striping on 64bit values. The class * extends Number so that concrete subclasses must publicly do so. */ @SuppressWarnings("serial") abstract class Striped64 extends Number { /* * This class maintains a lazily-initialized table of atomically * updated variables, plus an extra "base" field. The table size * is a power of two. Indexing uses masked per-thread hash codes. * Nearly all declarations in this class are package-private, * accessed directly by subclasses. * * Table entries are of class Cell; a variant of AtomicLong padded * (via @sun.misc.Contended) to reduce cache contention. Padding * is overkill for most Atomics because they are usually * irregularly scattered in memory and thus don't interfere much * with each other. But Atomic objects residing in arrays will * tend to be placed adjacent to each other, and so will most * often share cache lines (with a huge negative performance * impact) without this precaution. * * In part because Cells are relatively large, we avoid creating * them until they are needed. When there is no contention, all * updates are made to the base field. Upon first contention (a * failed CAS on base update), the table is initialized to size 2. * The table size is doubled upon further contention until * reaching the nearest power of two greater than or equal to the * number of CPUS. Table slots remain empty (null) until they are * needed. * * A single spinlock ("cellsBusy") is used for initializing and * resizing the table, as well as populating slots with new Cells. 
* There is no need for a blocking lock; when the lock is not * available, threads try other slots (or the base). During these * retries, there is increased contention and reduced locality, * which is still better than alternatives. * * The Thread probe fields maintained via ThreadLocalRandom serve * as per-thread hash codes. We let them remain uninitialized as * zero (if they come in this way) until they contend at slot * 0. They are then initialized to values that typically do not * often conflict with others. Contention and/or table collisions * are indicated by failed CASes when performing an update * operation. Upon a collision, if the table size is less than * the capacity, it is doubled in size unless some other thread * holds the lock. If a hashed slot is empty, and lock is * available, a new Cell is created. Otherwise, if the slot * exists, a CAS is tried. Retries proceed by "double hashing", * using a secondary hash (Marsaglia XorShift) to try to find a * free slot. * * The table size is capped because, when there are more threads * than CPUs, supposing that each thread were bound to a CPU, * there would exist a perfect hash function mapping threads to * slots that eliminates collisions. When we reach capacity, we * search for this mapping by randomly varying the hash codes of * colliding threads. Because search is random, and collisions * only become known via CAS failures, convergence can be slow, * and because threads are typically not bound to CPUS forever, * may not occur at all. However, despite these limitations, * observed contention rates are typically low in these cases. * * It is possible for a Cell to become unused when threads that * once hashed to it terminate, as well as in the case where * doubling the table causes no thread to hash to it under * expanded mask. 
We do not try to detect or remove such cells, * under the assumption that for long-running instances, observed * contention levels will recur, so the cells will eventually be * needed again; and for short-lived ones, it does not matter. */ /** * Padded variant of AtomicLong supporting only raw accesses plus CAS. * * JVM intrinsics note: It would be possible to use a release-only * form of CAS here, if it were provided. */ @jdk.internal.vm.annotation.Contended static final class Cell { volatile long value; Cell(long x) { value = x; } final boolean cas(long cmp, long val) { return UNSAFE.compareAndSwapLong(this, valueOffset, cmp, val); } // Unsafe mechanics private static final sun.misc.Unsafe UNSAFE; private static final long valueOffset; static { try { UNSAFE = getUnsafe(); Class<?> ak = Striped64.Cell.class; valueOffset = UNSAFE.objectFieldOffset (ak.getDeclaredField("value")); } catch (Exception e) { throw new Error(e); } } } /** Number of CPUS, to place bound on table size */ static final int NCPU = Runtime.getRuntime().availableProcessors(); /** * Table of cells. When non-null, size is a power of 2. */ transient volatile Striped64.Cell[] cells; /** * Base value, used mainly when there is no contention, but also as * a fallback during table initialization races. Updated via CAS. */ transient volatile long base; /** * Spinlock (locked via CAS) used when resizing and/or creating Cells. */ transient volatile int cellsBusy; /** * Package-private default constructor */ Striped64() { } /** * CASes the base field. */ final boolean casBase(long cmp, long val) { return UNSAFE.compareAndSwapLong(this, BASE, cmp, val); } /** * CASes the cellsBusy field from 0 to 1 to acquire lock. */ final boolean casCellsBusy() { return UNSAFE.compareAndSwapInt(this, CELLSBUSY, 0, 1); } /** * Returns the probe value for the current thread. * Duplicated from ThreadLocalRandom because of packaging restrictions. 
*/ static final int getProbe() { return UNSAFE.getInt(Thread.currentThread(), PROBE); } /** * Pseudo-randomly advances and records the given probe value for the * given thread. * Duplicated from ThreadLocalRandom because of packaging restrictions. */ static final int advanceProbe(int probe) { probe ^= probe << 13; // xorshift probe ^= probe >>> 17; probe ^= probe << 5; UNSAFE.putInt(Thread.currentThread(), PROBE, probe); return probe; } /** * Handles cases of updates involving initialization, resizing, * creating new Cells, and/or contention. See above for * explanation. This method suffers the usual non-modularity * problems of optimistic retry code, relying on rechecked sets of * reads. * * @param x the value * @param fn the update function, or null for add (this convention * avoids the need for an extra field or function in LongAdder). * @param wasUncontended false if CAS failed before call */ final void longAccumulate(long x, LongBinaryOperator fn, boolean wasUncontended) { int h; if ((h = getProbe()) == 0) { ThreadLocalRandom.current(); // force initialization h = getProbe(); wasUncontended = true; } boolean collide = false; // True if last slot nonempty for (;;) { Striped64.Cell[] as; Striped64.Cell a; int n; long v; if ((as = cells) != null && (n = as.length) > 0) { if ((a = as[(n - 1) & h]) == null) { if (cellsBusy == 0) { // Try to attach new Cell Striped64.Cell r = new Striped64.Cell(x); // Optimistically create if (cellsBusy == 0 && casCellsBusy()) { boolean created = false; try { // Recheck under lock Striped64.Cell[] rs; int m, j; if ((rs = cells) != null && (m = rs.length) > 0 && rs[j = (m - 1) & h] == null) { rs[j] = r; created = true; } } finally { cellsBusy = 0; } if (created) break; continue; // Slot is now non-empty } } collide = false; } else if (!wasUncontended) // CAS already known to fail wasUncontended = true; // Continue after rehash else if (a.cas(v = a.value, ((fn == null) ? 
v + x : fn.applyAsLong(v, x)))) break; else if (n >= NCPU || cells != as) collide = false; // At max size or stale else if (!collide) collide = true; else if (cellsBusy == 0 && casCellsBusy()) { try { if (cells == as) { // Expand table unless stale Striped64.Cell[] rs = new Striped64.Cell[n << 1]; for (int i = 0; i < n; ++i) rs[i] = as[i]; cells = rs; } } finally { cellsBusy = 0; } collide = false; continue; // Retry with expanded table } h = advanceProbe(h); } else if (cellsBusy == 0 && cells == as && casCellsBusy()) { boolean init = false; try { // Initialize table if (cells == as) { Striped64.Cell[] rs = new Striped64.Cell[2]; rs[h & 1] = new Striped64.Cell(x); cells = rs; init = true; } } finally { cellsBusy = 0; } if (init) break; } else if (casBase(v = base, ((fn == null) ? v + x : fn.applyAsLong(v, x)))) break; // Fall back on using base } } /** * Same as longAccumulate, but injecting long/double conversions * in too many places to sensibly merge with long version, given * the low-overhead requirements of this class. So must instead be * maintained by copy/paste/adapt. 
*/ final void doubleAccumulate(double x, DoubleBinaryOperator fn, boolean wasUncontended) { int h; if ((h = getProbe()) == 0) { ThreadLocalRandom.current(); // force initialization h = getProbe(); wasUncontended = true; } boolean collide = false; // True if last slot nonempty for (;;) { Striped64.Cell[] as; Striped64.Cell a; int n; long v; if ((as = cells) != null && (n = as.length) > 0) { if ((a = as[(n - 1) & h]) == null) { if (cellsBusy == 0) { // Try to attach new Cell Striped64.Cell r = new Striped64.Cell(Double.doubleToRawLongBits(x)); if (cellsBusy == 0 && casCellsBusy()) { boolean created = false; try { // Recheck under lock Striped64.Cell[] rs; int m, j; if ((rs = cells) != null && (m = rs.length) > 0 && rs[j = (m - 1) & h] == null) { rs[j] = r; created = true; } } finally { cellsBusy = 0; } if (created) break; continue; // Slot is now non-empty } } collide = false; } else if (!wasUncontended) // CAS already known to fail wasUncontended = true; // Continue after rehash else if (a.cas(v = a.value, ((fn == null) ? Double.doubleToRawLongBits (Double.longBitsToDouble(v) + x) : Double.doubleToRawLongBits (fn.applyAsDouble (Double.longBitsToDouble(v), x))))) break; else if (n >= NCPU || cells != as) collide = false; // At max size or stale else if (!collide) collide = true; else if (cellsBusy == 0 && casCellsBusy()) { try { if (cells == as) { // Expand table unless stale Striped64.Cell[] rs = new Striped64.Cell[n << 1]; for (int i = 0; i < n; ++i) rs[i] = as[i]; cells = rs; } } finally { cellsBusy = 0; } collide = false; continue; // Retry with expanded table } h = advanceProbe(h); } else if (cellsBusy == 0 && cells == as && casCellsBusy()) { boolean init = false; try { // Initialize table if (cells == as) { Striped64.Cell[] rs = new Striped64.Cell[2]; rs[h & 1] = new Striped64.Cell(Double.doubleToRawLongBits(x)); cells = rs; init = true; } } finally { cellsBusy = 0; } if (init) break; } else if (casBase(v = base, ((fn == null) ? 
Double.doubleToRawLongBits (Double.longBitsToDouble(v) + x) : Double.doubleToRawLongBits (fn.applyAsDouble (Double.longBitsToDouble(v), x))))) break; // Fall back on using base } } // Unsafe mechanics private static final sun.misc.Unsafe UNSAFE; private static final long BASE; private static final long CELLSBUSY; private static final long PROBE; static { try { UNSAFE = getUnsafe(); Class<?> sk = Striped64.class; BASE = UNSAFE.objectFieldOffset (sk.getDeclaredField("base")); CELLSBUSY = UNSAFE.objectFieldOffset (sk.getDeclaredField("cellsBusy")); Class<?> tk = Thread.class; PROBE = UNSAFE.objectFieldOffset (tk.getDeclaredField("threadLocalRandomProbe")); } catch (Exception e) { throw new Error(e); } } private static Unsafe getUnsafe() { try { Field field = Unsafe.class.getDeclaredField("theUnsafe"); field.setAccessible(true); return (Unsafe) field.get(null); } catch (Exception e) { throw new RuntimeException(e); } } }
eugenp/tutorials
jmh/src/main/java/com/baeldung/falsesharing/Striped64.java
909
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file or at // https://developers.google.com/open-source/licenses/bsd package com.google.protobuf; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InvalidObjectException; import java.io.ObjectInputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; /** * Class to represent {@code ByteStrings} formed by concatenation of other ByteStrings, without * copying the data in the pieces. The concatenation is represented as a tree whose leaf nodes are * each a {@link com.google.protobuf.ByteString.LeafByteString}. * * <p>Most of the operation here is inspired by the now-famous paper <a * href="https://web.archive.org/web/20060202015456/http://www.cs.ubc.ca/local/reading/proceedings/spe91-95/spe/vol25/issue12/spe986.pdf"> * BAP95 </a> Ropes: an Alternative to Strings hans-j. boehm, russ atkinson and michael plass * * <p>The algorithms described in the paper have been implemented for character strings in {@code * com.google.common.string.Rope} and in the c++ class {@code cord.cc}. * * <p>Fundamentally the Rope algorithm represents the collection of pieces as a binary tree. BAP95 * uses a Fibonacci bound relating depth to a minimum sequence length, sequences that are too short * relative to their depth cause a tree rebalance. More precisely, a tree of depth d is "balanced" * in the terminology of BAP95 if its length is at least F(d+2), where F(n) is the n-th Fibonacci * number. Thus for depths 0, 1, 2, 3, 4, 5,... we have minimum lengths 1, 2, 3, 5, 8, 13,... 
* * @author [email protected] (Carl Haverl) */ final class RopeByteString extends ByteString { /** * BAP95. Let Fn be the nth Fibonacci number. A {@link RopeByteString} of depth n is "balanced", * i.e flat enough, if its length is at least Fn+2, e.g. a "balanced" {@link RopeByteString} of * depth 1 must have length at least 2, of depth 4 must have length >= 8, etc. * * <p>There's nothing special about using the Fibonacci numbers for this, but they are a * reasonable sequence for encapsulating the idea that we are OK with longer strings being encoded * in deeper binary trees. * * <p>For 32-bit integers, this array has length 46. * * <p>The correctness of this constant array is validated in tests. */ static final int[] minLengthByDepth = { 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584, 4181, 6765, 10946, 17711, 28657, 46368, 75025, 121393, 196418, 317811, 514229, 832040, 1346269, 2178309, 3524578, 5702887, 9227465, 14930352, 24157817, 39088169, 63245986, 102334155, 165580141, 267914296, 433494437, 701408733, 1134903170, 1836311903, Integer.MAX_VALUE }; private final int totalLength; private final ByteString left; private final ByteString right; private final int leftLength; private final int treeDepth; /** * Create a new RopeByteString, which can be thought of as a new tree node, by recording * references to the two given strings. * * @param left string on the left of this node, should have {@code size() > 0} * @param right string on the right of this node, should have {@code size() > 0} */ private RopeByteString(ByteString left, ByteString right) { this.left = left; this.right = right; leftLength = left.size(); totalLength = leftLength + right.size(); treeDepth = Math.max(left.getTreeDepth(), right.getTreeDepth()) + 1; } /** * Concatenate the given strings while performing various optimizations to slow the growth rate of * tree depth and tree node count. 
The result is either a {@link * com.google.protobuf.ByteString.LeafByteString} or a {@link RopeByteString} depending on which * optimizations, if any, were applied. * * <p>Small pieces of length less than {@link ByteString#CONCATENATE_BY_COPY_SIZE} may be copied * by value here, as in BAP95. Large pieces are referenced without copy. * * @param left string on the left * @param right string on the right * @return concatenation representing the same sequence as the given strings */ static ByteString concatenate(ByteString left, ByteString right) { if (right.size() == 0) { return left; } if (left.size() == 0) { return right; } final int newLength = left.size() + right.size(); if (newLength < ByteString.CONCATENATE_BY_COPY_SIZE) { // Optimization from BAP95: For short (leaves in paper, but just short // here) total length, do a copy of data to a new leaf. return concatenateBytes(left, right); } if (left instanceof RopeByteString) { final RopeByteString leftRope = (RopeByteString) left; if (leftRope.right.size() + right.size() < CONCATENATE_BY_COPY_SIZE) { // Optimization from BAP95: As an optimization of the case where the // ByteString is constructed by repeated concatenate, recognize the case // where a short string is concatenated to a left-hand node whose // right-hand branch is short. In the paper this applies to leaves, but // we just look at the length here. This has the advantage of shedding // references to unneeded data when substrings have been taken. // // When we recognize this case, we do a copy of the data and create a // new parent node so that the depth of the result is the same as the // given left tree. ByteString newRight = concatenateBytes(leftRope.right, right); return new RopeByteString(leftRope.left, newRight); } if (leftRope.left.getTreeDepth() > leftRope.right.getTreeDepth() && leftRope.getTreeDepth() > right.getTreeDepth()) { // Typically for concatenate-built strings the left-side is deeper than // the right. 
This is our final attempt to concatenate without // increasing the tree depth. We'll redo the node on the RHS. This // is yet another optimization for building the string by repeatedly // concatenating on the right. ByteString newRight = new RopeByteString(leftRope.right, right); return new RopeByteString(leftRope.left, newRight); } } // Fine, we'll add a node and increase the tree depth--unless we rebalance ;^) int newDepth = Math.max(left.getTreeDepth(), right.getTreeDepth()) + 1; if (newLength >= minLength(newDepth)) { // The tree is shallow enough, so don't rebalance return new RopeByteString(left, right); } return new Balancer().balance(left, right); } /** * Concatenates two strings by copying data values. This is called in a few cases in order to * reduce the growth of the number of tree nodes. * * @param left string on the left * @param right string on the right * @return string formed by copying data bytes */ private static ByteString concatenateBytes(ByteString left, ByteString right) { int leftSize = left.size(); int rightSize = right.size(); byte[] bytes = new byte[leftSize + rightSize]; left.copyTo(bytes, 0, 0, leftSize); right.copyTo(bytes, 0, leftSize, rightSize); return ByteString.wrap(bytes); // Constructor wraps bytes } /** * Create a new RopeByteString for testing only while bypassing all the defenses of {@link * #concatenate(ByteString, ByteString)}. This allows testing trees of specific structure. We are * also able to insert empty leaves, though these are dis-allowed, so that we can make sure the * implementation can withstand their presence. 
* * @param left string on the left of this node * @param right string on the right of this node * @return an unsafe instance for testing only */ static RopeByteString newInstanceForTest(ByteString left, ByteString right) { return new RopeByteString(left, right); } /** * Returns the minimum length for which a tree of the given depth is considered balanced according * to BAP95, which means the tree is flat-enough with respect to the bounds. Defaults to {@code * Integer.MAX_VALUE} if {@code depth >= minLengthByDepth.length} in order to avoid an {@code * ArrayIndexOutOfBoundsException}. * * @param depth tree depth * @return minimum balanced length */ static int minLength(int depth) { if (depth >= minLengthByDepth.length) { return Integer.MAX_VALUE; } return minLengthByDepth[depth]; } /** * Gets the byte at the given index. Throws {@link ArrayIndexOutOfBoundsException} for * backwards-compatibility reasons although it would more properly be {@link * IndexOutOfBoundsException}. * * @param index index of byte * @return the value * @throws ArrayIndexOutOfBoundsException {@code index} is < 0 or >= size */ @Override public byte byteAt(int index) { checkIndex(index, totalLength); return internalByteAt(index); } @Override byte internalByteAt(int index) { // Find the relevant piece by recursive descent if (index < leftLength) { return left.internalByteAt(index); } return right.internalByteAt(index - leftLength); } @Override public int size() { return totalLength; } @Override public ByteIterator iterator() { return new AbstractByteIterator() { final PieceIterator pieces = new PieceIterator(RopeByteString.this); ByteIterator current = nextPiece(); private ByteIterator nextPiece() { // NOTE: PieceIterator is guaranteed to return non-empty pieces, so this method will always // return non-empty iterators (or null) return pieces.hasNext() ? 
pieces.next().iterator() : null; } @Override public boolean hasNext() { return current != null; } @Override public byte nextByte() { if (current == null) { throw new NoSuchElementException(); } byte b = current.nextByte(); if (!current.hasNext()) { current = nextPiece(); } return b; } }; } // ================================================================= // Pieces @Override protected int getTreeDepth() { return treeDepth; } /** * Determines if the tree is balanced according to BAP95, which means the tree is flat-enough with * respect to the bounds. Note that this definition of balanced is one where sub-trees of balanced * trees are not necessarily balanced. * * @return true if the tree is balanced */ @Override protected boolean isBalanced() { return totalLength >= minLength(treeDepth); } /** * Takes a substring of this one. This involves recursive descent along the left and right edges * of the substring, and referencing any wholly contained segments in between. Any leaf nodes * entirely uninvolved in the substring will not be referenced by the substring. * * <p>Substrings of {@code length < 2} should result in at most a single recursive call chain, * terminating at a leaf node. Thus the result will be a {@link * com.google.protobuf.ByteString.LeafByteString}. 
* * @param beginIndex start at this index * @param endIndex the last character is the one before this index * @return substring leaf node or tree */ @Override public ByteString substring(int beginIndex, int endIndex) { final int length = checkRange(beginIndex, endIndex, totalLength); if (length == 0) { // Empty substring return ByteString.EMPTY; } if (length == totalLength) { // The whole string return this; } // Proper substring if (endIndex <= leftLength) { // Substring on the left return left.substring(beginIndex, endIndex); } if (beginIndex >= leftLength) { // Substring on the right return right.substring(beginIndex - leftLength, endIndex - leftLength); } // Split substring ByteString leftSub = left.substring(beginIndex); ByteString rightSub = right.substring(0, endIndex - leftLength); // Intentionally not rebalancing, since in many cases these two // substrings will already be less deep than the top-level // RopeByteString we're taking a substring of. return new RopeByteString(leftSub, rightSub); } // ================================================================= // ByteString -> byte[] @Override protected void copyToInternal( byte[] target, int sourceOffset, int targetOffset, int numberToCopy) { if (sourceOffset + numberToCopy <= leftLength) { left.copyToInternal(target, sourceOffset, targetOffset, numberToCopy); } else if (sourceOffset >= leftLength) { right.copyToInternal(target, sourceOffset - leftLength, targetOffset, numberToCopy); } else { int leftLength = this.leftLength - sourceOffset; left.copyToInternal(target, sourceOffset, targetOffset, leftLength); right.copyToInternal(target, 0, targetOffset + leftLength, numberToCopy - leftLength); } } @Override public void copyTo(ByteBuffer target) { left.copyTo(target); right.copyTo(target); } @Override public ByteBuffer asReadOnlyByteBuffer() { ByteBuffer byteBuffer = ByteBuffer.wrap(toByteArray()); return byteBuffer.asReadOnlyBuffer(); } @Override public List<ByteBuffer> asReadOnlyByteBufferList() { // 
Walk through the list of LeafByteString's that make up this // rope, and add each one as a read-only ByteBuffer. List<ByteBuffer> result = new ArrayList<ByteBuffer>(); PieceIterator pieces = new PieceIterator(this); while (pieces.hasNext()) { LeafByteString byteString = pieces.next(); result.add(byteString.asReadOnlyByteBuffer()); } return result; } @Override public void writeTo(OutputStream outputStream) throws IOException { left.writeTo(outputStream); right.writeTo(outputStream); } @Override void writeToInternal(OutputStream out, int sourceOffset, int numberToWrite) throws IOException { if (sourceOffset + numberToWrite <= leftLength) { left.writeToInternal(out, sourceOffset, numberToWrite); } else if (sourceOffset >= leftLength) { right.writeToInternal(out, sourceOffset - leftLength, numberToWrite); } else { int numberToWriteInLeft = leftLength - sourceOffset; left.writeToInternal(out, sourceOffset, numberToWriteInLeft); right.writeToInternal(out, 0, numberToWrite - numberToWriteInLeft); } } @Override void writeTo(ByteOutput output) throws IOException { left.writeTo(output); right.writeTo(output); } @Override void writeToReverse(ByteOutput output) throws IOException { right.writeToReverse(output); left.writeToReverse(output); } @Override protected String toStringInternal(Charset charset) { return new String(toByteArray(), charset); } // ================================================================= // UTF-8 decoding @Override public boolean isValidUtf8() { int leftPartial = left.partialIsValidUtf8(Utf8.COMPLETE, 0, leftLength); int state = right.partialIsValidUtf8(leftPartial, 0, right.size()); return state == Utf8.COMPLETE; } @Override protected int partialIsValidUtf8(int state, int offset, int length) { int toIndex = offset + length; if (toIndex <= leftLength) { return left.partialIsValidUtf8(state, offset, length); } else if (offset >= leftLength) { return right.partialIsValidUtf8(state, offset - leftLength, length); } else { int leftLength = 
this.leftLength - offset; int leftPartial = left.partialIsValidUtf8(state, offset, leftLength); return right.partialIsValidUtf8(leftPartial, 0, length - leftLength); } } // ================================================================= // equals() and hashCode() @Override public boolean equals(Object other) { if (other == this) { return true; } if (!(other instanceof ByteString)) { return false; } ByteString otherByteString = (ByteString) other; if (totalLength != otherByteString.size()) { return false; } if (totalLength == 0) { return true; } // You don't really want to be calling equals on long strings, but since // we cache the hashCode, we effectively cache inequality. We use the cached // hashCode if it's already computed. It's arguable we should compute the // hashCode here, and if we're going to be testing a bunch of byteStrings, // it might even make sense. int thisHash = peekCachedHashCode(); int thatHash = otherByteString.peekCachedHashCode(); if (thisHash != 0 && thatHash != 0 && thisHash != thatHash) { return false; } return equalsFragments(otherByteString); } /** * Determines if this string is equal to another of the same length by iterating over the leaf * nodes. On each step of the iteration, the overlapping segments of the leaves are compared. 
* * @param other string of the same length as this one * @return true if the values of this string equals the value of the given one */ private boolean equalsFragments(ByteString other) { int thisOffset = 0; Iterator<LeafByteString> thisIter = new PieceIterator(this); LeafByteString thisString = thisIter.next(); int thatOffset = 0; Iterator<LeafByteString> thatIter = new PieceIterator(other); LeafByteString thatString = thatIter.next(); int pos = 0; while (true) { int thisRemaining = thisString.size() - thisOffset; int thatRemaining = thatString.size() - thatOffset; int bytesToCompare = Math.min(thisRemaining, thatRemaining); // At least one of the offsets will be zero boolean stillEqual = (thisOffset == 0) ? thisString.equalsRange(thatString, thatOffset, bytesToCompare) : thatString.equalsRange(thisString, thisOffset, bytesToCompare); if (!stillEqual) { return false; } pos += bytesToCompare; if (pos >= totalLength) { if (pos == totalLength) { return true; } throw new IllegalStateException(); } // We always get to the end of at least one of the pieces if (bytesToCompare == thisRemaining) { // If reached end of this thisOffset = 0; thisString = thisIter.next(); } else { thisOffset += bytesToCompare; } if (bytesToCompare == thatRemaining) { // If reached end of that thatOffset = 0; thatString = thatIter.next(); } else { thatOffset += bytesToCompare; } } } @Override protected int partialHash(int h, int offset, int length) { int toIndex = offset + length; if (toIndex <= leftLength) { return left.partialHash(h, offset, length); } else if (offset >= leftLength) { return right.partialHash(h, offset - leftLength, length); } else { int leftLength = this.leftLength - offset; int leftPartial = left.partialHash(h, offset, leftLength); return right.partialHash(leftPartial, 0, length - leftLength); } } // ================================================================= // Input stream @Override public CodedInputStream newCodedInput() { // Passing along direct references to 
internal ByteBuffers can support more efficient parsing // via aliasing in CodedInputStream for users who wish to use it. // // Otherwise we force data copies, both in copying as an input stream and in buffering in the // CodedInputSteam. return CodedInputStream.newInstance(asReadOnlyByteBufferList(), /* bufferIsImmutable= */ true); } @Override public InputStream newInput() { return new RopeInputStream(); } /** * This class implements the balancing algorithm of BAP95. In the paper the authors use an array * to keep track of pieces, while here we use a stack. The tree is balanced by traversing subtrees * in left to right order, and the stack always contains the part of the string we've traversed so * far. * * <p>One surprising aspect of the algorithm is the result of balancing is not necessarily * balanced, though it is nearly balanced. For details, see BAP95. */ private static class Balancer { // Stack containing the part of the string, starting from the left, that // we've already traversed. The final string should be the equivalent of // concatenating the strings on the stack from bottom to top. private final ArrayDeque<ByteString> prefixesStack = new ArrayDeque<>(); private ByteString balance(ByteString left, ByteString right) { doBalance(left); doBalance(right); // Sweep stack to gather the result ByteString partialString = prefixesStack.pop(); while (!prefixesStack.isEmpty()) { ByteString newLeft = prefixesStack.pop(); partialString = new RopeByteString(newLeft, partialString); } // We should end up with a RopeByteString since at a minimum we will // create one from concatenating left and right return partialString; } private void doBalance(ByteString root) { // BAP95: Insert balanced subtrees whole. This means the result might not // be balanced, leading to repeated rebalancings on concatenate. However, // these rebalancings are shallow due to ignoring balanced subtrees, and // relatively few calls to insert() result. 
if (root.isBalanced()) { insert(root); } else if (root instanceof RopeByteString) { RopeByteString rbs = (RopeByteString) root; doBalance(rbs.left); doBalance(rbs.right); } else { throw new IllegalArgumentException( "Has a new type of ByteString been created? Found " + root.getClass()); } } /** * Push a string on the balance stack (BAP95). BAP95 uses an array and calls the elements in the * array 'bins'. We instead use a stack, so the 'bins' of lengths are represented by differences * between the elements of minLengthByDepth. * * <p>If the length bin for our string, and all shorter length bins, are empty, we just push it * on the stack. Otherwise, we need to start concatenating, putting the given string in the * "middle" and continuing until we land in an empty length bin that matches the length of our * concatenation. * * @param byteString string to place on the balance stack */ private void insert(ByteString byteString) { int depthBin = getDepthBinForLength(byteString.size()); int binEnd = minLength(depthBin + 1); // BAP95: Concatenate all trees occupying bins representing the length of // our new piece or of shorter pieces, to the extent that is possible. // The goal is to clear the bin which our piece belongs in, but that may // not be entirely possible if there aren't enough longer bins occupied. 
if (prefixesStack.isEmpty() || prefixesStack.peek().size() >= binEnd) { prefixesStack.push(byteString); } else { int binStart = minLength(depthBin); // Concatenate the subtrees of shorter length ByteString newTree = prefixesStack.pop(); while (!prefixesStack.isEmpty() && prefixesStack.peek().size() < binStart) { ByteString left = prefixesStack.pop(); newTree = new RopeByteString(left, newTree); } // Concatenate the given string newTree = new RopeByteString(newTree, byteString); // Continue concatenating until we land in an empty bin while (!prefixesStack.isEmpty()) { depthBin = getDepthBinForLength(newTree.size()); binEnd = minLength(depthBin + 1); if (prefixesStack.peek().size() < binEnd) { ByteString left = prefixesStack.pop(); newTree = new RopeByteString(left, newTree); } else { break; } } prefixesStack.push(newTree); } } private int getDepthBinForLength(int length) { int depth = Arrays.binarySearch(minLengthByDepth, length); if (depth < 0) { // It wasn't an exact match, so convert to the index of the containing // fragment, which is one less even than the insertion point. int insertionPoint = -(depth + 1); depth = insertionPoint - 1; } return depth; } } /** * This class is a continuable tree traversal, which keeps the state information which would exist * on the stack in a recursive traversal instead on a stack of "Bread Crumbs". The maximum depth * of the stack in this iterator is the same as the depth of the tree being traversed. * * <p>This iterator is used to implement {@link RopeByteString#equalsFragments(ByteString)}. 
*/ private static final class PieceIterator implements Iterator<LeafByteString> { private final ArrayDeque<RopeByteString> breadCrumbs; private LeafByteString next; private PieceIterator(ByteString root) { if (root instanceof RopeByteString) { RopeByteString rbs = (RopeByteString) root; breadCrumbs = new ArrayDeque<>(rbs.getTreeDepth()); breadCrumbs.push(rbs); next = getLeafByLeft(rbs.left); } else { breadCrumbs = null; next = (LeafByteString) root; } } private LeafByteString getLeafByLeft(ByteString root) { ByteString pos = root; while (pos instanceof RopeByteString) { RopeByteString rbs = (RopeByteString) pos; breadCrumbs.push(rbs); pos = rbs.left; } return (LeafByteString) pos; } private LeafByteString getNextNonEmptyLeaf() { while (true) { // Almost always, we go through this loop exactly once. However, if // we discover an empty string in the rope, we toss it and try again. if (breadCrumbs == null || breadCrumbs.isEmpty()) { return null; } else { LeafByteString result = getLeafByLeft(breadCrumbs.pop().right); if (!result.isEmpty()) { return result; } } } } @Override public boolean hasNext() { return next != null; } /** * Returns the next item and advances one {@link com.google.protobuf.ByteString.LeafByteString}. 
* * @return next non-empty LeafByteString or {@code null} */ @Override public LeafByteString next() { if (next == null) { throw new NoSuchElementException(); } LeafByteString result = next; next = getNextNonEmptyLeaf(); return result; } @Override public void remove() { throw new UnsupportedOperationException(); } } // ================================================================= // Serializable private static final long serialVersionUID = 1L; Object writeReplace() { return ByteString.wrap(toByteArray()); } private void readObject(@SuppressWarnings("unused") ObjectInputStream in) throws IOException { throw new InvalidObjectException("RopeByteStream instances are not to be serialized directly"); } /** This class is the {@link RopeByteString} equivalent for {@link ByteArrayInputStream}. */ private class RopeInputStream extends InputStream { // Iterates through the pieces of the rope private PieceIterator pieceIterator; // The current piece private LeafByteString currentPiece; // The size of the current piece private int currentPieceSize; // The index of the next byte to read in the current piece private int currentPieceIndex; // The offset of the start of the current piece in the rope byte string private int currentPieceOffsetInRope; // Offset in the buffer at which user called mark(); private int mark; public RopeInputStream() { initialize(); } /** * Reads up to {@code len} bytes of data into array {@code b}. * * <p>Note that {@link InputStream#read(byte[], int, int)} and {@link * ByteArrayInputStream#read(byte[], int, int)} behave inconsistently when reading 0 bytes at * EOF; the interface defines the return value to be 0 and the latter returns -1. We use the * latter behavior so that all ByteString streams are consistent. * * @return -1 if at EOF, otherwise the actual number of bytes read. 
*/ @Override public int read(byte[] b, int offset, int length) { if (b == null) { throw new NullPointerException(); } else if (offset < 0 || length < 0 || length > b.length - offset) { throw new IndexOutOfBoundsException(); } int bytesRead = readSkipInternal(b, offset, length); if (bytesRead == 0 && (length > 0 || availableInternal() == 0)) { // Modeling ByteArrayInputStream.read(byte[], int, int) behavior noted above: // It's ok to read 0 bytes on purpose (length == 0) from a stream that isn't at EOF. // It's not ok to try to read bytes (even 0 bytes) from a stream that is at EOF. return -1; } else { return bytesRead; } } @Override public long skip(long length) { if (length < 0) { throw new IndexOutOfBoundsException(); } else if (length > Integer.MAX_VALUE) { length = Integer.MAX_VALUE; } return readSkipInternal(null, 0, (int) length); } /** * Internal implementation of read and skip. If b != null, then read the next {@code length} * bytes into the buffer {@code b} at offset {@code offset}. If b == null, then skip the next * {@code length} bytes. * * <p>This method assumes that all error checking has already happened. * * <p>Returns the actual number of bytes read or skipped. */ private int readSkipInternal(byte[] b, int offset, int length) { int bytesRemaining = length; while (bytesRemaining > 0) { advanceIfCurrentPieceFullyRead(); if (currentPiece == null) { break; } else { // Copy the bytes from this piece. int currentPieceRemaining = currentPieceSize - currentPieceIndex; int count = Math.min(currentPieceRemaining, bytesRemaining); if (b != null) { currentPiece.copyTo(b, currentPieceIndex, offset, count); offset += count; } currentPieceIndex += count; bytesRemaining -= count; } } // Return the number of bytes read. 
return length - bytesRemaining; } @Override public int read() throws IOException { advanceIfCurrentPieceFullyRead(); if (currentPiece == null) { return -1; } else { return currentPiece.byteAt(currentPieceIndex++) & 0xFF; } } @Override public int available() throws IOException { return availableInternal(); } @Override public boolean markSupported() { return true; } @Override public void mark(int readAheadLimit) { // Set the mark to our position in the byte string mark = currentPieceOffsetInRope + currentPieceIndex; } @Override public synchronized void reset() { // Just reinitialize and skip the specified number of bytes. initialize(); readSkipInternal(null, 0, mark); } /** Common initialization code used by both the constructor and reset() */ private void initialize() { pieceIterator = new PieceIterator(RopeByteString.this); currentPiece = pieceIterator.next(); currentPieceSize = currentPiece.size(); currentPieceIndex = 0; currentPieceOffsetInRope = 0; } /** * Skips to the next piece if we have read all the data in the current piece. Sets currentPiece * to null if we have reached the end of the input. */ private void advanceIfCurrentPieceFullyRead() { if (currentPiece != null && currentPieceIndex == currentPieceSize) { // Generally, we can only go through this loop at most once, since // empty strings can't end up in a rope. But better to test. currentPieceOffsetInRope += currentPieceSize; currentPieceIndex = 0; if (pieceIterator.hasNext()) { currentPiece = pieceIterator.next(); currentPieceSize = currentPiece.size(); } else { currentPiece = null; currentPieceSize = 0; } } } /** Computes the number of bytes still available to read. */ private int availableInternal() { int bytesRead = currentPieceOffsetInRope + currentPieceIndex; return RopeByteString.this.size() - bytesRead; } } }
protocolbuffers/protobuf
java/core/src/main/java/com/google/protobuf/RopeByteString.java
911
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

package com.facebook.yoga;

import com.facebook.yoga.annotations.DoNotStrip;
import com.facebook.soloader.SoLoader;

/**
 * Raw JNI bindings for the Yoga layout engine. Every method here is a thin declaration of a
 * native entry point; the {@code long} "pointer" parameters are opaque handles to native
 * config/node objects — presumably created by the New/Clone calls and released by the matching
 * Free/Finalize calls (confirm against the native implementation).
 */
@DoNotStrip
public class YogaNative {
  static {
    // Load the native "yoga" library once, before any native method can be invoked.
    SoLoader.loadLibrary("yoga");
  }

  // JNI methods that use Vanilla JNI

  // YGConfig related
  static native long jni_YGConfigNewJNI();
  static native void jni_YGConfigFreeJNI(long nativePointer);
  static native void jni_YGConfigSetExperimentalFeatureEnabledJNI(long nativePointer, int feature, boolean enabled);
  static native void jni_YGConfigSetUseWebDefaultsJNI(long nativePointer, boolean useWebDefaults);
  static native void jni_YGConfigSetPointScaleFactorJNI(long nativePointer, float pixelsInPoint);
  static native void jni_YGConfigSetErrataJNI(long nativePointer, int errata);
  static native int jni_YGConfigGetErrataJNI(long nativePointer);
  static native void jni_YGConfigSetLoggerJNI(long nativePointer, YogaLogger logger);

  // YGNode related

  // Node lifecycle and tree structure.
  static native long jni_YGNodeNewJNI();
  static native long jni_YGNodeNewWithConfigJNI(long configPointer);
  static native void jni_YGNodeFinalizeJNI(long nativePointer);
  static native void jni_YGNodeResetJNI(long nativePointer);
  static native void jni_YGNodeInsertChildJNI(long nativePointer, long childPointer, int index);
  static native void jni_YGNodeSwapChildJNI(long nativePointer, long childPointer, int index);
  static native void jni_YGNodeSetIsReferenceBaselineJNI(long nativePointer, boolean isReferenceBaseline);
  static native boolean jni_YGNodeIsReferenceBaselineJNI(long nativePointer);
  static native void jni_YGNodeRemoveAllChildrenJNI(long nativePointer);
  static native void jni_YGNodeRemoveChildJNI(long nativePointer, long childPointer);
  static native void jni_YGNodeCalculateLayoutJNI(long nativePointer, float width, float height, long[] nativePointers, YogaNodeJNIBase[] nodes);
  static native void jni_YGNodeMarkDirtyJNI(long nativePointer);
  static native boolean jni_YGNodeIsDirtyJNI(long nativePointer);
  static native void jni_YGNodeCopyStyleJNI(long dstNativePointer, long srcNativePointer);

  // Style properties passed as int codes (enum-like values; exact mapping defined natively).
  static native int jni_YGNodeStyleGetDirectionJNI(long nativePointer);
  static native void jni_YGNodeStyleSetDirectionJNI(long nativePointer, int direction);
  static native int jni_YGNodeStyleGetFlexDirectionJNI(long nativePointer);
  static native void jni_YGNodeStyleSetFlexDirectionJNI(long nativePointer, int flexDirection);
  static native int jni_YGNodeStyleGetJustifyContentJNI(long nativePointer);
  static native void jni_YGNodeStyleSetJustifyContentJNI(long nativePointer, int justifyContent);
  static native int jni_YGNodeStyleGetAlignItemsJNI(long nativePointer);
  static native void jni_YGNodeStyleSetAlignItemsJNI(long nativePointer, int alignItems);
  static native int jni_YGNodeStyleGetAlignSelfJNI(long nativePointer);
  static native void jni_YGNodeStyleSetAlignSelfJNI(long nativePointer, int alignSelf);
  static native int jni_YGNodeStyleGetAlignContentJNI(long nativePointer);
  static native void jni_YGNodeStyleSetAlignContentJNI(long nativePointer, int alignContent);
  static native int jni_YGNodeStyleGetPositionTypeJNI(long nativePointer);
  static native void jni_YGNodeStyleSetPositionTypeJNI(long nativePointer, int positionType);
  static native int jni_YGNodeStyleGetFlexWrapJNI(long nativePointer);
  static native void jni_YGNodeStyleSetFlexWrapJNI(long nativePointer, int wrapType);
  static native int jni_YGNodeStyleGetOverflowJNI(long nativePointer);
  static native void jni_YGNodeStyleSetOverflowJNI(long nativePointer, int overflow);
  static native int jni_YGNodeStyleGetDisplayJNI(long nativePointer);
  static native void jni_YGNodeStyleSetDisplayJNI(long nativePointer, int display);

  // Flex factors and flex basis.
  static native float jni_YGNodeStyleGetFlexJNI(long nativePointer);
  static native void jni_YGNodeStyleSetFlexJNI(long nativePointer, float flex);
  static native float jni_YGNodeStyleGetFlexGrowJNI(long nativePointer);
  static native void jni_YGNodeStyleSetFlexGrowJNI(long nativePointer, float flexGrow);
  static native float jni_YGNodeStyleGetFlexShrinkJNI(long nativePointer);
  static native void jni_YGNodeStyleSetFlexShrinkJNI(long nativePointer, float flexShrink);
  static native long jni_YGNodeStyleGetFlexBasisJNI(long nativePointer);
  static native void jni_YGNodeStyleSetFlexBasisJNI(long nativePointer, float flexBasis);
  static native void jni_YGNodeStyleSetFlexBasisPercentJNI(long nativePointer, float percent);
  static native void jni_YGNodeStyleSetFlexBasisAutoJNI(long nativePointer);

  // Per-edge properties: margin, padding, border, position.
  static native long jni_YGNodeStyleGetMarginJNI(long nativePointer, int edge);
  static native void jni_YGNodeStyleSetMarginJNI(long nativePointer, int edge, float margin);
  static native void jni_YGNodeStyleSetMarginPercentJNI(long nativePointer, int edge, float percent);
  static native void jni_YGNodeStyleSetMarginAutoJNI(long nativePointer, int edge);
  static native long jni_YGNodeStyleGetPaddingJNI(long nativePointer, int edge);
  static native void jni_YGNodeStyleSetPaddingJNI(long nativePointer, int edge, float padding);
  static native void jni_YGNodeStyleSetPaddingPercentJNI(long nativePointer, int edge, float percent);
  static native float jni_YGNodeStyleGetBorderJNI(long nativePointer, int edge);
  static native void jni_YGNodeStyleSetBorderJNI(long nativePointer, int edge, float border);
  static native long jni_YGNodeStyleGetPositionJNI(long nativePointer, int edge);
  static native void jni_YGNodeStyleSetPositionJNI(long nativePointer, int edge, float position);
  static native void jni_YGNodeStyleSetPositionPercentJNI(long nativePointer, int edge, float percent);

  // Dimensions and their min/max bounds.
  static native long jni_YGNodeStyleGetWidthJNI(long nativePointer);
  static native void jni_YGNodeStyleSetWidthJNI(long nativePointer, float width);
  static native void jni_YGNodeStyleSetWidthPercentJNI(long nativePointer, float percent);
  static native void jni_YGNodeStyleSetWidthAutoJNI(long nativePointer);
  static native long jni_YGNodeStyleGetHeightJNI(long nativePointer);
  static native void jni_YGNodeStyleSetHeightJNI(long nativePointer, float height);
  static native void jni_YGNodeStyleSetHeightPercentJNI(long nativePointer, float percent);
  static native void jni_YGNodeStyleSetHeightAutoJNI(long nativePointer);
  static native long jni_YGNodeStyleGetMinWidthJNI(long nativePointer);
  static native void jni_YGNodeStyleSetMinWidthJNI(long nativePointer, float minWidth);
  static native void jni_YGNodeStyleSetMinWidthPercentJNI(long nativePointer, float percent);
  static native long jni_YGNodeStyleGetMinHeightJNI(long nativePointer);
  static native void jni_YGNodeStyleSetMinHeightJNI(long nativePointer, float minHeight);
  static native void jni_YGNodeStyleSetMinHeightPercentJNI(long nativePointer, float percent);
  static native long jni_YGNodeStyleGetMaxWidthJNI(long nativePointer);
  static native void jni_YGNodeStyleSetMaxWidthJNI(long nativePointer, float maxWidth);
  static native void jni_YGNodeStyleSetMaxWidthPercentJNI(long nativePointer, float percent);
  static native long jni_YGNodeStyleGetMaxHeightJNI(long nativePointer);
  // NOTE(review): parameter name "maxheight" breaks the camelCase convention used elsewhere;
  // kept as-is since it is cosmetic only.
  static native void jni_YGNodeStyleSetMaxHeightJNI(long nativePointer, float maxheight);
  static native void jni_YGNodeStyleSetMaxHeightPercentJNI(long nativePointer, float percent);

  // Aspect ratio and gutter gaps.
  static native float jni_YGNodeStyleGetAspectRatioJNI(long nativePointer);
  static native void jni_YGNodeStyleSetAspectRatioJNI(long nativePointer, float aspectRatio);
  static native float jni_YGNodeStyleGetGapJNI(long nativePointer, int gutter);
  static native void jni_YGNodeStyleSetGapJNI(long nativePointer, int gutter, float gapLength);
  static native void jni_YGNodeStyleSetGapPercentJNI(long nativePointer, int gutter, float gapLength);

  // Callback flags, bulk style updates, cloning.
  static native void jni_YGNodeSetHasMeasureFuncJNI(long nativePointer, boolean hasMeasureFunc);
  // NOTE(review): parameter is named hasMeasureFunc in the original although it toggles the
  // baseline func; kept byte-identical.
  static native void jni_YGNodeSetHasBaselineFuncJNI(long nativePointer, boolean hasMeasureFunc);
  static native void jni_YGNodeSetStyleInputsJNI(long nativePointer, float[] styleInputsArray, int size);
  static native long jni_YGNodeCloneJNI(long nativePointer);
  static native void jni_YGNodeSetAlwaysFormsContainingBlockJNI(long nativePointer, boolean alwaysFormContainingBlock);
}
facebook/react-native
packages/react-native/ReactAndroid/src/main/java/com/facebook/yoga/YogaNative.java
912
// UVa "Prime Factors"
// https://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=524
//
// Reads integers g with -2^31 < g < 2^31, g not in {-1, 0, 1}, until a terminating 0, and
// prints each one's prime factorisation in non-decreasing order, e.g.:
//   -190 = -1 x 2 x 5 x 19
//   196 = 2 x 2 x 7 x 7
//   197 = 197
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Scanner;

public class PrimeFactors {

    public static void main(String[] args) {
        Scanner in = new Scanner(System.in);
        // Build the prime sieve once and reuse it for every query.
        boolean[] isPrime = generatePrimeNumbers();
        for (int value = in.nextInt(); value != 0; value = in.nextInt()) {
            boolean negative = value < 0;
            // Factor the magnitude; the sign is restored when formatting.
            int magnitude = negative ? -value : value;
            formatOutput(magnitude, sieveOfEratosthenes(isPrime, magnitude), negative);
        }
    }

    /**
     * Trial-divides {@code number} by each sieved prime up to sqrt(number) and returns the
     * factors in the (non-decreasing) order they are found. Any cofactor left over after the
     * loop is itself prime and is appended last.
     *
     * @param isPrime sieve table covering at least 0..sqrt(number)
     * @param number  positive value to factor
     * @return list of prime factors whose product is {@code number}
     */
    public static List<Integer> sieveOfEratosthenes(boolean[] isPrime, int number) {
        List<Integer> factors = new ArrayList<Integer>();
        int limit = (int) Math.sqrt(number);
        for (int candidate = 2; candidate <= limit; candidate++) {
            if (!isPrime[candidate]) {
                continue;
            }
            while (number % candidate == 0) {
                factors.add(candidate);
                number = number / candidate;
            }
        }
        if (number != 1) {
            // Remaining cofactor is a prime larger than sqrt(original number).
            factors.add(number);
        }
        return factors;
    }

    /**
     * Prints one output line "g = f1 x f2 x ... x fn", with a leading "-1 x " factor when the
     * original input was negative.
     *
     * @param number       absolute value of the input
     * @param primeFactors its prime factors (any order; sorted here)
     * @param isNegative   whether the original input was negative
     */
    static void formatOutput(int number, List<Integer> primeFactors, boolean isNegative) {
        if (isNegative) {
            number = -number;
        }
        Collections.sort(primeFactors);
        StringBuilder line = new StringBuilder();
        line.append(number).append(" = ");
        if (isNegative) {
            line.append("-1 x ");
        }
        for (int i = 0; i < primeFactors.size(); i++) {
            if (i > 0) {
                line.append(" x ");
            }
            line.append(primeFactors.get(i));
        }
        System.out.println(line);
    }

    /**
     * Builds a sieve of Eratosthenes over 0..floor(sqrt(Integer.MAX_VALUE)), which is every
     * divisor the trial-division loop can ever need for 32-bit inputs.
     */
    static boolean[] generatePrimeNumbers() {
        int size = (int) Math.sqrt(Integer.MAX_VALUE) + 1;
        boolean[] isPrime = new boolean[size];
        // Indices 0 and 1 stay false; everything else starts as "maybe prime".
        java.util.Arrays.fill(isPrime, 2, size, true);
        for (int p = 2; p * p < size; p++) {
            if (isPrime[p]) {
                for (int multiple = p; multiple * p < size; multiple++) {
                    isPrime[multiple * p] = false;
                }
            }
        }
        return isPrime;
    }
}
kdn251/interviews
uva/PrimeFactors.java
914
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; public class Pdf { private final String base64EncodedPdf; public Pdf(String base64EncodedPdf) { this.base64EncodedPdf = base64EncodedPdf; } public String getContent() { return base64EncodedPdf; } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/Pdf.java
915
/* * Copyright (C) 2008 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.testing; import static com.google.common.base.Preconditions.checkArgument; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.annotations.J2ktIncompatible; import com.google.common.base.Ticker; import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.time.Duration; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; /** * A Ticker whose value can be advanced programmatically in test. * * <p>The ticker can be configured so that the time is incremented whenever {@link #read} is called: * see {@link #setAutoIncrementStep}. * * <p>This class is thread-safe. * * @author Jige Yu * @since 10.0 */ @ElementTypesAreNonnullByDefault @GwtCompatible public class FakeTicker extends Ticker { private final AtomicLong nanos = new AtomicLong(); private volatile long autoIncrementStepNanos; /** Advances the ticker value by {@code time} in {@code timeUnit}. */ @SuppressWarnings("GoodTime") // should accept a java.time.Duration @CanIgnoreReturnValue public FakeTicker advance(long time, TimeUnit timeUnit) { return advance(timeUnit.toNanos(time)); } /** Advances the ticker value by {@code nanoseconds}. 
*/ @SuppressWarnings("GoodTime") // should accept a java.time.Duration @CanIgnoreReturnValue public FakeTicker advance(long nanoseconds) { nanos.addAndGet(nanoseconds); return this; } /** * Advances the ticker value by {@code duration}. * * @since 28.0 */ @GwtIncompatible @J2ktIncompatible @CanIgnoreReturnValue @SuppressWarnings("Java7ApiChecker") // guava-android can rely on library desugaring now. public FakeTicker advance(Duration duration) { return advance(duration.toNanos()); } /** * Sets the increment applied to the ticker whenever it is queried. * * <p>The default behavior is to auto increment by zero. i.e: The ticker is left unchanged when * queried. */ @SuppressWarnings("GoodTime") // should accept a java.time.Duration @CanIgnoreReturnValue public FakeTicker setAutoIncrementStep(long autoIncrementStep, TimeUnit timeUnit) { checkArgument(autoIncrementStep >= 0, "May not auto-increment by a negative amount"); this.autoIncrementStepNanos = timeUnit.toNanos(autoIncrementStep); return this; } /** * Sets the increment applied to the ticker whenever it is queried. * * <p>The default behavior is to auto increment by zero. i.e: The ticker is left unchanged when * queried. * * @since 28.0 */ @GwtIncompatible @J2ktIncompatible @CanIgnoreReturnValue @SuppressWarnings("Java7ApiChecker") // guava-android can rely on library desugaring now. public FakeTicker setAutoIncrementStep(Duration autoIncrementStep) { return setAutoIncrementStep(autoIncrementStep.toNanos(), TimeUnit.NANOSECONDS); } @Override public long read() { return nanos.getAndAdd(autoIncrementStepNanos); } }
google/guava
guava-testlib/src/com/google/common/testing/FakeTicker.java
917
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package kafka.server;

import java.util.Optional;

import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.message.FetchResponseData.PartitionData;

/**
 * This interface defines the APIs needed to handle any state transitions related to tiering,
 * i.e. moving a fetcher's state for a partition when data has been offloaded to tiered storage.
 * Implementations decide both how the machine starts and whether/when it advances.
 */
public interface TierStateMachine {

    /**
     * Start the tier state machine for the provided topic partition.
     *
     * @param topicPartition the topic partition
     * @param currentFetchState the current PartitionFetchState which will
     *                          be used to derive the return value
     * @param fetchPartitionData the data from the fetch response that returned the offset moved to tiered storage error
     *
     * @return the new PartitionFetchState after the successful start of the
     *         tier state machine
     * @throws Exception declared broadly; implementations may fail for any reason while starting
     */
    PartitionFetchState start(TopicPartition topicPartition,
                              PartitionFetchState currentFetchState,
                              PartitionData fetchPartitionData) throws Exception;

    /**
     * Optionally advance the state of the tier state machine, based on the
     * current PartitionFetchState. The decision to advance the tier
     * state machine is implementation specific.
     *
     * @param topicPartition the topic partition
     * @param currentFetchState the current PartitionFetchState which will
     *                          be used to derive the return value
     *
     * @return the new PartitionFetchState if the tier state machine was advanced,
     *         otherwise, return the currentFetchState
     */
    Optional<PartitionFetchState> maybeAdvanceState(TopicPartition topicPartition,
                                                    PartitionFetchState currentFetchState);
}
apache/kafka
core/src/main/java/kafka/server/TierStateMachine.java
918
/* * Copyright (c) 2016-present, RxJava Contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See * the License for the specific language governing permissions and limitations under the License. */ /** * Base reactive classes: {@link io.reactivex.rxjava3.core.Flowable}, {@link io.reactivex.rxjava3.core.Observable}, * {@link io.reactivex.rxjava3.core.Single}, {@link io.reactivex.rxjava3.core.Maybe} and * {@link io.reactivex.rxjava3.core.Completable}; base reactive consumers; * other common base interfaces. * * <p>A library that enables subscribing to and composing asynchronous events and * callbacks.</p> * <p>The Flowable/Subscriber, Observable/Observer, Single/SingleObserver and * Completable/CompletableObserver interfaces and associated operators (in * the {@code io.reactivex.internal.operators} package) are inspired by the * Reactive Rx library in Microsoft .NET but designed and implemented on * the more advanced Reactive-Streams ( http://www.reactivestreams.org ) principles.</p> * <p> * More information can be found at <a * href="http://msdn.microsoft.com/en-us/data/gg577609">http://msdn.microsoft.com/en-us/data/gg577609</a>. 
* </p> * * * <p>Compared with the Microsoft implementation: * <ul> * <li>Observable == IObservable (base type)</li> * <li>Observer == IObserver (event consumer)</li> * <li>Disposable == IDisposable (resource/cancellation management)</li> * <li>Observable == Observable (factory methods)</li> * <li>Flowable == IAsyncEnumerable (backpressure)</li> * <li>Subscriber == IAsyncEnumerator</li> * </ul> * The Single and Completable reactive base types have no equivalent in Rx.NET as of 3.x. * * <p>Services which intend on exposing data asynchronously and wish * to allow reactive processing and composition can implement the * {@link io.reactivex.rxjava3.core.Flowable}, {@link io.reactivex.rxjava3.core.Observable}, {@link io.reactivex.rxjava3.core.Single}, * {@link io.reactivex.rxjava3.core.Maybe} or {@link io.reactivex.rxjava3.core.Completable} class which then allow * consumers to subscribe to them and receive events.</p> * <p>Usage examples can be found on the {@link io.reactivex.rxjava3.core.Flowable}/{@link io.reactivex.rxjava3.core.Observable} and {@link org.reactivestreams.Subscriber} classes.</p> */ package io.reactivex.rxjava3.core;
ReactiveX/RxJava
src/main/java/io/reactivex/rxjava3/core/package-info.java
919
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.search;

import java.io.IOException;

/**
 * Manages loading information about nested documents for a single index segment.
 *
 * <p>Callers first {@link #advance(int)} to a doc id; the accessor methods
 * ({@link #doc()}, {@link #rootDoc()}, {@link #nestedIdentity()}) then describe
 * the doc that was last advanced to.
 */
public interface LeafNestedDocuments {

    /**
     * Advance to a specific doc, and return its NestedIdentity, or {@code null} if not a child
     */
    SearchHit.NestedIdentity advance(int doc) throws IOException;

    /**
     * The current doc
     */
    int doc();

    /**
     * The ultimate parent of the current doc
     */
    int rootDoc();

    /**
     * The NestedIdentity of the current doc
     */
    SearchHit.NestedIdentity nestedIdentity();

    /**
     * An implementation of LeafNestedDocuments for use when there are no nested field mappers.
     *
     * <p>{@code advance} always reports "not a child" ({@code null}); the other accessors are
     * unsupported because there is never a current nested doc to describe.
     */
    LeafNestedDocuments NO_NESTED_MAPPERS = new LeafNestedDocuments() {
        @Override
        public SearchHit.NestedIdentity advance(int doc) {
            // No nested mappers: no doc can be a nested child.
            return null;
        }

        @Override
        public int doc() {
            throw new UnsupportedOperationException();
        }

        @Override
        public int rootDoc() {
            throw new UnsupportedOperationException();
        }

        @Override
        public SearchHit.NestedIdentity nestedIdentity() {
            throw new UnsupportedOperationException();
        }
    };
}
elastic/elasticsearch
server/src/main/java/org/elasticsearch/search/LeafNestedDocuments.java
920
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import java.awt.GraphicsEnvironment;
import java.io.*;
import java.text.ParseException;
import java.util.List;

import javax.swing.JFileChooser;

import ghidra.launch.*;
import ghidra.launch.JavaFinder.JavaFilter;

/**
 * Tool that helps gather information needed to launch Ghidra/GhidraServer. This is intended
 * to be a helper for the launch scripts so that most of the "heavy-lifting" can be done in Java
 * rather than in OS-specific scripts.
 *
 * <p>Results are communicated back to the scripts via STDOUT and the process exit code, so the
 * exact output format of each handler is part of this tool's contract.
 */
public class LaunchSupport {

    private static final int EXIT_SUCCESS = 0;
    private static final int EXIT_FAILURE = 1;

    /**
     * {@link LaunchSupport} entry point. Uses standard exit codes to tell the user if
     * the desired operation succeeded or failed.
     *
     * @param args [INSTALL_DIR] [-java_home | -jdk_home | -vmargs] [-ask | -save]
     * <ul>
     * <li><b>-java_home: </b> Get Java home (JDK or JRE)</li>
     * <li><b>-jdk_home: </b> Get Java home (JDK only)</li>
     * <li><b>-vmargs: </b> Get JVM arguments</li>
     * <li><b>-ask: </b> Interactively ask the user to choose a Java home</li>
     * <li><b>-save: </b> Save Java home to file for future use</li>
     * </ul>
     */
    public static void main(String[] args) {
        int exitCode = EXIT_FAILURE; // failure by default

        // Validate command line arguments
        if (args.length < 2 || args.length > 4) {
            System.err.println("LaunchSupport expected 2 to 4 arguments but got " + args.length);
            System.exit(exitCode);
        }

        // Parse command line arguments
        String installDirPath = args[0];
        String mode = args[1];
        boolean ask = false;
        boolean save = false;
        for (int i = 2; i < args.length; i++) {
            if (args[i].equals("-ask")) {
                ask = true;
            }
            else if (args[i].equals("-save")) {
                save = true;
            }
            else {
                System.err.println("LaunchSupport received illegal argument: " + args[i]);
                System.exit(exitCode);
            }
        }

        try {
            File installDir = new File(installDirPath).getCanonicalFile(); // change relative path to absolute
            JavaConfig javaConfig = new JavaConfig(installDir);
            JavaFinder javaFinder = JavaFinder.create();

            // Pass control to a mode-specific handler
            switch (mode.toLowerCase()) {
                case "-java_home":
                    exitCode = handleJavaHome(javaConfig, javaFinder, JavaFilter.ANY, ask, save);
                    break;
                case "-jdk_home":
                    exitCode = handleJavaHome(javaConfig, javaFinder, JavaFilter.JDK_ONLY, ask, save);
                    break;
                case "-vmargs":
                    exitCode = handleVmArgs(javaConfig);
                    break;
                default:
                    System.err.println("LaunchSupport received illegal argument: " + mode);
                    break;
            }
        }
        catch (Exception e) {
            // Report the failure; exitCode stays EXIT_FAILURE.
            System.err.println(e.getMessage());
        }

        System.exit(exitCode);
    }

    /**
     * Handles figuring out a Java home directory to use for the launch. If it is successfully
     * determined, an exit code that indicates success is returned.
     *
     * @param javaConfig The Java configuration that defines what we support.
     * @param javaFinder The Java finder.
     * @param javaFilter A filter used to restrict what kind of Java installations we search for.
     * @param ask True to interact with the user so they can specify a Java home directory.
     *   False if the Java home directory should be searched for and output on STDOUT once
     *   discovered.
     * @param save True if the determined Java home directory should get saved to a file.
     * @return A suggested exit code based on whether or not a Java home directory was
     *   successfully determined.
     * @throws IOException if there was a disk-related problem.
     */
    private static int handleJavaHome(JavaConfig javaConfig, JavaFinder javaFinder,
            JavaFilter javaFilter, boolean ask, boolean save) throws IOException {
        if (ask) {
            return askJavaHome(javaConfig, javaFinder, javaFilter);
        }
        return findJavaHome(javaConfig, javaFinder, javaFilter, save);
    }

    /**
     * Handles finding a Java home directory to use for the launch. If one is successfully
     * found, its path is printed to STDOUT and an exit code that indicates success is
     * returned. Otherwise, nothing is printed to STDOUT and an error exit code is returned.
     *
     * <p>Candidates are tried in priority order: the JAVA_HOME_OVERRIDE launch property, the
     * Java this tool is running on, the previously saved Java home, and finally all discovered
     * installations (newest first).
     *
     * @param javaConfig The Java configuration that defines what we support.
     * @param javaFinder The Java finder.
     * @param javaFilter A filter used to restrict what kind of Java installations we search for.
     * @param save True if the determined Java home directory should get saved to a file.
     * @return A suggested exit code based on whether or not a supported Java home directory was
     *   successfully determined.
     * @throws IOException if there was a problem saving the java home to disk.
     */
    private static int findJavaHome(JavaConfig javaConfig, JavaFinder javaFinder,
            JavaFilter javaFilter, boolean save) throws IOException {

        File javaHomeDir;
        LaunchProperties launchProperties = javaConfig.getLaunchProperties();

        // PRIORITY 1: JAVA_HOME_OVERRIDE property
        // If a valid java home override is specified in the launch properties, use that.
        // Someone presumably wants to force that specific version.
        javaHomeDir = launchProperties.getJavaHomeOverride();
        if (javaConfig.isSupportedJavaHomeDir(javaHomeDir, javaFilter)) {
            if (save) {
                javaConfig.saveJavaHome(javaHomeDir);
            }
            System.out.println(javaHomeDir);
            return EXIT_SUCCESS;
        }

        // PRIORITY 2: Java on PATH
        // This program (LaunchSupport) was started with the Java on the PATH. Try to use this one
        // next because it is most likely the one that is being upgraded on the user's system.
        javaHomeDir = javaFinder.findSupportedJavaHomeFromCurrentJavaHome(javaConfig, javaFilter);
        if (javaHomeDir != null) {
            if (save) {
                javaConfig.saveJavaHome(javaHomeDir);
            }
            System.out.println(javaHomeDir);
            return EXIT_SUCCESS;
        }

        // PRIORITY 3: Last used Java
        // Check to see if a prior launch resulted in that Java being saved. If so, try to use that.
        javaHomeDir = javaConfig.getSavedJavaHome();
        if (javaConfig.isSupportedJavaHomeDir(javaHomeDir, javaFilter)) {
            System.out.println(javaHomeDir);
            return EXIT_SUCCESS;
        }

        // PRIORITY 4: Find all supported Java installations, and use the newest.
        List<File> javaHomeDirs =
            javaFinder.findSupportedJavaHomeFromInstallations(javaConfig, javaFilter);
        if (!javaHomeDirs.isEmpty()) {
            javaHomeDir = javaHomeDirs.iterator().next();
            if (save) {
                javaConfig.saveJavaHome(javaHomeDir);
            }
            System.out.println(javaHomeDir);
            return EXIT_SUCCESS;
        }

        return EXIT_FAILURE;
    }

    /**
     * Handles interacting with the user to choose a Java home directory to use for the launch.
     * If a valid Java home directory was successfully determined, it is saved to the user's
     * Java home save file, and an exit code that indicates success is returned.
     *
     * <p>Loops until a supported Java home is entered (via console or, when available, a Swing
     * directory chooser); there is no cancel path other than ending the process.
     *
     * @param javaConfig The Java configuration that defines what we support.
     * @param javaFinder The Java finder.
     * @param javaFilter A filter used to restrict what kind of Java installations we search for.
     *
     * @return A suggested exit code based on whether or not a valid Java home directory was
     *   successfully chosen.
     * @throws IOException if there was a problem interacting with the user, or saving the java
     *   home location to disk.
     */
    private static int askJavaHome(JavaConfig javaConfig, JavaFinder javaFinder,
            JavaFilter javaFilter) throws IOException {

        String javaName = javaFilter.equals(JavaFilter.JDK_ONLY) ? "JDK" : "Java";
        String javaRange;
        int min = javaConfig.getMinSupportedJava();
        int max = javaConfig.getMaxSupportedJava();
        if (min == max) {
            javaRange = min + "";
        }
        else if (max == 0) {
            // max == 0 is treated as "no upper bound"
            javaRange = min + "+";
        }
        else {
            javaRange = min + "-" + max;
        }

        System.out.println("******************************************************************");
        System.out.println(javaName + " " + javaRange + " (" +
            javaConfig.getSupportedArchitecture() + "-bit) could not be found and must be manually chosen!");
        System.out.println("******************************************************************");

        File javaHomeDir = null;
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        while (true) {
            // The file-chooser dialog is only usable with a display, and is skipped for the
            // Mac-specific finder.
            boolean supportsDialog =
                !GraphicsEnvironment.isHeadless() && !(javaFinder instanceof MacJavaFinder);
            System.out.print("Enter path to " + javaName + " home directory");
            System.out.print(supportsDialog ? " (ENTER for dialog): " : ": ");
            String line = in.readLine().trim();
            if (supportsDialog && line.isEmpty()) {
                System.out.println("Opening selection dialog...");
                JFileChooser chooser = new JFileChooser();
                chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
                chooser.setDialogTitle("Choose a " + javaName + " home directory");
                if (chooser.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) {
                    javaHomeDir = chooser.getSelectedFile();
                }
            }
            else if (!line.isEmpty()) {
                javaHomeDir = new File(line);
            }
            else {
                // Empty input with no dialog support: re-prompt.
                continue;
            }
            try {
                JavaVersion javaVersion = javaConfig.getJavaVersion(javaHomeDir, javaFilter);
                if (javaConfig.isJavaVersionSupported(javaVersion)) {
                    break;
                }
                System.out.println("Java version " + javaVersion + " is outside of supported range: [" +
                    javaRange + " " + javaConfig.getSupportedArchitecture() + "-bit]");
            }
            catch (FileNotFoundException e) {
                System.out.println(
                    "Not a valid " + javaName + " home directory. " + e.getMessage() + "!");
            }
            catch (IOException | ParseException e) {
                System.out.println("Failed to verify Java version. " + e.getMessage() + "!");
            }
        }

        File javaHomeSaveFile = javaConfig.saveJavaHome(javaHomeDir);
        System.out.println("Saved changes to " + javaHomeSaveFile);
        return EXIT_SUCCESS;
    }

    /**
     * Handles getting the VM arguments. If they are successfully determined, they are printed
     * to STDOUT as a new-line delimited string that can be parsed and added to the command line,
     * and an exit code that indicates success is returned.
     *
     * @param javaConfig The Java configuration that defines what we support.
     * @return A suggested exit code based on whether or not the VM arguments were successfully
     *   gotten.
     */
    private static int handleVmArgs(JavaConfig javaConfig) {
        if (javaConfig.getLaunchProperties() == null) {
            // NOTE(review): this error message goes to STDOUT, which callers parse as VM args —
            // confirm whether it was intended for STDERR.
            System.out.println("Launch properties file was not specified!");
            return EXIT_FAILURE;
        }

        // Force newline style to make cross-platform parsing consistent
        javaConfig.getLaunchProperties().getVmArgList().forEach(e -> System.out.print(e + "\r\n"));
        return EXIT_SUCCESS;
    }
}
NationalSecurityAgency/ghidra
GhidraBuild/LaunchSupport/src/main/java/LaunchSupport.java
921
/*
 * Copyright 2010-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jetbrains.kotlin.cli.common;

import com.intellij.openapi.util.SystemInfo;
import kotlin.jvm.JvmClassMappingKt;
import kotlin.reflect.KCallable;
import kotlin.reflect.KClass;
import kotlin.reflect.KProperty1;
import kotlin.reflect.jvm.ReflectJvmMapping;
import kotlin.text.StringsKt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.cli.common.arguments.Argument;
import org.jetbrains.kotlin.cli.common.arguments.CommonToolArguments;
import org.jetbrains.kotlin.cli.common.arguments.ParseCommandLineArgumentsKt;
import org.jetbrains.kotlin.cli.common.arguments.PreprocessCommandLineArgumentsKt;

import java.lang.reflect.Field;

/**
 * Renders the command-line help ("usage") text for a CLI tool by reflecting over the
 * {@link Argument}-annotated properties of its arguments class.
 */
public class Usage {
    public static final String BAT_DELIMITER_CHARACTERS_NOTE =
            "Note: on Windows, arguments that contain delimiter characters (whitespace, =, ;, ,) need to be surrounded with double quotes (\").";

    // The magic number 29 corresponds to the similar padding width in javac and scalac command line compilers
    private static final int OPTION_NAME_PADDING_WIDTH = 29;

    /**
     * Builds the full usage text for {@code tool}.
     *
     * <p>When {@code arguments.getExtraHelp()} is set, only "advanced" options are listed;
     * otherwise regular options plus the -J and argfile pseudo-options are shown.
     *
     * @param tool the CLI tool whose executable name heads the usage line
     * @param arguments the arguments instance whose annotated properties define the options
     * @return the complete, newline-terminated usage text
     */
    @NotNull
    public static <A extends CommonToolArguments> String render(@NotNull CLITool<A> tool, @NotNull A arguments) {
        boolean extraHelp = arguments.getExtraHelp();
        StringBuilder sb = new StringBuilder();
        appendln(sb, "Usage: " + tool.executableScriptFileName() + " <options> <source files>");
        appendln(sb, "where " + (extraHelp ? "advanced" : "possible") + " options include:");
        KClass<? extends CommonToolArguments> kClass = JvmClassMappingKt.getKotlinClass(arguments.getClass());
        for (KCallable<?> callable : kClass.getMembers()) {
            // Only property members can carry @Argument; skip functions etc.
            if (!(callable instanceof KProperty1)) continue;
            propertyUsage(sb, (KProperty1<?, ?>) callable, extraHelp);
        }
        if (extraHelp) {
            appendln(sb, "");
            appendln(sb, "Advanced options are non-standard and may be changed or removed without any notice.");
        }
        else {
            renderOptionJUsage(sb);
            renderArgfileUsage(sb);
        }
        if (SystemInfo.isWindows) {
            appendln(sb, "");
            appendln(sb, BAT_DELIMITER_CHARACTERS_NOTE);
        }
        if (!extraHelp && tool instanceof CLICompiler<?>) {
            appendln(sb, "");
            appendln(sb, "For details, see https://kotl.in/cli");
        }
        return sb.toString();
    }

    /**
     * Appends one option line for an {@code @Argument}-annotated property: the option name
     * (with short name and value description), padded to {@link #OPTION_NAME_PADDING_WIDTH},
     * followed by its description.
     *
     * <p>Internal options are never shown; advanced options are shown only when
     * {@code extraHelp} is set (and regular options only when it is not).
     */
    private static void propertyUsage(@NotNull StringBuilder sb, @NotNull KProperty1<?, ?> property, boolean extraHelp) {
        Field field = ReflectJvmMapping.getJavaField(property);
        Argument argument = field.getAnnotation(Argument.class);
        if (argument == null) return;

        if (ParseCommandLineArgumentsKt.isInternal(argument)) return;
        if (extraHelp != ParseCommandLineArgumentsKt.isAdvanced(argument)) return;

        int startLength = sb.length();
        sb.append("  ");
        sb.append(argument.value());
        if (!argument.shortName().isEmpty()) {
            sb.append(" (");
            sb.append(argument.shortName());
            sb.append(")");
        }
        if (!argument.valueDescription().isEmpty()) {
            // Advanced options use "-Xopt=value" syntax; regular ones "-opt value".
            sb.append(ParseCommandLineArgumentsKt.isAdvanced(argument) ? "=" : " ");
            sb.append(argument.valueDescription());
        }
        int margin = startLength + OPTION_NAME_PADDING_WIDTH - 1;
        if (sb.length() >= margin + 5) { // Break the line if it's too long
            sb.append("\n");
            margin += sb.length() - startLength;
        }
        while (sb.length() < margin) {
            sb.append(" ");
        }
        sb.append(" ");
        appendln(sb, argument.description().replace("\n", "\n" + StringsKt.repeat(" ", OPTION_NAME_PADDING_WIDTH)));
    }

    /** Appends the line describing the {@code -J<option>} JVM pass-through pseudo-option. */
    private static void renderOptionJUsage(@NotNull StringBuilder sb) {
        int descriptionStart = sb.length() + OPTION_NAME_PADDING_WIDTH;
        sb.append("  -J<option>");
        while (sb.length() < descriptionStart) {
            sb.append(" ");
        }
        appendln(sb, "Pass an option directly to JVM.");
    }

    /** Appends the line describing the {@code @<argfile>} argument-file pseudo-option. */
    private static void renderArgfileUsage(@NotNull StringBuilder sb) {
        int descriptionStart = sb.length() + OPTION_NAME_PADDING_WIDTH;
        sb.append("  ");
        sb.append(PreprocessCommandLineArgumentsKt.ARGFILE_ARGUMENT);
        sb.append("<argfile>");
        while (sb.length() < descriptionStart) {
            sb.append(" ");
        }
        appendln(sb, "Read compiler arguments and file paths from the given file.");
    }

    /** Appends {@code string} followed by a single {@code '\n'}. */
    private static void appendln(@NotNull StringBuilder sb, @NotNull String string) {
        sb.append(string);
        sb.append('\n');
    }
}
JetBrains/kotlin
compiler/cli/src/org/jetbrains/kotlin/cli/common/Usage.java
922
package jadx.api; import java.io.Closeable; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.function.Predicate; import java.util.stream.Collectors; import org.jetbrains.annotations.ApiStatus; import org.jetbrains.annotations.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import jadx.api.metadata.ICodeAnnotation; import jadx.api.metadata.ICodeNodeRef; import jadx.api.metadata.annotations.NodeDeclareRef; import jadx.api.metadata.annotations.VarNode; import jadx.api.metadata.annotations.VarRef; import jadx.api.plugins.CustomResourcesLoader; import jadx.api.plugins.JadxPlugin; import jadx.api.plugins.events.IJadxEvents; import jadx.api.plugins.input.ICodeLoader; import jadx.api.plugins.input.JadxCodeInput; import jadx.api.plugins.pass.JadxPass; import jadx.api.plugins.pass.types.JadxAfterLoadPass; import jadx.api.plugins.pass.types.JadxPassType; import jadx.api.utils.tasks.ITaskExecutor; import jadx.core.Jadx; import jadx.core.dex.attributes.AFlag; import jadx.core.dex.nodes.ClassNode; import jadx.core.dex.nodes.FieldNode; import jadx.core.dex.nodes.MethodNode; import jadx.core.dex.nodes.PackageNode; import jadx.core.dex.nodes.RootNode; import jadx.core.dex.visitors.SaveCode; import jadx.core.export.ExportGradleTask; import jadx.core.plugins.JadxPluginManager; import jadx.core.plugins.PluginContext; import jadx.core.plugins.events.JadxEventsImpl; import jadx.core.utils.DecompilerScheduler; import jadx.core.utils.Utils; import jadx.core.utils.exceptions.JadxRuntimeException; import jadx.core.utils.files.FileUtils; import jadx.core.utils.tasks.TaskExecutor; import jadx.core.xmlgen.BinaryXMLParser; import jadx.core.xmlgen.ResourcesSaver; /** * Jadx API usage example: * * <pre> * <code> * * 
JadxArgs args = new JadxArgs(); * args.getInputFiles().add(new File("test.apk")); * args.setOutDir(new File("jadx-test-output")); * try (JadxDecompiler jadx = new JadxDecompiler(args)) { * jadx.load(); * jadx.save(); * } * </code> * </pre> * <p> * Instead of 'save()' you can iterate over decompiled classes: * * <pre> * <code> * * for(JavaClass cls : jadx.getClasses()) { * System.out.println(cls.getCode()); * } * </code> * </pre> */ public final class JadxDecompiler implements Closeable { private static final Logger LOG = LoggerFactory.getLogger(JadxDecompiler.class); private final JadxArgs args; private final JadxPluginManager pluginManager = new JadxPluginManager(this); private final List<ICodeLoader> loadedInputs = new ArrayList<>(); private RootNode root; private List<JavaClass> classes; private List<ResourceFile> resources; private BinaryXMLParser binaryXmlParser; private final IDecompileScheduler decompileScheduler = new DecompilerScheduler(); private final JadxEventsImpl events = new JadxEventsImpl(); private final ResourcesLoader resourcesLoader = new ResourcesLoader(this); private final List<ICodeLoader> customCodeLoaders = new ArrayList<>(); private final List<CustomResourcesLoader> customResourcesLoaders = new ArrayList<>(); private final Map<JadxPassType, List<JadxPass>> customPasses = new HashMap<>(); public JadxDecompiler() { this(new JadxArgs()); } public JadxDecompiler(JadxArgs args) { this.args = args; } public void load() { reset(); JadxArgsValidator.validate(this); LOG.info("loading ..."); loadPlugins(); loadInputFiles(); root = new RootNode(args); root.init(); root.setDecompilerRef(this); root.mergePasses(customPasses); root.loadClasses(loadedInputs); root.initClassPath(); root.loadResources(resourcesLoader, getResources()); root.runPreDecompileStage(); root.initPasses(); loadFinished(); } public void reloadPasses() { LOG.info("reloading (passes only) ..."); customPasses.clear(); root.resetPasses(); events.reset(); loadPlugins(); 
root.mergePasses(customPasses); root.restartVisitors(); root.initPasses(); loadFinished(); } private void loadInputFiles() { loadedInputs.clear(); List<Path> inputPaths = Utils.collectionMap(args.getInputFiles(), File::toPath); List<Path> inputFiles = FileUtils.expandDirs(inputPaths); long start = System.currentTimeMillis(); for (PluginContext plugin : pluginManager.getResolvedPluginContexts()) { for (JadxCodeInput codeLoader : plugin.getCodeInputs()) { try { ICodeLoader loader = codeLoader.loadFiles(inputFiles); if (loader != null && !loader.isEmpty()) { loadedInputs.add(loader); } } catch (Exception e) { throw new JadxRuntimeException("Failed to load code for plugin: " + plugin, e); } } } loadedInputs.addAll(customCodeLoaders); if (LOG.isDebugEnabled()) { LOG.debug("Loaded using {} inputs plugin in {} ms", loadedInputs.size(), System.currentTimeMillis() - start); } } private void reset() { root = null; classes = null; resources = null; binaryXmlParser = null; events.reset(); } @Override public void close() { reset(); closeInputs(); closeLoaders(); args.close(); } private void closeInputs() { loadedInputs.forEach(load -> { try { load.close(); } catch (Exception e) { LOG.error("Failed to close input", e); } }); loadedInputs.clear(); } private void closeLoaders() { for (CustomResourcesLoader resourcesLoader : customResourcesLoaders) { try { resourcesLoader.close(); } catch (Exception e) { LOG.error("Failed to close resource loader: " + resourcesLoader, e); } } customResourcesLoaders.clear(); } private void loadPlugins() { pluginManager.providesSuggestion("java-input", args.isUseDxInput() ? 
"java-convert" : "java-input"); pluginManager.load(args.getPluginLoader()); if (LOG.isDebugEnabled()) { LOG.debug("Resolved plugins: {}", pluginManager.getResolvedPluginContexts()); } pluginManager.initResolved(); if (LOG.isDebugEnabled()) { List<String> passes = customPasses.values().stream().flatMap(Collection::stream) .map(p -> p.getInfo().getName()).collect(Collectors.toList()); LOG.debug("Loaded custom passes: {} {}", passes.size(), passes); } } private void loadFinished() { LOG.debug("Load finished"); List<JadxPass> list = customPasses.get(JadxAfterLoadPass.TYPE); if (list != null) { for (JadxPass pass : list) { ((JadxAfterLoadPass) pass).init(this); } } } @SuppressWarnings("unused") public void registerPlugin(JadxPlugin plugin) { pluginManager.register(plugin); } public static String getVersion() { return Jadx.getVersion(); } public void save() { save(!args.isSkipSources(), !args.isSkipResources()); } public interface ProgressListener { void progress(long done, long total); } @SuppressWarnings("BusyWait") public void save(int intervalInMillis, ProgressListener listener) { try { ITaskExecutor tasks = getSaveTaskExecutor(); tasks.execute(); long total = tasks.getTasksCount(); while (tasks.isRunning()) { listener.progress(tasks.getProgress(), total); Thread.sleep(intervalInMillis); } } catch (InterruptedException e) { LOG.error("Save interrupted", e); Thread.currentThread().interrupt(); } } public void saveSources() { save(true, false); } public void saveResources() { save(false, true); } private void save(boolean saveSources, boolean saveResources) { ITaskExecutor executor = getSaveTasks(saveSources, saveResources); executor.execute(); executor.awaitTermination(); } public ITaskExecutor getSaveTaskExecutor() { return getSaveTasks(!args.isSkipSources(), !args.isSkipResources()); } @Deprecated(forRemoval = true) public ExecutorService getSaveExecutor() { ITaskExecutor executor = getSaveTaskExecutor(); executor.execute(); return executor.getInternalExecutor(); } 
@Deprecated(forRemoval = true) public List<Runnable> getSaveTasks() { return Collections.singletonList(this::save); } private TaskExecutor getSaveTasks(boolean saveSources, boolean saveResources) { if (root == null) { throw new JadxRuntimeException("No loaded files"); } File sourcesOutDir; File resOutDir; ExportGradleTask gradleExportTask; if (args.isExportAsGradleProject()) { gradleExportTask = new ExportGradleTask(resources, root, args.getOutDir()); gradleExportTask.init(); sourcesOutDir = gradleExportTask.getSrcOutDir(); resOutDir = gradleExportTask.getResOutDir(); } else { sourcesOutDir = args.getOutDirSrc(); resOutDir = args.getOutDirRes(); gradleExportTask = null; } TaskExecutor executor = new TaskExecutor(); executor.setThreadsCount(args.getThreadsCount()); if (saveResources) { // save resources first because decompilation can stop or fail appendResourcesSaveTasks(executor, resOutDir); } if (saveSources) { appendSourcesSave(executor, sourcesOutDir); } if (gradleExportTask != null) { executor.addSequentialTask(gradleExportTask); } return executor; } private void appendResourcesSaveTasks(ITaskExecutor executor, File outDir) { if (args.isSkipFilesSave()) { return; } // process AndroidManifest.xml first to load complete resource ids table for (ResourceFile resourceFile : getResources()) { if (resourceFile.getType() == ResourceType.MANIFEST) { new ResourcesSaver(outDir, resourceFile).run(); break; } } Set<String> inputFileNames = args.getInputFiles().stream() .map(File::getAbsolutePath) .collect(Collectors.toSet()); List<Runnable> tasks = new ArrayList<>(); for (ResourceFile resourceFile : getResources()) { ResourceType resType = resourceFile.getType(); if (resType == ResourceType.MANIFEST) { // already processed continue; } if (resType != ResourceType.ARSC && inputFileNames.contains(resourceFile.getOriginalName())) { // ignore resource made from input file continue; } tasks.add(new ResourcesSaver(outDir, resourceFile)); } executor.addParallelTasks(tasks); } 
private void appendSourcesSave(ITaskExecutor executor, File outDir) { List<JavaClass> classes = getClasses(); List<JavaClass> processQueue = filterClasses(classes); List<List<JavaClass>> batches; try { batches = decompileScheduler.buildBatches(processQueue); } catch (Exception e) { throw new JadxRuntimeException("Decompilation batches build failed", e); } List<Runnable> decompileTasks = new ArrayList<>(batches.size()); for (List<JavaClass> decompileBatch : batches) { decompileTasks.add(() -> { for (JavaClass cls : decompileBatch) { try { ClassNode clsNode = cls.getClassNode(); ICodeInfo code = clsNode.getCode(); SaveCode.save(outDir, clsNode, code); } catch (Exception e) { LOG.error("Error saving class: {}", cls, e); } } }); } executor.addParallelTasks(decompileTasks); } private List<JavaClass> filterClasses(List<JavaClass> classes) { Predicate<String> classFilter = args.getClassFilter(); List<JavaClass> list = new ArrayList<>(classes.size()); for (JavaClass cls : classes) { ClassNode clsNode = cls.getClassNode(); if (clsNode.contains(AFlag.DONT_GENERATE)) { continue; } if (classFilter != null && !classFilter.test(clsNode.getClassInfo().getFullName())) { if (!args.isIncludeDependencies()) { clsNode.add(AFlag.DONT_GENERATE); } continue; } list.add(cls); } return list; } public List<JavaClass> getClasses() { if (root == null) { return Collections.emptyList(); } if (classes == null) { List<ClassNode> classNodeList = root.getClasses(); List<JavaClass> clsList = new ArrayList<>(classNodeList.size()); for (ClassNode classNode : classNodeList) { if (classNode.contains(AFlag.DONT_GENERATE)) { continue; } if (!classNode.getClassInfo().isInner()) { clsList.add(convertClassNode(classNode)); } } classes = Collections.unmodifiableList(clsList); } return classes; } public List<JavaClass> getClassesWithInners() { return Utils.collectionMap(root.getClasses(), this::convertClassNode); } public synchronized List<ResourceFile> getResources() { if (resources == null) { if (root == 
null) { return Collections.emptyList(); } resources = resourcesLoader.load(root); } return resources; } public List<JavaPackage> getPackages() { return Utils.collectionMap(root.getPackages(), this::convertPackageNode); } public int getErrorsCount() { if (root == null) { return 0; } return root.getErrorsCounter().getErrorCount(); } public int getWarnsCount() { if (root == null) { return 0; } return root.getErrorsCounter().getWarnsCount(); } public void printErrorsReport() { if (root == null) { return; } root.getClsp().printMissingClasses(); root.getErrorsCounter().printReport(); } /** * Internal API. Not Stable! */ @ApiStatus.Internal public RootNode getRoot() { return root; } synchronized BinaryXMLParser getBinaryXmlParser() { if (binaryXmlParser == null) { binaryXmlParser = new BinaryXMLParser(root); } return binaryXmlParser; } /** * Get JavaClass by ClassNode without loading and decompilation */ @ApiStatus.Internal synchronized JavaClass convertClassNode(ClassNode cls) { JavaClass javaClass = cls.getJavaNode(); if (javaClass == null) { javaClass = cls.isInner() ? 
new JavaClass(cls, convertClassNode(cls.getParentClass())) : new JavaClass(cls, this); cls.setJavaNode(javaClass); } return javaClass; } @ApiStatus.Internal synchronized JavaField convertFieldNode(FieldNode fld) { JavaField javaField = fld.getJavaNode(); if (javaField == null) { JavaClass parentCls = convertClassNode(fld.getParentClass()); javaField = new JavaField(parentCls, fld); fld.setJavaNode(javaField); } return javaField; } @ApiStatus.Internal synchronized JavaMethod convertMethodNode(MethodNode mth) { JavaMethod javaMethod = mth.getJavaNode(); if (javaMethod == null) { javaMethod = new JavaMethod(convertClassNode(mth.getParentClass()), mth); mth.setJavaNode(javaMethod); } return javaMethod; } @ApiStatus.Internal synchronized JavaPackage convertPackageNode(PackageNode pkg) { JavaPackage foundPkg = pkg.getJavaNode(); if (foundPkg != null) { return foundPkg; } List<JavaClass> clsList = Utils.collectionMap(pkg.getClasses(), this::convertClassNode); int subPkgsCount = pkg.getSubPackages().size(); List<JavaPackage> subPkgs = subPkgsCount == 0 ? Collections.emptyList() : new ArrayList<>(subPkgsCount); JavaPackage javaPkg = new JavaPackage(pkg, clsList, subPkgs); if (subPkgsCount != 0) { // add subpackages after parent to avoid endless recursion for (PackageNode subPackage : pkg.getSubPackages()) { subPkgs.add(convertPackageNode(subPackage)); } } pkg.setJavaNode(javaPkg); return javaPkg; } @Nullable public JavaClass searchJavaClassByOrigFullName(String fullName) { return getRoot().getClasses().stream() .filter(cls -> cls.getClassInfo().getFullName().equals(fullName)) .findFirst() .map(this::convertClassNode) .orElse(null); } @Nullable public ClassNode searchClassNodeByOrigFullName(String fullName) { return getRoot().getClasses().stream() .filter(cls -> cls.getClassInfo().getFullName().equals(fullName)) .findFirst() .orElse(null); } // returns parent if class contains DONT_GENERATE flag. 
@Nullable public JavaClass searchJavaClassOrItsParentByOrigFullName(String fullName) { ClassNode node = getRoot().getClasses().stream() .filter(cls -> cls.getClassInfo().getFullName().equals(fullName)) .findFirst() .orElse(null); if (node != null) { if (node.contains(AFlag.DONT_GENERATE)) { return convertClassNode(node.getTopParentClass()); } else { return convertClassNode(node); } } return null; } @Nullable public JavaClass searchJavaClassByAliasFullName(String fullName) { return getRoot().getClasses().stream() .filter(cls -> cls.getClassInfo().getAliasFullName().equals(fullName)) .findFirst() .map(this::convertClassNode) .orElse(null); } @Nullable public JavaNode getJavaNodeByRef(ICodeNodeRef ann) { return getJavaNodeByCodeAnnotation(null, ann); } @Nullable public JavaNode getJavaNodeByCodeAnnotation(@Nullable ICodeInfo codeInfo, @Nullable ICodeAnnotation ann) { if (ann == null) { return null; } switch (ann.getAnnType()) { case CLASS: return convertClassNode((ClassNode) ann); case METHOD: return convertMethodNode((MethodNode) ann); case FIELD: return convertFieldNode((FieldNode) ann); case DECLARATION: return getJavaNodeByCodeAnnotation(codeInfo, ((NodeDeclareRef) ann).getNode()); case VAR: return resolveVarNode((VarNode) ann); case VAR_REF: return resolveVarRef(codeInfo, (VarRef) ann); case OFFSET: // offset annotation don't have java node object return null; default: throw new JadxRuntimeException("Unknown annotation type: " + ann.getAnnType() + ", class: " + ann.getClass()); } } private JavaVariable resolveVarNode(VarNode varNode) { JavaMethod javaNode = convertMethodNode(varNode.getMth()); return new JavaVariable(javaNode, varNode); } @Nullable private JavaVariable resolveVarRef(ICodeInfo codeInfo, VarRef varRef) { if (codeInfo == null) { throw new JadxRuntimeException("Missing code info for resolve VarRef: " + varRef); } ICodeAnnotation varNodeAnn = codeInfo.getCodeMetadata().getAt(varRef.getRefPos()); if (varNodeAnn != null && varNodeAnn.getAnnType() == 
ICodeAnnotation.AnnType.DECLARATION) { ICodeNodeRef nodeRef = ((NodeDeclareRef) varNodeAnn).getNode(); if (nodeRef.getAnnType() == ICodeAnnotation.AnnType.VAR) { return resolveVarNode((VarNode) nodeRef); } } return null; } List<JavaNode> convertNodes(Collection<? extends ICodeNodeRef> nodesList) { return nodesList.stream() .map(this::getJavaNodeByRef) .filter(Objects::nonNull) .collect(Collectors.toList()); } @Nullable public JavaNode getJavaNodeAtPosition(ICodeInfo codeInfo, int pos) { ICodeAnnotation ann = codeInfo.getCodeMetadata().getAt(pos); return getJavaNodeByCodeAnnotation(codeInfo, ann); } @Nullable public JavaNode getClosestJavaNode(ICodeInfo codeInfo, int pos) { ICodeAnnotation ann = codeInfo.getCodeMetadata().getClosestUp(pos); return getJavaNodeByCodeAnnotation(codeInfo, ann); } @Nullable public JavaNode getEnclosingNode(ICodeInfo codeInfo, int pos) { ICodeNodeRef obj = codeInfo.getCodeMetadata().getNodeAt(pos); if (obj == null) { return null; } return getJavaNodeByRef(obj); } public void reloadCodeData() { root.notifyCodeDataListeners(); } public JadxArgs getArgs() { return args; } public JadxPluginManager getPluginManager() { return pluginManager; } public IDecompileScheduler getDecompileScheduler() { return decompileScheduler; } public IJadxEvents events() { return events; } public void addCustomCodeLoader(ICodeLoader customCodeLoader) { customCodeLoaders.add(customCodeLoader); } public List<ICodeLoader> getCustomCodeLoaders() { return customCodeLoaders; } public void addCustomResourcesLoader(CustomResourcesLoader loader) { if (customResourcesLoaders.contains(loader)) { return; } customResourcesLoaders.add(loader); } public List<CustomResourcesLoader> getCustomResourcesLoaders() { return customResourcesLoaders; } public void addCustomPass(JadxPass pass) { customPasses.computeIfAbsent(pass.getPassType(), l -> new ArrayList<>()).add(pass); } public ResourcesLoader getResourcesLoader() { return resourcesLoader; } @Override public String toString() { 
return "jadx decompiler " + getVersion(); } }
skylot/jadx
jadx-core/src/main/java/jadx/api/JadxDecompiler.java
923
package mindustry.ctype; import arc.files.*; import arc.util.*; import mindustry.*; import mindustry.mod.Mods.*; /** Base class for a content type that is loaded in {@link mindustry.core.ContentLoader}. */ public abstract class Content implements Comparable<Content>{ public short id; /** Info on which mod this content was loaded from. */ public ModContentInfo minfo = new ModContentInfo(); public Content(){ this.id = (short)Vars.content.getBy(getContentType()).size; Vars.content.handleContent(this); } /** * Returns the type name of this piece of content. * This should return the same value for all instances of this content type. */ public abstract ContentType getContentType(); /** Called after all content and modules are created. Do not use to load regions or texture data! */ public void init(){} /** * Called after all content is created, only on non-headless versions. * Use for loading regions or other image data. */ public void load(){} /** Called right before load(). */ public void loadIcon(){} /** @return whether an error occurred during mod loading. */ public boolean hasErrored(){ return minfo.error != null; } /** @return whether this is content from the base game. */ public boolean isVanilla(){ return minfo.mod == null; } /** @return whether this content is from a mod. */ public boolean isModded(){ return !isVanilla(); } @Override public int compareTo(Content c){ return Integer.compare(id, c.id); } @Override public String toString(){ return getContentType().name() + "#" + id; } public static class ModContentInfo{ /** The mod that loaded this piece of content. */ public @Nullable LoadedMod mod; /** File that this content was loaded from. */ public @Nullable Fi sourceFile; /** The error that occurred during loading, if applicable. Null if no error occurred. */ public @Nullable String error; /** Base throwable that caused the error. */ public @Nullable Throwable baseError; } }
Anuken/Mindustry
core/src/mindustry/ctype/Content.java
924
package jadx.core.dex.nodes; import java.util.Collections; import java.util.List; import jadx.api.JavaField; import jadx.api.plugins.input.data.IFieldData; import jadx.core.dex.attributes.nodes.NotificationAttrNode; import jadx.core.dex.info.AccessInfo; import jadx.core.dex.info.AccessInfo.AFType; import jadx.core.dex.info.FieldInfo; import jadx.core.dex.instructions.args.ArgType; import jadx.core.utils.ListUtils; public class FieldNode extends NotificationAttrNode implements ICodeNode, IFieldInfoRef { private final ClassNode parentClass; private final FieldInfo fieldInfo; private AccessInfo accFlags; private ArgType type; private List<MethodNode> useIn = Collections.emptyList(); private JavaField javaNode; public static FieldNode build(ClassNode cls, IFieldData fieldData) { FieldInfo fieldInfo = FieldInfo.fromRef(cls.root(), fieldData); FieldNode fieldNode = new FieldNode(cls, fieldInfo, fieldData.getAccessFlags()); fieldNode.addAttrs(fieldData.getAttributes()); return fieldNode; } public FieldNode(ClassNode cls, FieldInfo fieldInfo, int accessFlags) { this.parentClass = cls; this.fieldInfo = fieldInfo; this.type = fieldInfo.getType(); this.accFlags = new AccessInfo(accessFlags, AFType.FIELD); } public void unload() { unloadAttributes(); } public void updateType(ArgType type) { this.type = type; } @Override public FieldInfo getFieldInfo() { return fieldInfo; } @Override public AccessInfo getAccessFlags() { return accFlags; } @Override public void setAccessFlags(AccessInfo accFlags) { this.accFlags = accFlags; } public boolean isStatic() { return accFlags.isStatic(); } public boolean isInstance() { return !accFlags.isStatic(); } public String getName() { return fieldInfo.getName(); } public String getAlias() { return fieldInfo.getAlias(); } @Override public void rename(String alias) { fieldInfo.setAlias(alias); } public ArgType getType() { return type; } public ClassNode getParentClass() { return parentClass; } public ClassNode getTopParentClass() { return 
parentClass.getTopParentClass(); } public List<MethodNode> getUseIn() { return useIn; } public void setUseIn(List<MethodNode> useIn) { this.useIn = useIn; } public synchronized void addUseIn(MethodNode mth) { useIn = ListUtils.safeAdd(useIn, mth); } @Override public String typeName() { return "field"; } @Override public String getInputFileName() { return parentClass.getInputFileName(); } @Override public RootNode root() { return parentClass.root(); } public JavaField getJavaNode() { return javaNode; } public void setJavaNode(JavaField javaNode) { this.javaNode = javaNode; } @Override public AnnType getAnnType() { return AnnType.FIELD; } @Override public int hashCode() { return fieldInfo.hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } FieldNode other = (FieldNode) obj; return fieldInfo.equals(other.fieldInfo); } @Override public String toString() { return fieldInfo.getDeclClass() + "." + fieldInfo.getName() + " :" + type; } }
skylot/jadx
jadx-core/src/main/java/jadx/core/dex/nodes/FieldNode.java
925
/*
 * Copyright 2013 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.util;

import io.netty.util.internal.EmptyArrays;
import io.netty.util.internal.ObjectUtil;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.SystemPropertyUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;

import java.lang.ref.WeakReference;
import java.lang.ref.ReferenceQueue;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;

import static io.netty.util.internal.StringUtil.EMPTY_STRING;
import static io.netty.util.internal.StringUtil.NEWLINE;
import static io.netty.util.internal.StringUtil.simpleClassName;

/**
 * Detects leaked (never-released) resources by attaching a {@link WeakReference} based tracker to a
 * sampled subset of tracked objects and reporting trackers whose referent was garbage-collected
 * without {@code close(...)} being called first.
 */
public class ResourceLeakDetector<T> {

    private static final String PROP_LEVEL_OLD = "io.netty.leakDetectionLevel";
    private static final String PROP_LEVEL = "io.netty.leakDetection.level";
    private static final Level DEFAULT_LEVEL = Level.SIMPLE;

    private static final String PROP_TARGET_RECORDS = "io.netty.leakDetection.targetRecords";
    private static final int DEFAULT_TARGET_RECORDS = 4;

    private static final String PROP_SAMPLING_INTERVAL = "io.netty.leakDetection.samplingInterval";
    // There is a minor performance benefit in TLR if this is a power of 2.
    private static final int DEFAULT_SAMPLING_INTERVAL = 128;

    // Both are resolved once from system properties in the static initializer below.
    private static final int TARGET_RECORDS;
    static final int SAMPLING_INTERVAL;

    /**
     * Represents the level of resource leak detection.
     */
    public enum Level {
        /**
         * Disables resource leak detection.
         */
        DISABLED,
        /**
         * Enables simplistic sampling resource leak detection which reports there is a leak or not,
         * at the cost of small overhead (default).
         */
        SIMPLE,
        /**
         * Enables advanced sampling resource leak detection which reports where the leaked object was accessed
         * recently at the cost of high overhead.
         */
        ADVANCED,
        /**
         * Enables paranoid resource leak detection which reports where the leaked object was accessed recently,
         * at the cost of the highest possible overhead (for testing purposes only).
         */
        PARANOID;

        /**
         * Returns level based on string value. Accepts also string that represents ordinal number of enum.
         *
         * @param levelStr - level string : DISABLED, SIMPLE, ADVANCED, PARANOID. Ignores case.
         * @return corresponding level or SIMPLE level in case of no match.
         */
        static Level parseLevel(String levelStr) {
            String trimmedLevelStr = levelStr.trim();
            for (Level l : values()) {
                if (trimmedLevelStr.equalsIgnoreCase(l.name()) || trimmedLevelStr.equals(String.valueOf(l.ordinal()))) {
                    return l;
                }
            }
            return DEFAULT_LEVEL;
        }
    }

    // NOTE(review): intentionally not volatile; concurrent readers may briefly observe a stale
    // level after setLevel(...) — presumably acceptable for a diagnostics toggle. Confirm.
    private static Level level;

    private static final InternalLogger logger = InternalLoggerFactory.getInstance(ResourceLeakDetector.class);

    static {
        // Honor the (deprecated) on/off switch first, then the old and new level property names,
        // with the newer property taking precedence.
        final boolean disabled;
        if (SystemPropertyUtil.get("io.netty.noResourceLeakDetection") != null) {
            disabled = SystemPropertyUtil.getBoolean("io.netty.noResourceLeakDetection", false);
            logger.debug("-Dio.netty.noResourceLeakDetection: {}", disabled);
            logger.warn(
                    "-Dio.netty.noResourceLeakDetection is deprecated. Use '-D{}={}' instead.",
                    PROP_LEVEL, Level.DISABLED.name().toLowerCase());
        } else {
            disabled = false;
        }

        Level defaultLevel = disabled? Level.DISABLED : DEFAULT_LEVEL;

        // First read old property name
        String levelStr = SystemPropertyUtil.get(PROP_LEVEL_OLD, defaultLevel.name());

        // If new property name is present, use it
        levelStr = SystemPropertyUtil.get(PROP_LEVEL, levelStr);
        Level level = Level.parseLevel(levelStr);

        TARGET_RECORDS = SystemPropertyUtil.getInt(PROP_TARGET_RECORDS, DEFAULT_TARGET_RECORDS);
        SAMPLING_INTERVAL = SystemPropertyUtil.getInt(PROP_SAMPLING_INTERVAL, DEFAULT_SAMPLING_INTERVAL);

        ResourceLeakDetector.level = level;
        if (logger.isDebugEnabled()) {
            logger.debug("-D{}: {}", PROP_LEVEL, level.name().toLowerCase());
            logger.debug("-D{}: {}", PROP_TARGET_RECORDS, TARGET_RECORDS);
        }
    }

    /**
     * @deprecated Use {@link #setLevel(Level)} instead.
     */
    @Deprecated
    public static void setEnabled(boolean enabled) {
        setLevel(enabled? Level.SIMPLE : Level.DISABLED);
    }

    /**
     * Returns {@code true} if resource leak detection is enabled.
     */
    public static boolean isEnabled() {
        return getLevel().ordinal() > Level.DISABLED.ordinal();
    }

    /**
     * Sets the resource leak detection level.
     */
    public static void setLevel(Level level) {
        ResourceLeakDetector.level = ObjectUtil.checkNotNull(level, "level");
    }

    /**
     * Returns the current resource leak detection level.
     */
    public static Level getLevel() {
        return level;
    }

    /** the collection of active resources */
    private final Set<DefaultResourceLeak<?>> allLeaks =
            Collections.newSetFromMap(new ConcurrentHashMap<DefaultResourceLeak<?>, Boolean>());

    // Trackers whose referent was collected are enqueued here by the GC.
    private final ReferenceQueue<Object> refQueue = new ReferenceQueue<Object>();
    // Used to report each distinct leak record string only once.
    private final Set<String> reportedLeaks =
            Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());

    private final String resourceType;
    private final int samplingInterval;

    /**
     * Will be notified once a leak is detected.
     */
    private volatile LeakListener leakListener;

    /**
     * @deprecated use {@link ResourceLeakDetectorFactory#newResourceLeakDetector(Class, int, long)}.
     */
    @Deprecated
    public ResourceLeakDetector(Class<?> resourceType) {
        this(simpleClassName(resourceType));
    }

    /**
     * @deprecated use {@link ResourceLeakDetectorFactory#newResourceLeakDetector(Class, int, long)}.
     */
    @Deprecated
    public ResourceLeakDetector(String resourceType) {
        this(resourceType, DEFAULT_SAMPLING_INTERVAL, Long.MAX_VALUE);
    }

    /**
     * @deprecated Use {@link ResourceLeakDetector#ResourceLeakDetector(Class, int)}.
     * <p>
     * This should not be used directly by users of {@link ResourceLeakDetector}.
     * Please use {@link ResourceLeakDetectorFactory#newResourceLeakDetector(Class)}
     * or {@link ResourceLeakDetectorFactory#newResourceLeakDetector(Class, int, long)}
     *
     * @param maxActive This is deprecated and will be ignored.
     */
    @Deprecated
    public ResourceLeakDetector(Class<?> resourceType, int samplingInterval, long maxActive) {
        this(resourceType, samplingInterval);
    }

    /**
     * This should not be used directly by users of {@link ResourceLeakDetector}.
     * Please use {@link ResourceLeakDetectorFactory#newResourceLeakDetector(Class)}
     * or {@link ResourceLeakDetectorFactory#newResourceLeakDetector(Class, int, long)}
     */
    @SuppressWarnings("deprecation")
    public ResourceLeakDetector(Class<?> resourceType, int samplingInterval) {
        this(simpleClassName(resourceType), samplingInterval, Long.MAX_VALUE);
    }

    /**
     * @deprecated use {@link ResourceLeakDetectorFactory#newResourceLeakDetector(Class, int, long)}.
     * <p>
     * @param maxActive This is deprecated and will be ignored.
     */
    @Deprecated
    public ResourceLeakDetector(String resourceType, int samplingInterval, long maxActive) {
        this.resourceType = ObjectUtil.checkNotNull(resourceType, "resourceType");
        this.samplingInterval = samplingInterval;
    }

    /**
     * Creates a new {@link ResourceLeak} which is expected to be closed via {@link ResourceLeak#close()} when the
     * related resource is deallocated.
     *
     * @return the {@link ResourceLeak} or {@code null}
     * @deprecated use {@link #track(Object)}
     */
    @Deprecated
    public final ResourceLeak open(T obj) {
        return track0(obj, false);
    }

    /**
     * Creates a new {@link ResourceLeakTracker} which is expected to be closed via
     * {@link ResourceLeakTracker#close(Object)} when the related resource is deallocated.
     *
     * @return the {@link ResourceLeakTracker} or {@code null}
     */
    @SuppressWarnings("unchecked")
    public final ResourceLeakTracker<T> track(T obj) {
        return track0(obj, false);
    }

    /**
     * Creates a new {@link ResourceLeakTracker} which is expected to be closed via
     * {@link ResourceLeakTracker#close(Object)} when the related resource is deallocated.
     *
     * Unlike {@link #track(Object)}, this method always returns a tracker, regardless
     * of the detection settings.
     *
     * @return the {@link ResourceLeakTracker}
     */
    @SuppressWarnings("unchecked")
    public ResourceLeakTracker<T> trackForcibly(T obj) {
        return track0(obj, true);
    }

    // Core sampling decision: always track when forced or PARANOID; otherwise track roughly
    // one object out of every samplingInterval (unless detection is DISABLED).
    @SuppressWarnings("unchecked")
    private DefaultResourceLeak track0(T obj, boolean force) {
        Level level = ResourceLeakDetector.level;
        if (force ||
                level == Level.PARANOID ||
                (level != Level.DISABLED && PlatformDependent.threadLocalRandom().nextInt(samplingInterval) == 0)) {
            reportLeak();
            return new DefaultResourceLeak(obj, refQueue, allLeaks, getInitialHint(resourceType));
        }
        return null;
    }

    // Drain the reference queue without reporting (used when reporting is disabled).
    private void clearRefQueue() {
        for (;;) {
            DefaultResourceLeak ref = (DefaultResourceLeak) refQueue.poll();
            if (ref == null) {
                break;
            }
            ref.dispose();
        }
    }

    /**
     * When the return value is {@code true}, {@link #reportTracedLeak} and {@link #reportUntracedLeak}
     * will be called once a leak is detected, otherwise not.
     *
     * @return {@code true} to enable leak reporting.
     */
    protected boolean needReport() {
        return logger.isErrorEnabled();
    }

    // Drain the reference queue and report every tracker that was never closed; each distinct
    // record string is reported at most once (see reportedLeaks).
    private void reportLeak() {
        if (!needReport()) {
            clearRefQueue();
            return;
        }

        // Detect and report previous leaks.
        for (;;) {
            DefaultResourceLeak ref = (DefaultResourceLeak) refQueue.poll();
            if (ref == null) {
                break;
            }

            if (!ref.dispose()) {
                // dispose() returned false: the tracker was closed properly, nothing leaked.
                continue;
            }

            String records = ref.getReportAndClearRecords();
            if (reportedLeaks.add(records)) {
                if (records.isEmpty()) {
                    reportUntracedLeak(resourceType);
                } else {
                    reportTracedLeak(resourceType, records);
                }

                LeakListener listener = leakListener;
                if (listener != null) {
                    listener.onLeak(resourceType, records);
                }
            }
        }
    }

    /**
     * This method is called when a traced leak is detected. It can be overridden for tracking how many times leaks
     * have been detected.
     */
    protected void reportTracedLeak(String resourceType, String records) {
        logger.error(
                "LEAK: {}.release() was not called before it's garbage-collected. " +
                "See https://netty.io/wiki/reference-counted-objects.html for more information.{}",
                resourceType, records);
    }

    /**
     * This method is called when an untraced leak is detected. It can be overridden for tracking how many times leaks
     * have been detected.
     */
    protected void reportUntracedLeak(String resourceType) {
        logger.error("LEAK: {}.release() was not called before it's garbage-collected. " +
                "Enable advanced leak reporting to find out where the leak occurred. " +
                "To enable advanced leak reporting, " +
                "specify the JVM option '-D{}={}' or call {}.setLevel() " +
                "See https://netty.io/wiki/reference-counted-objects.html for more information.",
                resourceType, PROP_LEVEL, Level.ADVANCED.name().toLowerCase(), simpleClassName(this));
    }

    /**
     * @deprecated This method will no longer be invoked by {@link ResourceLeakDetector}.
     */
    @Deprecated
    protected void reportInstancesLeak(String resourceType) {
    }

    /**
     * Create a hint object to be attached to an object tracked by this record. Similar to the additional information
     * supplied to {@link ResourceLeakTracker#record(Object)}, will be printed alongside the stack trace of the
     * creation of the resource.
     */
    protected Object getInitialHint(String resourceType) {
        return null;
    }

    /**
     * Set leak listener. Previous listener will be replaced.
     */
    public void setLeakListener(LeakListener leakListener) {
        this.leakListener = leakListener;
    }

    public interface LeakListener {

        /**
         * Will be called once a leak is detected.
         */
        void onLeak(String resourceType, String records);
    }

    // One tracker per sampled object. Being a WeakReference, it does not prevent collection of
    // the referent; if the referent is collected while the tracker is still in allLeaks, the
    // resource leaked. Access records form an immutable linked list updated via CAS on 'head'.
    @SuppressWarnings("deprecation")
    private static final class DefaultResourceLeak<T>
            extends WeakReference<Object> implements ResourceLeakTracker<T>, ResourceLeak {

        @SuppressWarnings("unchecked") // generics and updaters do not mix.
        private static final AtomicReferenceFieldUpdater<DefaultResourceLeak<?>, TraceRecord> headUpdater =
                (AtomicReferenceFieldUpdater)
                        AtomicReferenceFieldUpdater.newUpdater(DefaultResourceLeak.class, TraceRecord.class, "head");

        @SuppressWarnings("unchecked") // generics and updaters do not mix.
        private static final AtomicIntegerFieldUpdater<DefaultResourceLeak<?>> droppedRecordsUpdater =
                (AtomicIntegerFieldUpdater)
                        AtomicIntegerFieldUpdater.newUpdater(DefaultResourceLeak.class, "droppedRecords");

        // Top of the trace-record stack; null once closed. Mutated only through headUpdater.
        @SuppressWarnings("unused")
        private volatile TraceRecord head;
        // Count of records discarded by the probabilistic back-off in record0().
        @SuppressWarnings("unused")
        private volatile int droppedRecords;

        private final Set<DefaultResourceLeak<?>> allLeaks;
        private final int trackedHash;

        DefaultResourceLeak(
                Object referent,
                ReferenceQueue<Object> refQueue,
                Set<DefaultResourceLeak<?>> allLeaks,
                Object initialHint) {
            super(referent, refQueue);

            assert referent != null;

            // Store the hash of the tracked object to later assert it in the close(...) method.
            // It's important that we not store a reference to the referent as this would disallow it from
            // be collected via the WeakReference.
            trackedHash = System.identityHashCode(referent);
            allLeaks.add(this);
            // Create a new Record so we always have the creation stacktrace included.
            headUpdater.set(this, initialHint == null ?
                    new TraceRecord(TraceRecord.BOTTOM) : new TraceRecord(TraceRecord.BOTTOM, initialHint));
            this.allLeaks = allLeaks;
        }

        @Override
        public void record() {
            record0(null);
        }

        @Override
        public void record(Object hint) {
            record0(hint);
        }

        /**
         * This method works by exponentially backing off as more records are present in the stack. Each record has a
         * 1 / 2^n chance of dropping the top most record and replacing it with itself. This has a number of convenient
         * properties:
         *
         * <ol>
         * <li>  The current record is always recorded. This is due to the compare and swap dropping the top most
         *       record, rather than the to-be-pushed record.
         * <li>  The very last access will always be recorded. This comes as a property of 1.
         * <li>  It is possible to retain more records than the target, based upon the probability distribution.
         * <li>  It is easy to keep a precise record of the number of elements in the stack, since each element has to
         *     know how tall the stack is.
         * </ol>
         *
         * In this particular implementation, there are also some advantages. A thread local random is used to decide
         * if something should be recorded. This means that if there is a deterministic access pattern, it is now
         * possible to see what other accesses occur, rather than always dropping them. Second, after
         * {@link #TARGET_RECORDS} accesses, backoff occurs. This matches typical access patterns,
         * where there are either a high number of accesses (i.e. a cached buffer), or low (an ephemeral buffer), but
         * not many in between.
         *
         * The use of atomics avoids serializing a high number of accesses, when most of the records will be thrown
         * away. High contention only happens when there are very few existing records, which is only likely when the
         * object isn't shared! If this is a problem, the loop can be aborted and the record dropped, because another
         * thread won the race.
         */
        private void record0(Object hint) {
            // Check TARGET_RECORDS > 0 here to avoid similar check before remove from and add to lastRecords
            if (TARGET_RECORDS > 0) {
                TraceRecord oldHead;
                TraceRecord prevHead;
                TraceRecord newHead;
                boolean dropped;
                do {
                    if ((prevHead = oldHead = headUpdater.get(this)) == null) {
                        // already closed.
                        return;
                    }
                    final int numElements = oldHead.pos + 1;
                    if (numElements >= TARGET_RECORDS) {
                        final int backOffFactor = Math.min(numElements - TARGET_RECORDS, 30);
                        if (dropped = PlatformDependent.threadLocalRandom().nextInt(1 << backOffFactor) != 0) {
                            prevHead = oldHead.next;
                        }
                    } else {
                        dropped = false;
                    }
                    newHead = hint != null ? new TraceRecord(prevHead, hint) : new TraceRecord(prevHead);
                } while (!headUpdater.compareAndSet(this, oldHead, newHead));
                if (dropped) {
                    droppedRecordsUpdater.incrementAndGet(this);
                }
            }
        }

        // Returns true if this tracker was still registered (i.e. never closed) — a leak.
        boolean dispose() {
            clear();
            return allLeaks.remove(this);
        }

        @Override
        public boolean close() {
            if (allLeaks.remove(this)) {
                // Call clear so the reference is not even enqueued.
                clear();
                headUpdater.set(this, null);
                return true;
            }
            return false;
        }

        @Override
        public boolean close(T trackedObject) {
            // Ensure that the object that was tracked is the same as the one that was passed to close(...).
            assert trackedHash == System.identityHashCode(trackedObject);

            try {
                return close();
            } finally {
                // This method will do `synchronized(trackedObject)` and we should be sure this will not cause deadlock.
                // It should not, because somewhere up the callstack should be a (successful) `trackedObject.release`,
                // therefore it is unreasonable that anyone else, anywhere, is holding a lock on the trackedObject.
                // (Unreasonable but possible, unfortunately.)
                reachabilityFence0(trackedObject);
            }
        }

        /**
         * Ensures that the object referenced by the given reference remains
         * <a href="package-summary.html#reachability"><em>strongly reachable</em></a>,
         * regardless of any prior actions of the program that might otherwise cause
         * the object to become unreachable; thus, the referenced object is not
         * reclaimable by garbage collection at least until after the invocation of
         * this method.
         *
         * <p> Recent versions of the JDK have a nasty habit of prematurely deciding objects are unreachable.
         * see: https://stackoverflow.com/questions/26642153/finalize-called-on-strongly-reachable-object-in-java-8
         * The Java 9 method Reference.reachabilityFence offers a solution to this problem.
         *
         * <p> This method is always implemented as a synchronization on {@code ref}, not as
         * {@code Reference.reachabilityFence} for consistency across platforms and to allow building on JDK 6-8.
         * <b>It is the caller's responsibility to ensure that this synchronization will not cause deadlock.</b>
         *
         * @param ref the reference. If {@code null}, this method has no effect.
         * @see java.lang.ref.Reference#reachabilityFence
         */
        private static void reachabilityFence0(Object ref) {
            if (ref != null) {
                synchronized (ref) {
                    // Empty synchronized is ok: https://stackoverflow.com/a/31933260/1151521
                }
            }
        }

        @Override
        public String toString() {
            TraceRecord oldHead = headUpdater.get(this);
            return generateReport(oldHead);
        }

        // Atomically takes ownership of the record stack (so it is reported at most once).
        String getReportAndClearRecords() {
            TraceRecord oldHead = headUpdater.getAndSet(this, null);
            return generateReport(oldHead);
        }

        // Renders the record stack newest-first, de-duplicating identical traces and noting
        // how many records were dropped by the back-off.
        private String generateReport(TraceRecord oldHead) {
            if (oldHead == null) {
                // Already closed
                return EMPTY_STRING;
            }

            final int dropped = droppedRecordsUpdater.get(this);
            int duped = 0;

            int present = oldHead.pos + 1;
            // Guess about 2 kilobytes per stack trace
            StringBuilder buf = new StringBuilder(present * 2048).append(NEWLINE);
            buf.append("Recent access records: ").append(NEWLINE);

            int i = 1;
            Set<String> seen = new HashSet<String>(present);
            for (; oldHead != TraceRecord.BOTTOM; oldHead = oldHead.next) {
                String s = oldHead.toString();
                if (seen.add(s)) {
                    if (oldHead.next == TraceRecord.BOTTOM) {
                        buf.append("Created at:").append(NEWLINE).append(s);
                    } else {
                        buf.append('#').append(i++).append(':').append(NEWLINE).append(s);
                    }
                } else {
                    duped++;
                }
            }

            if (duped > 0) {
                buf.append(": ")
                        .append(duped)
                        .append(" leak records were discarded because they were duplicates")
                        .append(NEWLINE);
            }

            if (dropped > 0) {
                buf.append(": ")
                        .append(dropped)
                        .append(" leak records were discarded because the leak record count is targeted to ")
                        .append(TARGET_RECORDS)
                        .append(". Use system property ")
                        .append(PROP_TARGET_RECORDS)
                        .append(" to increase the limit.")
                        .append(NEWLINE);
            }

            buf.setLength(buf.length() - NEWLINE.length());
            return buf.toString();
        }
    }

    // Flat (class, method) pairs of stack frames to strip from reports; always even length.
    private static final AtomicReference<String[]> excludedMethods =
            new AtomicReference<String[]>(EmptyArrays.EMPTY_STRINGS);

    // Registers stack-trace frames to hide in leak reports; validates that each named
    // method actually exists on the given class.
    public static void addExclusions(Class clz, String ... methodNames) {
        Set<String> nameSet = new HashSet<String>(Arrays.asList(methodNames));
        // Use loop rather than lookup. This avoids knowing the parameters, and doesn't have to handle
        // NoSuchMethodException.
        for (Method method : clz.getDeclaredMethods()) {
            if (nameSet.remove(method.getName()) && nameSet.isEmpty()) {
                break;
            }
        }
        if (!nameSet.isEmpty()) {
            throw new IllegalArgumentException("Can't find '" + nameSet + "' in " + clz.getName());
        }
        String[] oldMethods;
        String[] newMethods;
        do {
            oldMethods = excludedMethods.get();
            newMethods = Arrays.copyOf(oldMethods, oldMethods.length + 2 * methodNames.length);
            for (int i = 0; i < methodNames.length; i++) {
                newMethods[oldMethods.length + i * 2] = clz.getName();
                newMethods[oldMethods.length + i * 2 + 1] = methodNames[i];
            }
        } while (!excludedMethods.compareAndSet(oldMethods, newMethods));
    }

    // One node of the access-record stack. Extends Throwable purely to capture a stack trace
    // at construction time.
    private static class TraceRecord extends Throwable {
        private static final long serialVersionUID = 6065153674892850720L;

        // Sentinel terminating every record stack; carries no stack trace.
        private static final TraceRecord BOTTOM = new TraceRecord() {
            private static final long serialVersionUID = 7396077602074694571L;

            // Override fillInStackTrace() so we not populate the backtrace via a native call and so leak the
            // Classloader.
            // See https://github.com/netty/netty/pull/10691
            @Override
            public Throwable fillInStackTrace() {
                return this;
            }
        };

        private final String hintString;
        private final TraceRecord next;
        private final int pos;

        TraceRecord(TraceRecord next, Object hint) {
            // This needs to be generated even if toString() is never called as it may change later on.
            hintString = hint instanceof ResourceLeakHint ? ((ResourceLeakHint) hint).toHintString() : hint.toString();
            this.next = next;
            this.pos = next.pos + 1;
        }

        TraceRecord(TraceRecord next) {
            hintString = null;
            this.next = next;
            this.pos = next.pos + 1;
        }

        // Used to terminate the stack
        private TraceRecord() {
            hintString = null;
            next = null;
            pos = -1;
        }

        @Override
        public String toString() {
            StringBuilder buf = new StringBuilder(2048);
            if (hintString != null) {
                buf.append("\tHint: ").append(hintString).append(NEWLINE);
            }

            // Append the stack trace.
            StackTraceElement[] array = getStackTrace();
            // Skip the first three elements.
            out: for (int i = 3; i < array.length; i++) {
                StackTraceElement element = array[i];
                // Strip the noisy stack trace elements.
                String[] exclusions = excludedMethods.get();
                for (int k = 0; k < exclusions.length; k += 2) {
                    // Suppress a warning about out of bounds access
                    // since the length of excludedMethods is always even, see addExclusions()
                    if (exclusions[k].equals(element.getClassName())
                            && exclusions[k + 1].equals(element.getMethodName())) {
                        continue out;
                    }
                }

                buf.append('\t');
                buf.append(element.toString());
                buf.append(NEWLINE);
            }
            return buf.toString();
        }
    }
}
netty/netty
common/src/main/java/io/netty/util/ResourceLeakDetector.java
926
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.util;

import io.netty.util.internal.InternalThreadLocalMap;

import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CodingErrorAction;
import java.util.Map;

import static io.netty.util.internal.ObjectUtil.checkNotNull;

/**
 * A utility class that provides various common operations and constants
 * related with {@link Charset} and its relevant classes.
 */
public final class CharsetUtil {

    /**
     * 16-bit UTF (UCS Transformation Format) whose byte order is identified by
     * an optional byte-order mark
     */
    public static final Charset UTF_16 = Charset.forName("UTF-16");

    /**
     * 16-bit UTF (UCS Transformation Format) whose byte order is big-endian
     */
    public static final Charset UTF_16BE = Charset.forName("UTF-16BE");

    /**
     * 16-bit UTF (UCS Transformation Format) whose byte order is little-endian
     */
    public static final Charset UTF_16LE = Charset.forName("UTF-16LE");

    /**
     * 8-bit UTF (UCS Transformation Format)
     */
    public static final Charset UTF_8 = Charset.forName("UTF-8");

    /**
     * ISO Latin Alphabet No. 1, as known as <tt>ISO-LATIN-1</tt>
     */
    public static final Charset ISO_8859_1 = Charset.forName("ISO-8859-1");

    /**
     * 7-bit ASCII, as known as ISO646-US or the Basic Latin block of the
     * Unicode character set
     */
    public static final Charset US_ASCII = Charset.forName("US-ASCII");

    // The fixed set of charsets exposed through values(); shared, so callers must not mutate it.
    private static final Charset[] CHARSETS = new Charset[] {
            UTF_16, UTF_16BE, UTF_16LE, UTF_8, ISO_8859_1, US_ASCII };

    public static Charset[] values() {
        return CHARSETS;
    }

    /**
     * @deprecated Use {@link #encoder(Charset)}.
     */
    @Deprecated
    public static CharsetEncoder getEncoder(Charset charset) {
        return encoder(charset);
    }

    /**
     * Returns a new {@link CharsetEncoder} for the {@link Charset} with specified error actions.
     *
     * @param charset The specified charset
     * @param malformedInputAction The encoder's action for malformed-input errors
     * @param unmappableCharacterAction The encoder's action for unmappable-character errors
     * @return The encoder for the specified {@code charset}
     */
    public static CharsetEncoder encoder(Charset charset, CodingErrorAction malformedInputAction,
                                         CodingErrorAction unmappableCharacterAction) {
        checkNotNull(charset, "charset");
        // A brand-new encoder is configured and handed straight to the caller.
        return charset.newEncoder()
                .onMalformedInput(malformedInputAction)
                .onUnmappableCharacter(unmappableCharacterAction);
    }

    /**
     * Returns a new {@link CharsetEncoder} for the {@link Charset} with the specified error action.
     *
     * @param charset The specified charset
     * @param codingErrorAction The encoder's action for malformed-input and unmappable-character errors
     * @return The encoder for the specified {@code charset}
     */
    public static CharsetEncoder encoder(Charset charset, CodingErrorAction codingErrorAction) {
        return encoder(charset, codingErrorAction, codingErrorAction);
    }

    /**
     * Returns a cached thread-local {@link CharsetEncoder} for the specified {@link Charset}.
     *
     * @param charset The specified charset
     * @return The encoder for the specified {@code charset}
     */
    public static CharsetEncoder encoder(Charset charset) {
        checkNotNull(charset, "charset");

        Map<Charset, CharsetEncoder> cache = InternalThreadLocalMap.get().charsetEncoderCache();
        CharsetEncoder cached = cache.get(charset);
        if (cached == null) {
            // Cache miss: build a REPLACE-on-error encoder and remember it for this thread.
            cached = encoder(charset, CodingErrorAction.REPLACE, CodingErrorAction.REPLACE);
            cache.put(charset, cached);
            return cached;
        }
        // Cache hit: reset state left over from a previous use and re-apply the REPLACE actions.
        cached.reset()
                .onMalformedInput(CodingErrorAction.REPLACE)
                .onUnmappableCharacter(CodingErrorAction.REPLACE);
        return cached;
    }

    /**
     * @deprecated Use {@link #decoder(Charset)}.
     */
    @Deprecated
    public static CharsetDecoder getDecoder(Charset charset) {
        return decoder(charset);
    }

    /**
     * Returns a new {@link CharsetDecoder} for the {@link Charset} with specified error actions.
     *
     * @param charset The specified charset
     * @param malformedInputAction The decoder's action for malformed-input errors
     * @param unmappableCharacterAction The decoder's action for unmappable-character errors
     * @return The decoder for the specified {@code charset}
     */
    public static CharsetDecoder decoder(Charset charset, CodingErrorAction malformedInputAction,
                                         CodingErrorAction unmappableCharacterAction) {
        checkNotNull(charset, "charset");
        // A brand-new decoder is configured and handed straight to the caller.
        return charset.newDecoder()
                .onMalformedInput(malformedInputAction)
                .onUnmappableCharacter(unmappableCharacterAction);
    }

    /**
     * Returns a new {@link CharsetDecoder} for the {@link Charset} with the specified error action.
     *
     * @param charset The specified charset
     * @param codingErrorAction The decoder's action for malformed-input and unmappable-character errors
     * @return The decoder for the specified {@code charset}
     */
    public static CharsetDecoder decoder(Charset charset, CodingErrorAction codingErrorAction) {
        return decoder(charset, codingErrorAction, codingErrorAction);
    }

    /**
     * Returns a cached thread-local {@link CharsetDecoder} for the specified {@link Charset}.
     *
     * @param charset The specified charset
     * @return The decoder for the specified {@code charset}
     */
    public static CharsetDecoder decoder(Charset charset) {
        checkNotNull(charset, "charset");

        Map<Charset, CharsetDecoder> cache = InternalThreadLocalMap.get().charsetDecoderCache();
        CharsetDecoder cached = cache.get(charset);
        if (cached == null) {
            // Cache miss: build a REPLACE-on-error decoder and remember it for this thread.
            cached = decoder(charset, CodingErrorAction.REPLACE, CodingErrorAction.REPLACE);
            cache.put(charset, cached);
            return cached;
        }
        // Cache hit: reset state left over from a previous use and re-apply the REPLACE actions.
        cached.reset()
                .onMalformedInput(CodingErrorAction.REPLACE)
                .onUnmappableCharacter(CodingErrorAction.REPLACE);
        return cached;
    }

    private CharsetUtil() { }
}
netty/netty
common/src/main/java/io/netty/util/CharsetUtil.java
927
/*
 * Copyright (C) 2013-2018 The Project Lombok Authors.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package lombok;

import static java.lang.annotation.ElementType.*;
import static java.lang.annotation.RetentionPolicy.*;

import java.lang.annotation.Retention;
import java.lang.annotation.Target;

/**
 * The builder annotation creates a so-called 'builder' aspect to the class that is annotated or the class
 * that contains a member which is annotated with {@code @Builder}.
 * <p>
 * If a member is annotated, it must be either a constructor or a method. If a class is annotated,
 * then a package-private constructor is generated with all fields as arguments
 * (as if {@code @AllArgsConstructor(access = AccessLevel.PACKAGE)} is present
 * on the class), and it is as if this constructor has been annotated with {@code @Builder} instead.
 * Note that this constructor is only generated if you haven't written any constructors and also haven't
 * added any explicit {@code @XArgsConstructor} annotations. In those cases, lombok will assume an all-args
 * constructor is present and generate code that uses it; this means you'd get a compiler error if this
 * constructor is not present.
 * <p>
 * The effect of {@code @Builder} is that an inner class is generated named <code><strong>T</strong>Builder</code>,
 * with a private constructor. Instances of <code><strong>T</strong>Builder</code> are made with the
 * method named {@code builder()} which is also generated for you in the class itself (not in the builder class).
 * <p>
 * The <code><strong>T</strong>Builder</code> class contains 1 method for each parameter of the annotated
 * constructor / method (each field, when annotating a class), which returns the builder itself.
 * The builder also has a <code>build()</code> method which returns a completed instance of the original type,
 * created by passing all parameters as set via the various other methods in the builder to the constructor
 * or method that was annotated with {@code @Builder}. The return type of this method will be the same
 * as the relevant class, unless a method has been annotated, in which case it'll be equal to the
 * return type of that method.
 * <p>
 * Complete documentation is found at <a href="https://projectlombok.org/features/Builder">the project lombok features page for &#64;Builder</a>.
 * <br>
 * <p>
 * Before:
 *
 * <pre>
 * &#064;Builder
 * class Example&lt;T&gt; {
 * 	private T foo;
 * 	private final String bar;
 * }
 * </pre>
 *
 * After:
 *
 * <pre>
 * class Example&lt;T&gt; {
 * 	private T foo;
 * 	private final String bar;
 *
 * 	private Example(T foo, String bar) {
 * 		this.foo = foo;
 * 		this.bar = bar;
 * 	}
 *
 * 	public static &lt;T&gt; ExampleBuilder&lt;T&gt; builder() {
 * 		return new ExampleBuilder&lt;T&gt;();
 * 	}
 *
 * 	public static class ExampleBuilder&lt;T&gt; {
 * 		private T foo;
 * 		private String bar;
 *
 * 		private ExampleBuilder() {}
 *
 * 		public ExampleBuilder foo(T foo) {
 * 			this.foo = foo;
 * 			return this;
 * 		}
 *
 * 		public ExampleBuilder bar(String bar) {
 * 			this.bar = bar;
 * 			return this;
 * 		}
 *
 * 		&#064;java.lang.Override public String toString() {
 * 			return "ExampleBuilder(foo = " + foo + ", bar = " + bar + ")";
 * 		}
 *
 * 		public Example build() {
 * 			return new Example(foo, bar);
 * 		}
 * 	}
 * }
 * </pre>
 *
 * @see Singular
 */
@Target({TYPE, METHOD, CONSTRUCTOR})
@Retention(SOURCE)
public @interface Builder {
	/**
	 * The field annotated with {@code @Default} must have an initializing expression; that expression is taken as the default to be used if not explicitly set during building.
	 */
	@Target(FIELD)
	@Retention(SOURCE)
	public @interface Default {}
	
	/** @return Name of the method that creates a new builder instance. Default: {@code builder}. If the empty string, suppress generating the {@code builder} method. */
	String builderMethodName() default "builder";
	
	/** @return Name of the method in the builder class that creates an instance of your {@code @Builder}-annotated class. */
	String buildMethodName() default "build";
	
	/**
	 * Name of the builder class.
	 *
	 * Default for {@code @Builder} on types and constructors: see the configkey {@code lombok.builder.className}, which if not set defaults to {@code (TypeName)Builder}.
	 * <p>
	 * Default for {@code @Builder} on methods: see the configkey {@code lombok.builder.className}, which if not set defaults to {@code (ReturnTypeName)Builder}.
	 *
	 * @return Name of the builder class that will be generated (or if it already exists, will be filled with builder elements).
	 */
	String builderClassName() default "";
	
	/**
	 * If true, generate an instance method to obtain a builder that is initialized with the values of this instance.
	 * Legal only if {@code @Builder} is used on a constructor, on the type itself, or on a static method that returns
	 * an instance of the declaring type.
	 *
	 * @return Whether to generate a {@code toBuilder()} method.
	 */
	boolean toBuilder() default false;
	
	/**
	 * Sets the access level of the generated builder class. By default, generated builder classes are {@code public}.
	 * Note: This does nothing if you write your own builder class (we won't change its access level).
	 *
	 * @return The builder class will be generated with this access modifier.
	 */
	AccessLevel access() default lombok.AccessLevel.PUBLIC;
	
	/**
	 * Prefix to prepend to 'set' methods in the generated builder class. By default, generated methods do not include a prefix.
	 *
	 * For example, a method normally generated as {@code someField(String someField)} would instead be
	 * generated as {@code withSomeField(String someField)} if using {@code @Builder(setterPrefix = "with")}.
	 *
	 * Note that using "with" to prefix builder setter methods is strongly discouraged as "with" normally
	 * suggests immutable data structures, and builders by definition are mutable objects.
	 *
	 * For {@code @Singular} fields, the generated methods are called {@code withName}, {@code withNames}, and {@code clearNames}, instead of
	 * the default {@code name}, {@code names}, and {@code clearNames}.
	 *
	 * @return The prefix to prepend to generated method names.
	 */
	String setterPrefix() default "";
	
	/**
	 * Put on a field (in case of {@code @Builder} on a type) or a parameter (for {@code @Builder} on a constructor or static method) to
	 * indicate how lombok should obtain a value for this field or parameter given an instance; this is only relevant if {@code toBuilder} is {@code true}.
	 *
	 * You do not need to supply an {@code @ObtainVia} annotation unless you wish to change the default behaviour: Use a field with the same name.
	 * <p>
	 * Note that one of {@code field} or {@code method} should be set, or an error is generated.
	 * <p>
	 * The default behaviour is to obtain a value by referencing the name of the parameter as a field on 'this'.
	 */
	@Target({FIELD, PARAMETER})
	@Retention(SOURCE)
	public @interface ObtainVia {
		/**
		 * @return Tells lombok to obtain a value with the expression {@code this.value}.
		 */
		String field() default "";
		
		/**
		 * @return Tells lombok to obtain a value with the expression {@code this.method()}.
		 */
		String method() default "";
		
		/**
		 * @return Tells lombok to obtain a value with the expression {@code SelfType.method(this)}; requires {@code method} to be set.
		 */
		boolean isStatic() default false;
	}
}
projectlombok/lombok
src/core/lombok/Builder.java
928
/*
 * Copyright (C) 2009-2018 The Project Lombok Authors.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package lombok;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Generates an implementation for the {@code toString} method inherited by all objects, consisting of printing the values of relevant fields.
 * <p>
 * Complete documentation is found at <a href="https://projectlombok.org/features/ToString">the project lombok features page for &#64;ToString</a>.
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.SOURCE)
public @interface ToString {
	/**
	 * Include the name of each field when printing it.
	 * <strong>default: true</strong>
	 *
	 * @return Whether or not to include the names of fields in the string produced by the generated {@code toString()}.
	 */
	boolean includeFieldNames() default true;
	
	/**
	 * Any fields listed here will not be printed in the generated {@code toString} implementation.
	 * Mutually exclusive with {@link #of()}.
	 * <p>
	 * Will soon be marked {@code @Deprecated}; use the {@code @ToString.Exclude} annotation instead.
	 *
	 * @return A list of fields to exclude.
	 */
	String[] exclude() default {};
	
	/**
	 * If present, explicitly lists the fields that are to be printed.
	 * Normally, all non-static fields are printed.
	 * <p>
	 * Mutually exclusive with {@link #exclude()}.
	 * <p>
	 * Will soon be marked {@code @Deprecated}; use the {@code @ToString.Include} annotation together with {@code @ToString(onlyExplicitlyIncluded = true)}.
	 *
	 * @return A list of fields to use (<em>default</em>: all of them).
	 */
	String[] of() default {};
	
	/**
	 * Include the result of the superclass's implementation of {@code toString} in the output.
	 * <strong>default: false</strong>
	 *
	 * @return Whether to call the superclass's {@code toString} implementation as part of the generated toString algorithm.
	 */
	boolean callSuper() default false;
	
	/**
	 * Normally, if getters are available, those are called. To suppress this and let the generated code use the fields directly, set this to {@code true}.
	 * <strong>default: false</strong>
	 *
	 * @return If {@code true}, always use direct field access instead of calling the getter method.
	 */
	boolean doNotUseGetters() default false;
	
	/**
	 * Only include fields and methods explicitly marked with {@code @ToString.Include}.
	 * Normally, all (non-static) fields are included by default.
	 *
	 * @return If {@code true}, don't include non-static fields automatically (default: {@code false}).
	 */
	boolean onlyExplicitlyIncluded() default false;
	
	/**
	 * If present, do not include this field in the generated {@code toString}.
	 */
	@Target(ElementType.FIELD)
	@Retention(RetentionPolicy.SOURCE)
	public @interface Exclude {}
	
	/**
	 * Configure the behaviour of how this member is rendered in the {@code toString}; if on a method, include the method's return value in the output.
	 */
	@Target({ElementType.FIELD, ElementType.METHOD})
	@Retention(RetentionPolicy.SOURCE)
	public @interface Include {
//		/** If true and the return value is {@code null}, omit this member entirely from the {@code toString} output. */
//		boolean skipNull() default false; // -- We'll add it later, it requires a complete rework on the toString code we generate.
		
		/**
		 * Higher ranks are printed first. Members of the same rank are printed in the order they appear in the source file.
		 *
		 * @return ordering within the generating {@code toString()}; higher numbers are printed first.
		 */
		int rank() default 0;
		
		/**
		 * Defaults to the field / method name of the annotated member.
		 * If the name equals the name of a default-included field, this member takes its place.
		 *
		 * @return The name to show in the generated {@code toString()}. Also, if this annotation is on a method and the name matches an existing field, it replaces that field.
		 */
		String name() default "";
	}
}
projectlombok/lombok
src/core/lombok/ToString.java
930
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.buffer; import io.netty.util.ByteProcessor; import io.netty.util.IllegalReferenceCountException; import io.netty.util.ReferenceCountUtil; import io.netty.util.internal.EmptyArrays; import io.netty.util.internal.ObjectUtil; import io.netty.util.internal.RecyclableArrayList; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.FileChannel; import java.nio.channels.GatheringByteChannel; import java.nio.channels.ScatteringByteChannel; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.ConcurrentModificationException; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import static io.netty.util.internal.ObjectUtil.checkNotNull; /** * A virtual buffer which shows multiple buffers as a single merged buffer. It is recommended to use * {@link ByteBufAllocator#compositeBuffer()} or {@link Unpooled#wrappedBuffer(ByteBuf...)} instead of calling the * constructor explicitly. 
*/ public class CompositeByteBuf extends AbstractReferenceCountedByteBuf implements Iterable<ByteBuf> { private static final ByteBuffer EMPTY_NIO_BUFFER = Unpooled.EMPTY_BUFFER.nioBuffer(); private static final Iterator<ByteBuf> EMPTY_ITERATOR = Collections.<ByteBuf>emptyList().iterator(); private final ByteBufAllocator alloc; private final boolean direct; private final int maxNumComponents; private int componentCount; private Component[] components; // resized when needed private boolean freed; private CompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents, int initSize) { super(AbstractByteBufAllocator.DEFAULT_MAX_CAPACITY); this.alloc = ObjectUtil.checkNotNull(alloc, "alloc"); if (maxNumComponents < 1) { throw new IllegalArgumentException( "maxNumComponents: " + maxNumComponents + " (expected: >= 1)"); } this.direct = direct; this.maxNumComponents = maxNumComponents; components = newCompArray(initSize, maxNumComponents); } public CompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents) { this(alloc, direct, maxNumComponents, 0); } public CompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents, ByteBuf... buffers) { this(alloc, direct, maxNumComponents, buffers, 0); } CompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents, ByteBuf[] buffers, int offset) { this(alloc, direct, maxNumComponents, buffers.length - offset); addComponents0(false, 0, buffers, offset); consolidateIfNeeded(); setIndex0(0, capacity()); } public CompositeByteBuf( ByteBufAllocator alloc, boolean direct, int maxNumComponents, Iterable<ByteBuf> buffers) { this(alloc, direct, maxNumComponents, buffers instanceof Collection ? 
((Collection<ByteBuf>) buffers).size() : 0); addComponents(false, 0, buffers); setIndex(0, capacity()); } // support passing arrays of other types instead of having to copy to a ByteBuf[] first interface ByteWrapper<T> { ByteBuf wrap(T bytes); boolean isEmpty(T bytes); } static final ByteWrapper<byte[]> BYTE_ARRAY_WRAPPER = new ByteWrapper<byte[]>() { @Override public ByteBuf wrap(byte[] bytes) { return Unpooled.wrappedBuffer(bytes); } @Override public boolean isEmpty(byte[] bytes) { return bytes.length == 0; } }; static final ByteWrapper<ByteBuffer> BYTE_BUFFER_WRAPPER = new ByteWrapper<ByteBuffer>() { @Override public ByteBuf wrap(ByteBuffer bytes) { return Unpooled.wrappedBuffer(bytes); } @Override public boolean isEmpty(ByteBuffer bytes) { return !bytes.hasRemaining(); } }; <T> CompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents, ByteWrapper<T> wrapper, T[] buffers, int offset) { this(alloc, direct, maxNumComponents, buffers.length - offset); addComponents0(false, 0, wrapper, buffers, offset); consolidateIfNeeded(); setIndex(0, capacity()); } private static Component[] newCompArray(int initComponents, int maxNumComponents) { int capacityGuess = Math.min(AbstractByteBufAllocator.DEFAULT_MAX_COMPONENTS, maxNumComponents); return new Component[Math.max(initComponents, capacityGuess)]; } // Special constructor used by WrappedCompositeByteBuf CompositeByteBuf(ByteBufAllocator alloc) { super(Integer.MAX_VALUE); this.alloc = alloc; direct = false; maxNumComponents = 0; components = null; } /** * Add the given {@link ByteBuf}. * <p> * Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuf}. * If you need to have it increased use {@link #addComponent(boolean, ByteBuf)}. * <p> * {@link ByteBuf#release()} ownership of {@code buffer} is transferred to this {@link CompositeByteBuf}. * @param buffer the {@link ByteBuf} to add. 
{@link ByteBuf#release()} ownership is transferred to this * {@link CompositeByteBuf}. */ public CompositeByteBuf addComponent(ByteBuf buffer) { return addComponent(false, buffer); } /** * Add the given {@link ByteBuf}s. * <p> * Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuf}. * If you need to have it increased use {@link #addComponents(boolean, ByteBuf[])}. * <p> * {@link ByteBuf#release()} ownership of all {@link ByteBuf} objects in {@code buffers} is transferred to this * {@link CompositeByteBuf}. * @param buffers the {@link ByteBuf}s to add. {@link ByteBuf#release()} ownership of all {@link ByteBuf#release()} * ownership of all {@link ByteBuf} objects is transferred to this {@link CompositeByteBuf}. */ public CompositeByteBuf addComponents(ByteBuf... buffers) { return addComponents(false, buffers); } /** * Add the given {@link ByteBuf}s. * <p> * Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuf}. * If you need to have it increased use {@link #addComponents(boolean, Iterable)}. * <p> * {@link ByteBuf#release()} ownership of all {@link ByteBuf} objects in {@code buffers} is transferred to this * {@link CompositeByteBuf}. * @param buffers the {@link ByteBuf}s to add. {@link ByteBuf#release()} ownership of all {@link ByteBuf#release()} * ownership of all {@link ByteBuf} objects is transferred to this {@link CompositeByteBuf}. */ public CompositeByteBuf addComponents(Iterable<ByteBuf> buffers) { return addComponents(false, buffers); } /** * Add the given {@link ByteBuf} on the specific index. * <p> * Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuf}. * If you need to have it increased use {@link #addComponent(boolean, int, ByteBuf)}. * <p> * {@link ByteBuf#release()} ownership of {@code buffer} is transferred to this {@link CompositeByteBuf}. * @param cIndex the index on which the {@link ByteBuf} will be added. 
* @param buffer the {@link ByteBuf} to add. {@link ByteBuf#release()} ownership is transferred to this * {@link CompositeByteBuf}. */ public CompositeByteBuf addComponent(int cIndex, ByteBuf buffer) { return addComponent(false, cIndex, buffer); } /** * Add the given {@link ByteBuf} and increase the {@code writerIndex} if {@code increaseWriterIndex} is * {@code true}. * * {@link ByteBuf#release()} ownership of {@code buffer} is transferred to this {@link CompositeByteBuf}. * @param buffer the {@link ByteBuf} to add. {@link ByteBuf#release()} ownership is transferred to this * {@link CompositeByteBuf}. */ public CompositeByteBuf addComponent(boolean increaseWriterIndex, ByteBuf buffer) { return addComponent(increaseWriterIndex, componentCount, buffer); } /** * Add the given {@link ByteBuf}s and increase the {@code writerIndex} if {@code increaseWriterIndex} is * {@code true}. * * {@link ByteBuf#release()} ownership of all {@link ByteBuf} objects in {@code buffers} is transferred to this * {@link CompositeByteBuf}. * @param buffers the {@link ByteBuf}s to add. {@link ByteBuf#release()} ownership of all {@link ByteBuf#release()} * ownership of all {@link ByteBuf} objects is transferred to this {@link CompositeByteBuf}. */ public CompositeByteBuf addComponents(boolean increaseWriterIndex, ByteBuf... buffers) { checkNotNull(buffers, "buffers"); addComponents0(increaseWriterIndex, componentCount, buffers, 0); consolidateIfNeeded(); return this; } /** * Add the given {@link ByteBuf}s and increase the {@code writerIndex} if {@code increaseWriterIndex} is * {@code true}. * * {@link ByteBuf#release()} ownership of all {@link ByteBuf} objects in {@code buffers} is transferred to this * {@link CompositeByteBuf}. * @param buffers the {@link ByteBuf}s to add. {@link ByteBuf#release()} ownership of all {@link ByteBuf#release()} * ownership of all {@link ByteBuf} objects is transferred to this {@link CompositeByteBuf}. 
*/ public CompositeByteBuf addComponents(boolean increaseWriterIndex, Iterable<ByteBuf> buffers) { return addComponents(increaseWriterIndex, componentCount, buffers); } /** * Add the given {@link ByteBuf} on the specific index and increase the {@code writerIndex} * if {@code increaseWriterIndex} is {@code true}. * * {@link ByteBuf#release()} ownership of {@code buffer} is transferred to this {@link CompositeByteBuf}. * @param cIndex the index on which the {@link ByteBuf} will be added. * @param buffer the {@link ByteBuf} to add. {@link ByteBuf#release()} ownership is transferred to this * {@link CompositeByteBuf}. */ public CompositeByteBuf addComponent(boolean increaseWriterIndex, int cIndex, ByteBuf buffer) { checkNotNull(buffer, "buffer"); addComponent0(increaseWriterIndex, cIndex, buffer); consolidateIfNeeded(); return this; } private static void checkForOverflow(int capacity, int readableBytes) { if (capacity + readableBytes < 0) { throw new IllegalArgumentException("Can't increase by " + readableBytes + " as capacity(" + capacity + ")" + " would overflow " + Integer.MAX_VALUE); } } /** * Precondition is that {@code buffer != null}. */ private int addComponent0(boolean increaseWriterIndex, int cIndex, ByteBuf buffer) { assert buffer != null; boolean wasAdded = false; try { checkComponentIndex(cIndex); // No need to consolidate - just add a component to the list. Component c = newComponent(ensureAccessible(buffer), 0); int readableBytes = c.length(); // Check if we would overflow. 
// See https://github.com/netty/netty/issues/10194 checkForOverflow(capacity(), readableBytes); addComp(cIndex, c); wasAdded = true; if (readableBytes > 0 && cIndex < componentCount - 1) { updateComponentOffsets(cIndex); } else if (cIndex > 0) { c.reposition(components[cIndex - 1].endOffset); } if (increaseWriterIndex) { writerIndex += readableBytes; } return cIndex; } finally { if (!wasAdded) { buffer.release(); } } } private static ByteBuf ensureAccessible(final ByteBuf buf) { if (checkAccessible && !buf.isAccessible()) { throw new IllegalReferenceCountException(0); } return buf; } @SuppressWarnings("deprecation") private Component newComponent(final ByteBuf buf, final int offset) { final int srcIndex = buf.readerIndex(); final int len = buf.readableBytes(); // unpeel any intermediate outer layers (UnreleasableByteBuf, LeakAwareByteBufs, SwappedByteBuf) ByteBuf unwrapped = buf; int unwrappedIndex = srcIndex; while (unwrapped instanceof WrappedByteBuf || unwrapped instanceof SwappedByteBuf) { unwrapped = unwrapped.unwrap(); } // unwrap if already sliced if (unwrapped instanceof AbstractUnpooledSlicedByteBuf) { unwrappedIndex += ((AbstractUnpooledSlicedByteBuf) unwrapped).idx(0); unwrapped = unwrapped.unwrap(); } else if (unwrapped instanceof PooledSlicedByteBuf) { unwrappedIndex += ((PooledSlicedByteBuf) unwrapped).adjustment; unwrapped = unwrapped.unwrap(); } else if (unwrapped instanceof DuplicatedByteBuf || unwrapped instanceof PooledDuplicatedByteBuf) { unwrapped = unwrapped.unwrap(); } // We don't need to slice later to expose the internal component if the readable range // is already the entire buffer final ByteBuf slice = buf.capacity() == len ? 
buf : null;
        // Tail of newComponent(): big-endian views of the source and unwrapped buffers are stored.
        return new Component(buf.order(ByteOrder.BIG_ENDIAN), srcIndex,
                unwrapped.order(ByteOrder.BIG_ENDIAN), unwrappedIndex, offset, len, slice);
    }

    /**
     * Add the given {@link ByteBuf}s on the specific index
     * <p>
     * Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuf}.
     * If you need to have it increased you need to handle it by your own.
     * <p>
     * {@link ByteBuf#release()} ownership of all {@link ByteBuf} objects in {@code buffers} is transferred to this
     * {@link CompositeByteBuf}.
     * @param cIndex the index on which the {@link ByteBuf}s will be added. {@link ByteBuf#release()} ownership of
     * all {@link ByteBuf} objects is transferred to this {@link CompositeByteBuf}.
     * @param buffers the {@link ByteBuf}s to add. {@link ByteBuf#release()} ownership of all
     * {@link ByteBuf} objects is transferred to this {@link CompositeByteBuf}.
     */
    public CompositeByteBuf addComponents(int cIndex, ByteBuf... buffers) {
        checkNotNull(buffers, "buffers");
        addComponents0(false, cIndex, buffers, 0);
        consolidateIfNeeded();
        return this;
    }

    // Core bulk-add. Inserts buffers[arrOffset..] at component index cIndex. On failure the finally
    // block rolls back the shifted slots and releases any not-yet-added buffers, so ownership of
    // every element of the array is consumed exactly once.
    private CompositeByteBuf addComponents0(boolean increaseWriterIndex,
            final int cIndex, ByteBuf[] buffers, int arrOffset) {
        final int len = buffers.length, count = len - arrOffset;
        int readableBytes = 0;
        int capacity = capacity();
        // Pre-scan (up to the first null terminator) so a capacity overflow is detected before
        // any state is mutated.
        for (int i = arrOffset; i < buffers.length; i++) {
            ByteBuf b = buffers[i];
            if (b == null) {
                break;
            }
            readableBytes += b.readableBytes();

            // Check if we would overflow.
            // See https://github.com/netty/netty/issues/10194
            checkForOverflow(capacity, readableBytes);
        }
        // only set ci after we've shifted so that finally block logic is always correct
        int ci = Integer.MAX_VALUE;
        try {
            checkComponentIndex(cIndex);
            shiftComps(cIndex, count); // will increase componentCount
            int nextOffset = cIndex > 0 ? components[cIndex - 1].endOffset : 0;
            for (ci = cIndex; arrOffset < len; arrOffset++, ci++) {
                ByteBuf b = buffers[arrOffset];
                if (b == null) {
                    break; // null terminates the varargs array
                }
                Component c = newComponent(ensureAccessible(b), nextOffset);
                components[ci] = c;
                nextOffset = c.endOffset;
            }
            return this;
        } finally {
            // ci is now the index following the last successfully added component
            if (ci < componentCount) {
                if (ci < cIndex + count) {
                    // we bailed early
                    removeCompRange(ci, cIndex + count);
                    for (; arrOffset < len; ++arrOffset) {
                        ReferenceCountUtil.safeRelease(buffers[arrOffset]);
                    }
                }
                updateComponentOffsets(ci); // only need to do this here for components after the added ones
            }
            if (increaseWriterIndex && ci > cIndex && ci <= componentCount) {
                writerIndex += components[ci - 1].endOffset - components[cIndex].offset;
            }
        }
    }

    // Generic bulk-add for wrapped array types (byte[], ByteBuffer, ...); skips empty elements and
    // returns the component index following the last added component.
    private <T> int addComponents0(boolean increaseWriterIndex, int cIndex,
            ByteWrapper<T> wrapper, T[] buffers, int offset) {
        checkComponentIndex(cIndex);

        // No need for consolidation
        for (int i = offset, len = buffers.length; i < len; i++) {
            T b = buffers[i];
            if (b == null) {
                break;
            }

            if (!wrapper.isEmpty(b)) {
                cIndex = addComponent0(increaseWriterIndex, cIndex, wrapper.wrap(b)) + 1;
                int size = componentCount;
                if (cIndex > size) {
                    cIndex = size;
                }
            }
        }
        return cIndex;
    }

    /**
     * Add the given {@link ByteBuf}s on the specific index
     *
     * Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuf}.
     * If you need to have it increased you need to handle it by your own.
     * <p>
     * {@link ByteBuf#release()} ownership of all {@link ByteBuf} objects in {@code buffers} is transferred to this
     * {@link CompositeByteBuf}.
     * @param cIndex the index on which the {@link ByteBuf} will be added.
     * @param buffers the {@link ByteBuf}s to add. {@link ByteBuf#release()} ownership of all
     * {@link ByteBuf} objects is transferred to this {@link CompositeByteBuf}.
     */
    public CompositeByteBuf addComponents(int cIndex, Iterable<ByteBuf> buffers) {
        return addComponents(false, cIndex, buffers);
    }

    /**
     * Add the given {@link ByteBuf} and increase the {@code writerIndex} if {@code increaseWriterIndex} is
     * {@code true}. If the provided buffer is a {@link CompositeByteBuf} itself, a "shallow copy" of its
     * readable components will be performed. Thus the actual number of new components added may vary
     * and in particular will be zero if the provided buffer is not readable.
     * <p>
     * {@link ByteBuf#release()} ownership of {@code buffer} is transferred to this {@link CompositeByteBuf}.
     * @param buffer the {@link ByteBuf} to add. {@link ByteBuf#release()} ownership is transferred to this
     * {@link CompositeByteBuf}.
     */
    public CompositeByteBuf addFlattenedComponents(boolean increaseWriterIndex, ByteBuf buffer) {
        checkNotNull(buffer, "buffer");
        final int ridx = buffer.readerIndex();
        final int widx = buffer.writerIndex();
        if (ridx == widx) {
            // Nothing readable: ownership is still consumed.
            buffer.release();
            return this;
        }
        if (!(buffer instanceof CompositeByteBuf)) {
            addComponent0(increaseWriterIndex, componentCount, buffer);
            consolidateIfNeeded();
            return this;
        }
        final CompositeByteBuf from;
        if (buffer instanceof WrappedCompositeByteBuf) {
            from = (CompositeByteBuf) buffer.unwrap();
        } else {
            from = (CompositeByteBuf) buffer;
        }
        from.checkIndex(ridx, widx - ridx);
        final Component[] fromComponents = from.components;
        final int compCountBefore = componentCount;
        final int writerIndexBefore = writerIndex;
        try {
            // Shallow-copy each readable component, retaining its source buffer (no data copy).
            for (int cidx = from.toComponentIndex0(ridx), newOffset = capacity();; cidx++) {
                final Component component = fromComponents[cidx];
                final int compOffset = component.offset;
                final int fromIdx = Math.max(ridx, compOffset);
                final int toIdx = Math.min(widx, component.endOffset);
                final int len = toIdx - fromIdx;
                if (len > 0) { // skip empty components
                    addComp(componentCount, new Component(
                            component.srcBuf.retain(), component.srcIdx(fromIdx),
                            component.buf, component.idx(fromIdx), newOffset, len, null));
                }
                if (widx == toIdx) {
                    break;
                }
                newOffset += len;
            }
            if (increaseWriterIndex) {
                writerIndex = writerIndexBefore + (widx - ridx);
            }
            consolidateIfNeeded();
            buffer.release();
            buffer = null; // signals success to the finally block
            return this;
        } finally {
            if (buffer != null) {
                // if we did not succeed, attempt to rollback any components that were added
                if (increaseWriterIndex) {
                    writerIndex = writerIndexBefore;
                }
                for (int cidx = componentCount - 1; cidx >= compCountBefore; cidx--) {
                    components[cidx].free();
                    removeComp(cidx);
                }
            }
        }
    }

    // TODO optimize further, similar to ByteBuf[] version
    // (difference here is that we don't *always* know precise size increase in advance,
    // but we do in the most common case that the Iterable is a Collection)
    private CompositeByteBuf addComponents(boolean increaseIndex, int cIndex, Iterable<ByteBuf> buffers) {
        if (buffers instanceof ByteBuf) {
            // If buffers also implements ByteBuf (e.g. CompositeByteBuf), it has to go to addComponent(ByteBuf).
            return addComponent(increaseIndex, cIndex, (ByteBuf) buffers);
        }
        checkNotNull(buffers, "buffers");
        Iterator<ByteBuf> it = buffers.iterator();
        try {
            checkComponentIndex(cIndex);

            // No need for consolidation
            while (it.hasNext()) {
                ByteBuf b = it.next();
                if (b == null) {
                    break;
                }
                cIndex = addComponent0(increaseIndex, cIndex, b) + 1;
                cIndex = Math.min(cIndex, componentCount);
            }
        } finally {
            // On any failure, release the remaining buffers so ownership is still consumed.
            while (it.hasNext()) {
                ReferenceCountUtil.safeRelease(it.next());
            }
        }
        consolidateIfNeeded();
        return this;
    }

    /**
     * This should only be called as last operation from a method as this may adjust the underlying
     * array of components and so affect the index etc.
     */
    private void consolidateIfNeeded() {
        // Consolidate if the number of components will exceed the allowed maximum by the current
        // operation.
        int size = componentCount;
        if (size > maxNumComponents) {
            consolidate0(0, size);
        }
    }

    // Validates a component index for insertion (0..componentCount inclusive).
    private void checkComponentIndex(int cIndex) {
        ensureAccessible();
        if (cIndex < 0 || cIndex > componentCount) {
            throw new IndexOutOfBoundsException(String.format(
                    "cIndex: %d (expected: >= 0 && <= numComponents(%d))",
                    cIndex, componentCount));
        }
    }

    // Validates a component range [cIndex, cIndex + numComponents) against the current count.
    private void checkComponentIndex(int cIndex, int numComponents) {
        ensureAccessible();
        if (cIndex < 0 || cIndex + numComponents > componentCount) {
            throw new IndexOutOfBoundsException(String.format(
                    "cIndex: %d, numComponents: %d " +
                    "(expected: cIndex >= 0 && cIndex + numComponents <= totalNumComponents(%d))",
                    cIndex, numComponents, componentCount));
        }
    }

    // Recomputes offset/endOffset for every component from cIndex onwards so the layout
    // stays contiguous after an insert/remove/resize.
    private void updateComponentOffsets(int cIndex) {
        int size = componentCount;
        if (size <= cIndex) {
            return;
        }

        int nextIndex = cIndex > 0 ? components[cIndex - 1].endOffset : 0;
        for (; cIndex < size; cIndex++) {
            Component c = components[cIndex];
            c.reposition(nextIndex);
            nextIndex = c.endOffset;
        }
    }

    /**
     * Remove the {@link ByteBuf} from the given index.
     *
     * @param cIndex the index from which the {@link ByteBuf} will be removed
     */
    public CompositeByteBuf removeComponent(int cIndex) {
        checkComponentIndex(cIndex);
        Component comp = components[cIndex];
        if (lastAccessed == comp) {
            lastAccessed = null; // invalidate the find cache
        }
        comp.free();
        removeComp(cIndex);
        if (comp.length() > 0) {
            // Only need to call updateComponentOffsets if the length was > 0
            updateComponentOffsets(cIndex);
        }
        return this;
    }

    /**
     * Remove the number of {@link ByteBuf}s starting from the given index.
     *
     * @param cIndex the index at which removal starts
     * @param numComponents the number of components to remove
     */
    public CompositeByteBuf removeComponents(int cIndex, int numComponents) {
        checkComponentIndex(cIndex, numComponents);

        if (numComponents == 0) {
            return this;
        }
        int endIndex = cIndex + numComponents;
        boolean needsUpdate = false;
        for (int i = cIndex; i < endIndex; ++i) {
            Component c = components[i];
            if (c.length() > 0) {
                needsUpdate = true; // at least one non-empty component changes the layout
            }
            if (lastAccessed == c) {
                lastAccessed = null; // invalidate the find cache
            }
            c.free();
        }
        removeCompRange(cIndex, endIndex);

        if (needsUpdate) {
            // Only need to call updateComponentOffsets if the length was > 0
            updateComponentOffsets(cIndex);
        }
        return this;
    }

    @Override
    public Iterator<ByteBuf> iterator() {
        ensureAccessible();
        return componentCount == 0 ? EMPTY_ITERATOR : new CompositeByteBufIterator();
    }

    // Iterates bytes ascending across component boundaries, delegating whole component
    // sub-ranges to the underlying buffers. Returns the composite-relative index of the first
    // byte the processor stops on, or -1.
    @Override
    protected int forEachByteAsc0(int start, int end, ByteProcessor processor) throws Exception {
        if (end <= start) {
            return -1;
        }
        for (int i = toComponentIndex0(start), length = end - start; length > 0; i++) {
            Component c = components[i];
            if (c.offset == c.endOffset) {
                continue; // empty
            }
            ByteBuf s = c.buf;
            int localStart = c.idx(start);
            int localLength = Math.min(length, c.endOffset - start);
            // avoid additional checks in AbstractByteBuf case
            int result = s instanceof AbstractByteBuf
                    ? ((AbstractByteBuf) s).forEachByteAsc0(localStart, localStart + localLength, processor)
                    : s.forEachByte(localStart, localLength, processor);
            if (result != -1) {
                // translate the component-local index back to a composite index
                return result - c.adjustment;
            }
            start += localLength;
            length -= localLength;
        }
        return -1;
    }

    // Descending counterpart of forEachByteAsc0; note both bounds are inclusive.
    @Override
    protected int forEachByteDesc0(int rStart, int rEnd, ByteProcessor processor) throws Exception {
        if (rEnd > rStart) { // rStart *and* rEnd are inclusive
            return -1;
        }
        for (int i = toComponentIndex0(rStart), length = 1 + rStart - rEnd; length > 0; i--) {
            Component c = components[i];
            if (c.offset == c.endOffset) {
                continue; // empty
            }
            ByteBuf s = c.buf;
            int localRStart = c.idx(length + rEnd);
            int localLength = Math.min(length, localRStart), localIndex = localRStart - localLength;
            // avoid additional checks in AbstractByteBuf case
            int result = s instanceof AbstractByteBuf
                    ? ((AbstractByteBuf) s).forEachByteDesc0(localRStart - 1, localIndex, processor)
                    : s.forEachByteDesc(localIndex, localLength, processor);
            if (result != -1) {
                return result - c.adjustment;
            }
            length -= localLength;
        }
        return -1;
    }

    /**
     * Same with {@link #slice(int, int)} except that this method returns a list.
     */
    public List<ByteBuf> decompose(int offset, int length) {
        checkIndex(offset, length);
        if (length == 0) {
            return Collections.emptyList();
        }

        int componentId = toComponentIndex0(offset);
        int bytesToSlice = length;
        // The first component
        Component firstC = components[componentId];

        // It's important to use srcBuf and NOT buf as we need to return the "original" source buffer and not the
        // unwrapped one as otherwise we could lose the ability to correctly update the reference count on the
        // returned buffer.
        ByteBuf slice = firstC.srcBuf.slice(firstC.srcIdx(offset), Math.min(firstC.endOffset - offset, bytesToSlice));
        bytesToSlice -= slice.readableBytes();

        if (bytesToSlice == 0) {
            return Collections.singletonList(slice);
        }

        List<ByteBuf> sliceList = new ArrayList<ByteBuf>(componentCount - componentId);
        sliceList.add(slice);

        // Add all the slices until there is nothing more left and then return the List.
        do {
            Component component = components[++componentId];
            // It's important to use srcBuf and NOT buf as we need to return the "original" source buffer and not the
            // unwrapped one as otherwise we could lose the ability to correctly update the reference count on the
            // returned buffer.
            slice = component.srcBuf.slice(component.srcIdx(component.offset),
                    Math.min(component.length(), bytesToSlice));
            bytesToSlice -= slice.readableBytes();
            sliceList.add(slice);
        } while (bytesToSlice > 0);

        return sliceList;
    }

    // A composite is "direct" only if every component is direct (empty composite is not).
    @Override
    public boolean isDirect() {
        int size = componentCount;
        if (size == 0) {
            return false;
        }
        for (int i = 0; i < size; i++) {
            if (!components[i].buf.isDirect()) {
                return false;
            }
        }
        return true;
    }

    // A backing array only exists for the 0- or 1-component cases.
    @Override
    public boolean hasArray() {
        switch (componentCount) {
        case 0:
            return true;
        case 1:
            return components[0].buf.hasArray();
        default:
            return false;
        }
    }

    @Override
    public byte[] array() {
        switch (componentCount) {
        case 0:
            return EmptyArrays.EMPTY_BYTES;
        case 1:
            return components[0].buf.array();
        default:
            throw new UnsupportedOperationException();
        }
    }

    @Override
    public int arrayOffset() {
        switch (componentCount) {
        case 0:
            return 0;
        case 1:
            Component c = components[0];
            return c.idx(c.buf.arrayOffset());
        default:
            throw new UnsupportedOperationException();
        }
    }

    @Override
    public boolean hasMemoryAddress() {
        switch (componentCount) {
        case 0:
            return Unpooled.EMPTY_BUFFER.hasMemoryAddress();
        case 1:
            return components[0].buf.hasMemoryAddress();
        default:
            return false;
        }
    }

    @Override
    public long memoryAddress() {
        switch (componentCount) {
        case 0:
            return Unpooled.EMPTY_BUFFER.memoryAddress();
        case 1:
            Component c = components[0];
            return c.buf.memoryAddress() + c.adjustment;
        default:
            throw new UnsupportedOperationException();
        }
    }

    // Capacity is the end offset of the last component (components are laid out contiguously).
    @Override
    public int capacity() {
        int size = componentCount;
        return size > 0 ? components[size - 1].endOffset : 0;
    }

    // Grow by appending a padding component; shrink by trimming/freeing tail components.
    @Override
    public CompositeByteBuf capacity(int newCapacity) {
        checkNewCapacity(newCapacity);

        final int size = componentCount, oldCapacity = capacity();
        if (newCapacity > oldCapacity) {
            final int paddingLength = newCapacity - oldCapacity;
            ByteBuf padding = allocBuffer(paddingLength).setIndex(0, paddingLength);
            addComponent0(false, size, padding);
            if (componentCount >= maxNumComponents) {
                // FIXME: No need to create a padding buffer and consolidate.
                // Just create a big single buffer and put the current content there.
                consolidateIfNeeded();
            }
        } else if (newCapacity < oldCapacity) {
            lastAccessed = null;
            int i = size - 1;
            for (int bytesToTrim = oldCapacity - newCapacity; i >= 0; i--) {
                Component c = components[i];
                final int cLength = c.length();
                if (bytesToTrim < cLength) {
                    // Trim the last component
                    c.endOffset -= bytesToTrim;
                    ByteBuf slice = c.slice;
                    if (slice != null) {
                        // We must replace the cached slice with a derived one to ensure that
                        // it can later be released properly in the case of PooledSlicedByteBuf.
                        c.slice = slice.slice(0, c.length());
                    }
                    break;
                }
                c.free();
                bytesToTrim -= cLength;
            }
            removeCompRange(i + 1, size);

            // Clamp reader/writer indices to the new capacity.
            if (readerIndex() > newCapacity) {
                setIndex0(newCapacity, newCapacity);
            } else if (writerIndex > newCapacity) {
                writerIndex = newCapacity;
            }
        }
        return this;
    }

    @Override
    public ByteBufAllocator alloc() {
        return alloc;
    }

    @Override
    public ByteOrder order() {
        return ByteOrder.BIG_ENDIAN;
    }

    /**
     * Return the current number of {@link ByteBuf}'s that are composed in this instance
     */
    public int numComponents() {
        return componentCount;
    }

    /**
     * Return the max number of {@link ByteBuf}'s that are composed in this instance
     */
    public int maxNumComponents() {
        return maxNumComponents;
    }

    /**
     * Return the index for the given offset
     */
    public int toComponentIndex(int offset) {
        checkIndex(offset);
        return toComponentIndex0(offset);
    }

    // Maps a byte offset to the index of the component containing it.
    // Fast paths for offset 0 and for 1-2 components; binary search otherwise.
    private int toComponentIndex0(int offset) {
        int size = componentCount;
        if (offset == 0) { // fast-path zero offset
            for (int i = 0; i < size; i++) {
                if (components[i].endOffset > 0) {
                    return i;
                }
            }
        }
        if (size <= 2) { // fast-path for 1 and 2 component count
            return size == 1 || offset < components[0].endOffset ? 0 : 1;
        }
        for (int low = 0, high = size; low <= high;) {
            int mid = low + high >>> 1;
            Component c = components[mid];
            if (offset >= c.endOffset) {
                low = mid + 1;
            } else if (offset < c.offset) {
                high = mid - 1;
            } else {
                return mid;
            }
        }

        throw new Error("should not reach here");
    }

    public int toByteIndex(int cIndex) {
        checkComponentIndex(cIndex);
        return components[cIndex].offset;
    }

    @Override
    public byte getByte(int index) {
        Component c = findComponent(index);
        return c.buf.getByte(c.idx(index));
    }

    @Override
    protected byte _getByte(int index) {
        Component c = findComponent0(index);
        return c.buf.getByte(c.idx(index));
    }

    // The multi-byte _getXxx readers below delegate to the underlying buffer when the whole
    // value lies inside one component, and otherwise assemble it from smaller reads that may
    // straddle a component boundary.
    @Override
    protected short _getShort(int index) {
        Component c = findComponent0(index);
        if (index + 2 <= c.endOffset) {
            return c.buf.getShort(c.idx(index));
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            return (short) ((_getByte(index) & 0xff) << 8 | _getByte(index + 1) & 0xff);
        } else {
            return (short) (_getByte(index) & 0xff | (_getByte(index + 1) & 0xff) << 8);
        }
    }

    @Override
    protected short _getShortLE(int index) {
        Component c = findComponent0(index);
        if (index + 2 <= c.endOffset) {
            return c.buf.getShortLE(c.idx(index));
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            return (short) (_getByte(index) & 0xff | (_getByte(index + 1) & 0xff) << 8);
        } else {
            return (short) ((_getByte(index) & 0xff) << 8 | _getByte(index + 1) & 0xff);
        }
    }

    @Override
    protected int _getUnsignedMedium(int index) {
        Component c = findComponent0(index);
        if (index + 3 <= c.endOffset) {
            return c.buf.getUnsignedMedium(c.idx(index));
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            return (_getShort(index) & 0xffff) << 8 | _getByte(index + 2) & 0xff;
        } else {
            return _getShort(index) & 0xFFFF | (_getByte(index + 2) & 0xFF) << 16;
        }
    }

    @Override
    protected int _getUnsignedMediumLE(int index) {
        Component c = findComponent0(index);
        if (index + 3 <= c.endOffset) {
            return c.buf.getUnsignedMediumLE(c.idx(index));
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            return _getShortLE(index) & 0xffff | (_getByte(index + 2) & 0xff) << 16;
        } else {
            return (_getShortLE(index) & 0xffff) << 8 | _getByte(index + 2) & 0xff;
        }
    }

    @Override
    protected int _getInt(int index) {
        Component c = findComponent0(index);
        if (index + 4 <= c.endOffset) {
            return c.buf.getInt(c.idx(index));
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            return (_getShort(index) & 0xffff) << 16 | _getShort(index + 2) & 0xffff;
        } else {
            return _getShort(index) & 0xFFFF | (_getShort(index + 2) & 0xFFFF) << 16;
        }
    }

    @Override
    protected int _getIntLE(int index) {
        Component c = findComponent0(index);
        if (index + 4 <= c.endOffset) {
            return c.buf.getIntLE(c.idx(index));
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            return _getShortLE(index) & 0xffff | (_getShortLE(index + 2) & 0xffff) << 16;
        } else {
            return (_getShortLE(index) & 0xffff) << 16 | _getShortLE(index + 2) & 0xffff;
        }
    }

    @Override
    protected long _getLong(int index) {
        Component c = findComponent0(index);
        if (index + 8 <= c.endOffset) {
            return c.buf.getLong(c.idx(index));
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            return (_getInt(index) & 0xffffffffL) << 32 | _getInt(index + 4) & 0xffffffffL;
        } else {
            return _getInt(index) & 0xFFFFFFFFL | (_getInt(index + 4) & 0xFFFFFFFFL) << 32;
        }
    }

    @Override
    protected long _getLongLE(int index) {
        Component c = findComponent0(index);
        if (index + 8 <= c.endOffset) {
            return c.buf.getLongLE(c.idx(index));
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            return _getIntLE(index) & 0xffffffffL | (_getIntLE(index + 4) & 0xffffffffL) << 32;
        } else {
            return (_getIntLE(index) & 0xffffffffL) << 32 | _getIntLE(index + 4) & 0xffffffffL;
        }
    }

    // Bulk copy out: walks components, copying the overlap of [index, index+length) from each.
    @Override
    public CompositeByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
        checkDstIndex(index, length, dstIndex, dst.length);
        if (length == 0) {
            return this;
        }

        int i = toComponentIndex0(index);
        while (length > 0) {
            Component c = components[i];
            int localLength = Math.min(length, c.endOffset - index);
            c.buf.getBytes(c.idx(index), dst, dstIndex, localLength);
            index += localLength;
            dstIndex += localLength;
            length -= localLength;
            i++;
        }
        return this;
    }

    @Override
    public CompositeByteBuf getBytes(int index, ByteBuffer dst) {
        int limit = dst.limit();
        int length = dst.remaining();

        checkIndex(index, length);
        if (length == 0) {
            return this;
        }

        int i = toComponentIndex0(index);
        try {
            while (length > 0) {
                Component c = components[i];
                int localLength = Math.min(length, c.endOffset - index);
                // Temporarily cap the limit so each component copies only its own span.
                dst.limit(dst.position() + localLength);
                c.buf.getBytes(c.idx(index), dst);
                index += localLength;
                length -= localLength;
                i++;
            }
        } finally {
            dst.limit(limit); // always restore the caller's limit
        }
        return this;
    }

    @Override
    public CompositeByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
        checkDstIndex(index, length, dstIndex, dst.capacity());
        if (length == 0) {
            return this;
        }

        int i = toComponentIndex0(index);
        while (length > 0) {
            Component c = components[i];
            int localLength = Math.min(length, c.endOffset - index);
            c.buf.getBytes(c.idx(index), dst, dstIndex, localLength);
            index += localLength;
            dstIndex += localLength;
            length -= localLength;
            i++;
        }
        return this;
    }

    @Override
    public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
        int count = nioBufferCount();
        if (count == 1) {
            return out.write(internalNioBuffer(index, length));
        } else {
            long writtenBytes = out.write(nioBuffers(index, length));
            if (writtenBytes > Integer.MAX_VALUE) {
                return Integer.MAX_VALUE; // clamp to the int return type
            } else {
                return (int) writtenBytes;
            }
        }
    }

    @Override
    public int getBytes(int index, FileChannel out, long position, int length) throws IOException {
        int count = nioBufferCount();
        if (count == 1) {
            return out.write(internalNioBuffer(index, length), position);
        } else {
            long writtenBytes = 0;
            for (ByteBuffer buf : nioBuffers(index, length)) {
                writtenBytes += out.write(buf, position + writtenBytes);
            }
            if (writtenBytes > Integer.MAX_VALUE) {
                return Integer.MAX_VALUE; // clamp to the int return type
            }
            return (int) writtenBytes;
        }
    }

    @Override
    public CompositeByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
        checkIndex(index, length);
        if (length == 0) {
            return this;
        }

        int i = toComponentIndex0(index);
        while (length > 0) {
            Component c = components[i];
            int localLength = Math.min(length, c.endOffset - index);
            c.buf.getBytes(c.idx(index), out, localLength);
            index += localLength;
            length -= localLength;
            i++;
        }
        return this;
    }

    @Override
    public CompositeByteBuf setByte(int index, int value) {
        Component c = findComponent(index);
        c.buf.setByte(c.idx(index), value);
        return this;
    }

    @Override
    protected void _setByte(int index, int value) {
        Component c = findComponent0(index);
        c.buf.setByte(c.idx(index), value);
    }

    @Override
    public CompositeByteBuf setShort(int index, int value) {
        checkIndex(index, 2);
        _setShort(index, value);
        return this;
    }

    // The multi-byte _setXxx writers mirror the _getXxx readers: a single delegated write when
    // the value fits in one component, otherwise byte/short-wise writes across the boundary.
    @Override
    protected void _setShort(int index, int value) {
        Component c = findComponent0(index);
        if (index + 2 <= c.endOffset) {
            c.buf.setShort(c.idx(index), value);
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            _setByte(index, (byte) (value >>> 8));
            _setByte(index + 1, (byte) value);
        } else {
            _setByte(index, (byte) value);
            _setByte(index + 1, (byte) (value >>> 8));
        }
    }

    @Override
    protected void _setShortLE(int index, int value) {
        Component c = findComponent0(index);
        if (index + 2 <= c.endOffset) {
            c.buf.setShortLE(c.idx(index), value);
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            _setByte(index, (byte) value);
            _setByte(index + 1, (byte) (value >>> 8));
        } else {
            _setByte(index, (byte) (value >>> 8));
            _setByte(index + 1, (byte) value);
        }
    }

    @Override
    public CompositeByteBuf setMedium(int index, int value) {
        checkIndex(index, 3);
        _setMedium(index, value);
        return this;
    }

    @Override
    protected void _setMedium(int index, int value) {
        Component c = findComponent0(index);
        if (index + 3 <= c.endOffset) {
            c.buf.setMedium(c.idx(index), value);
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            _setShort(index, (short) (value >> 8));
            _setByte(index + 2, (byte) value);
        } else {
            _setShort(index, (short) value);
            _setByte(index + 2, (byte) (value >>> 16));
        }
    }

    @Override
    protected void _setMediumLE(int index, int value) {
        Component c = findComponent0(index);
        if (index + 3 <= c.endOffset) {
            c.buf.setMediumLE(c.idx(index), value);
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            _setShortLE(index, (short) value);
            _setByte(index + 2, (byte) (value >>> 16));
        } else {
            _setShortLE(index, (short) (value >> 8));
            _setByte(index + 2, (byte) value);
        }
    }

    @Override
    public CompositeByteBuf setInt(int index, int value) {
        checkIndex(index, 4);
        _setInt(index, value);
        return this;
    }

    @Override
    protected void _setInt(int index, int value) {
        Component c = findComponent0(index);
        if (index + 4 <= c.endOffset) {
            c.buf.setInt(c.idx(index), value);
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            _setShort(index, (short) (value >>> 16));
            _setShort(index + 2, (short) value);
        } else {
            _setShort(index, (short) value);
            _setShort(index + 2, (short) (value >>> 16));
        }
    }

    @Override
    protected void _setIntLE(int index, int value) {
        Component c = findComponent0(index);
        if (index + 4 <= c.endOffset) {
            c.buf.setIntLE(c.idx(index), value);
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            _setShortLE(index, (short) value);
            _setShortLE(index + 2, (short) (value >>> 16));
        } else {
            _setShortLE(index, (short) (value >>> 16));
            _setShortLE(index + 2, (short) value);
        }
    }

    @Override
    public CompositeByteBuf setLong(int index, long value) {
        checkIndex(index, 8);
        _setLong(index, value);
        return this;
    }

    @Override
    protected void _setLong(int index, long value) {
        Component c = findComponent0(index);
        if (index + 8 <= c.endOffset) {
            c.buf.setLong(c.idx(index), value);
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            _setInt(index, (int) (value >>> 32));
            _setInt(index + 4, (int) value);
        } else {
            _setInt(index, (int) value);
            _setInt(index + 4, (int) (value >>> 32));
        }
    }

    @Override
    protected void _setLongLE(int index, long value) {
        Component c = findComponent0(index);
        if (index + 8 <= c.endOffset) {
            c.buf.setLongLE(c.idx(index), value);
        } else if (order() == ByteOrder.BIG_ENDIAN) {
            _setIntLE(index, (int) value);
            _setIntLE(index + 4, (int) (value >>> 32));
        } else {
            _setIntLE(index, (int) (value >>> 32));
            _setIntLE(index + 4, (int) value);
        }
    }

    // Bulk copy in: walks components, writing the overlap of [index, index+length) into each.
    @Override
    public CompositeByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
        checkSrcIndex(index, length, srcIndex, src.length);
        if (length == 0) {
            return this;
        }

        int i = toComponentIndex0(index);
        while (length > 0) {
            Component c = components[i];
            int localLength = Math.min(length, c.endOffset - index);
            c.buf.setBytes(c.idx(index), src, srcIndex, localLength);
            index += localLength;
            srcIndex += localLength;
            length -= localLength;
            i++;
        }
        return this;
    }

    @Override
    public CompositeByteBuf setBytes(int index, ByteBuffer src) {
        int limit = src.limit();
        int length = src.remaining();

        checkIndex(index, length);
        if (length == 0) {
            return this;
        }

        int i = toComponentIndex0(index);
        try {
            while (length > 0) {
                Component c = components[i];
                int localLength = Math.min(length, c.endOffset - index);
                // Temporarily cap the limit so each component consumes only its own span.
                src.limit(src.position() + localLength);
                c.buf.setBytes(c.idx(index), src);
                index += localLength;
                length -= localLength;
                i++;
            }
        } finally {
            src.limit(limit); // always restore the caller's limit
        }
        return this;
    }

    @Override
    public CompositeByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
        checkSrcIndex(index, length, srcIndex, src.capacity());
        if (length == 0) {
            return this;
        }

        int i = toComponentIndex0(index);
        while (length > 0) {
            Component c = components[i];
            int localLength = Math.min(length, c.endOffset - index);
            c.buf.setBytes(c.idx(index), src, srcIndex, localLength);
            index += localLength;
            srcIndex += localLength;
            length -= localLength;
            i++;
        }
        return this;
    }

    // Fills from a stream; may return fewer bytes than requested. Returns -1 only when EOF is
    // hit before any byte was read.
    @Override
    public int setBytes(int index, InputStream in, int length) throws IOException {
        checkIndex(index, length);
        if (length == 0) {
            return in.read(EmptyArrays.EMPTY_BYTES);
        }

        int i = toComponentIndex0(index);
        int readBytes = 0;
        do {
            Component c = components[i];
            int localLength = Math.min(length, c.endOffset - index);
            if (localLength == 0) {
                // Skip empty buffer
                i++;
                continue;
            }
            int localReadBytes = c.buf.setBytes(c.idx(index), in, localLength);
            if (localReadBytes < 0) {
                if (readBytes == 0) {
                    return -1; // EOF before anything was read
                } else {
                    break;
                }
            }

            index += localReadBytes;
            length -= localReadBytes;
            readBytes += localReadBytes;
            if (localReadBytes == localLength) {
                i++; // component filled, move to the next one
            }
        } while (length > 0);

        return readBytes;
    }

    @Override
    public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
        checkIndex(index, length);
        if (length == 0) {
            return in.read(EMPTY_NIO_BUFFER);
        }

        int i = toComponentIndex0(index);
        int readBytes = 0;
        do {
            Component c = components[i];
            int localLength = Math.min(length, c.endOffset - index);
            if (localLength == 0) {
                // Skip empty buffer
                i++;
                continue;
            }
            int localReadBytes = c.buf.setBytes(c.idx(index), in, localLength);

            if (localReadBytes == 0) {
                break; // non-blocking channel had nothing available
            }

            if (localReadBytes < 0) {
                if (readBytes == 0) {
                    return -1;
                } else {
                    break;
                }
            }

            index += localReadBytes;
            length -= localReadBytes;
            readBytes += localReadBytes;
            if (localReadBytes == localLength) {
                i++;
            }
        } while (length > 0);

        return readBytes;
    }

    @Override
    public int setBytes(int index, FileChannel in, long position, int length) throws IOException {
        checkIndex(index, length);
        if (length == 0) {
            return in.read(EMPTY_NIO_BUFFER, position);
        }

        int i = toComponentIndex0(index);
        int readBytes = 0;
        do {
            Component c = components[i];
            int localLength = Math.min(length, c.endOffset - index);
            if (localLength == 0) {
                // Skip empty buffer
                i++;
                continue;
            }
            int localReadBytes = c.buf.setBytes(c.idx(index), in, position + readBytes, localLength);
            if (localReadBytes == 0) {
                break;
            }

            if (localReadBytes < 0) {
                if (readBytes == 0) {
                    return -1;
                } else {
                    break;
                }
            }

            index += localReadBytes;
            length -= localReadBytes;
            readBytes += localReadBytes;
            if (localReadBytes == localLength) {
                i++;
            }
        } while (length > 0);

        return readBytes;
    }

    @Override
    public ByteBuf copy(int index, int length) {
        checkIndex(index, length);
        ByteBuf dst = allocBuffer(length);
        if (length != 0) {
            copyTo(index, length,
                    toComponentIndex0(index), dst);
        }
        return dst;
    }

    // Copies length bytes starting at composite offset index into dst, beginning at the given
    // component, then makes the whole destination readable.
    private void copyTo(int index, int length, int componentId, ByteBuf dst) {
        int dstIndex = 0;
        int i = componentId;

        while (length > 0) {
            Component c = components[i];
            int localLength = Math.min(length, c.endOffset - index);
            c.buf.getBytes(c.idx(index), dst, dstIndex, localLength);
            index += localLength;
            dstIndex += localLength;
            length -= localLength;
            i++;
        }

        dst.writerIndex(dst.capacity());
    }

    /**
     * Return the {@link ByteBuf} on the specified index
     *
     * @param cIndex the index for which the {@link ByteBuf} should be returned
     * @return buf the {@link ByteBuf} on the specified index
     */
    public ByteBuf component(int cIndex) {
        checkComponentIndex(cIndex);
        return components[cIndex].duplicate();
    }

    /**
     * Return the {@link ByteBuf} on the specified index
     *
     * @param offset the offset for which the {@link ByteBuf} should be returned
     * @return the {@link ByteBuf} on the specified index
     */
    public ByteBuf componentAtOffset(int offset) {
        return findComponent(offset).duplicate();
    }

    /**
     * Return the internal {@link ByteBuf} on the specified index. Note that updating the indexes of the returned
     * buffer will lead to an undefined behavior of this buffer.
     *
     * @param cIndex the index for which the {@link ByteBuf} should be returned
     */
    public ByteBuf internalComponent(int cIndex) {
        checkComponentIndex(cIndex);
        return components[cIndex].slice();
    }

    /**
     * Return the internal {@link ByteBuf} on the specified offset. Note that updating the indexes of the returned
     * buffer will lead to an undefined behavior of this buffer.
     *
     * @param offset the offset for which the {@link ByteBuf} should be returned
     */
    public ByteBuf internalComponentAtOffset(int offset) {
        return findComponent(offset).slice();
    }

    // weak cache - check it first when looking for component
    private Component lastAccessed;

    // Checked lookup: serves from the single-entry cache when possible, else validates the
    // offset and binary-searches.
    private Component findComponent(int offset) {
        Component la = lastAccessed;
        if (la != null && offset >= la.offset && offset < la.endOffset) {
            ensureAccessible();
            return la;
        }
        checkIndex(offset);
        return findIt(offset);
    }

    // Unchecked lookup used by the _getXxx/_setXxx hot paths (bounds already verified).
    private Component findComponent0(int offset) {
        Component la = lastAccessed;
        if (la != null && offset >= la.offset && offset < la.endOffset) {
            return la;
        }
        return findIt(offset);
    }

    // Binary search over component offsets; updates the lastAccessed cache on a hit.
    private Component findIt(int offset) {
        for (int low = 0, high = componentCount; low <= high;) {
            int mid = low + high >>> 1;
            Component c = components[mid];
            if (c == null) {
                throw new IllegalStateException("No component found for offset. " +
                        "Composite buffer layout might be outdated, e.g. from a discardReadBytes call.");
            }
            if (offset >= c.endOffset) {
                low = mid + 1;
            } else if (offset < c.offset) {
                high = mid - 1;
            } else {
                lastAccessed = c;
                return c;
            }
        }

        throw new Error("should not reach here");
    }

    @Override
    public int nioBufferCount() {
        int size = componentCount;
        switch (size) {
        case 0:
            return 1;
        case 1:
            return components[0].buf.nioBufferCount();
        default:
            int count = 0;
            for (int i = 0; i < size; i++) {
                count += components[i].buf.nioBufferCount();
            }
            return count;
        }
    }

    @Override
    public ByteBuffer internalNioBuffer(int index, int length) {
        switch (componentCount) {
        case 0:
            return EMPTY_NIO_BUFFER;
        case 1:
            return components[0].internalNioBuffer(index, length);
        default:
            throw new UnsupportedOperationException();
        }
    }

    // Returns a single ByteBuffer view; falls back to copying into a freshly allocated buffer
    // when the range spans multiple NIO buffers.
    @Override
    public ByteBuffer nioBuffer(int index, int length) {
        checkIndex(index, length);

        switch (componentCount) {
        case 0:
            return EMPTY_NIO_BUFFER;
        case 1:
            Component c = components[0];
            ByteBuf buf = c.buf;
            if (buf.nioBufferCount() == 1) {
                return buf.nioBuffer(c.idx(index), length);
            }
            break;
        default:
            break;
        }

        ByteBuffer[] buffers = nioBuffers(index, length);

        if (buffers.length == 1) {
            return buffers[0];
        }

        // Multiple backing buffers: merge them into one (this copies the data).
        ByteBuffer merged = ByteBuffer.allocate(length).order(order());
        for (ByteBuffer buf: buffers) {
            merged.put(buf);
        }
        merged.flip();
        return merged;
    }

    @Override
    public ByteBuffer[] nioBuffers(int index, int length) {
        checkIndex(index, length);
        if (length == 0) {
            return new ByteBuffer[] { EMPTY_NIO_BUFFER };
        }

        RecyclableArrayList buffers = RecyclableArrayList.newInstance(componentCount);
        try {
            int i = toComponentIndex0(index);
            while (length > 0) {
                Component c = components[i];
                ByteBuf s = c.buf;
                int localLength = Math.min(length, c.endOffset - index);
                switch (s.nioBufferCount()) {
                    case 0:
                        throw new UnsupportedOperationException();
                    case 1:
                        buffers.add(s.nioBuffer(c.idx(index), localLength));
                        break;
                    default:
                        Collections.addAll(buffers, s.nioBuffers(c.idx(index), localLength));
                }

                index += localLength;
                length -= localLength;
                i++;
            }

            return buffers.toArray(EmptyArrays.EMPTY_BYTE_BUFFERS);
        } finally {
            buffers.recycle(); // return the scratch list to its pool
        }
    }

    /**
     * Consolidate the composed {@link ByteBuf}s
     */
    public CompositeByteBuf consolidate() {
        ensureAccessible();
        consolidate0(0, componentCount);
        return this;
    }

    /**
     * Consolidate the composed {@link ByteBuf}s
     *
     * @param cIndex the index on which to start to compose
     * @param numComponents the number of components to compose
     */
    public CompositeByteBuf consolidate(int cIndex, int numComponents) {
        checkComponentIndex(cIndex, numComponents);
        consolidate0(cIndex, numComponents);
        return this;
    }

    // Merges numComponents components starting at cIndex into a single freshly allocated
    // component, releasing the originals.
    private void consolidate0(int cIndex, int numComponents) {
        if (numComponents <= 1) {
            return;
        }

        final int endCIndex = cIndex + numComponents;
        final int startOffset = cIndex != 0 ?
                components[cIndex].offset : 0;
        final int capacity = components[endCIndex - 1].endOffset - startOffset;
        final ByteBuf consolidated = allocBuffer(capacity);

        for (int i = cIndex; i < endCIndex; i++) {
            components[i].transferTo(consolidated); // copies then releases each component
        }
        lastAccessed = null;
        removeCompRange(cIndex + 1, endCIndex);
        components[cIndex] = newComponent(consolidated, 0);
        if (cIndex != 0 || numComponents != componentCount) {
            updateComponentOffsets(cIndex);
        }
    }

    /**
     * Discard all {@link ByteBuf}s which are read.
     */
    public CompositeByteBuf discardReadComponents() {
        ensureAccessible();
        final int readerIndex = readerIndex();
        if (readerIndex == 0) {
            return this;
        }

        // Discard everything if (readerIndex = writerIndex = capacity).
        int writerIndex = writerIndex();
        if (readerIndex == writerIndex && writerIndex == capacity()) {
            for (int i = 0, size = componentCount; i < size; i++) {
                components[i].free();
            }
            lastAccessed = null;
            clearComps();
            setIndex(0, 0);
            adjustMarkers(readerIndex);
            return this;
        }

        // Remove read components.
        int firstComponentId = 0;
        Component c = null;
        for (int size = componentCount; firstComponentId < size; firstComponentId++) {
            c = components[firstComponentId];
            if (c.endOffset > readerIndex) {
                break;
            }
            c.free();
        }
        if (firstComponentId == 0) {
            return this; // Nothing to discard
        }
        Component la = lastAccessed;
        if (la != null && la.endOffset <= readerIndex) {
            lastAccessed = null; // cached component was freed above
        }
        removeCompRange(0, firstComponentId);

        // Update indexes and markers.
        int offset = c.offset;
        updateComponentOffsets(0);
        setIndex(readerIndex - offset, writerIndex - offset);
        adjustMarkers(offset);
        return this;
    }

    // Like discardReadComponents(), but additionally trims the partially-read first component
    // so the reader index ends up at 0.
    @Override
    public CompositeByteBuf discardReadBytes() {
        ensureAccessible();
        final int readerIndex = readerIndex();
        if (readerIndex == 0) {
            return this;
        }

        // Discard everything if (readerIndex = writerIndex = capacity).
        int writerIndex = writerIndex();
        if (readerIndex == writerIndex && writerIndex == capacity()) {
            for (int i = 0, size = componentCount; i < size; i++) {
                components[i].free();
            }
            lastAccessed = null;
            clearComps();
            setIndex(0, 0);
            adjustMarkers(readerIndex);
            return this;
        }

        int firstComponentId = 0;
        Component c = null;
        for (int size = componentCount; firstComponentId < size; firstComponentId++) {
            c = components[firstComponentId];
            if (c.endOffset > readerIndex) {
                break;
            }
            c.free();
        }

        // Replace the first readable component with a new slice.
        int trimmedBytes = readerIndex - c.offset;
        c.offset = 0;
        c.endOffset -= readerIndex;
        c.srcAdjustment += readerIndex;
        c.adjustment += readerIndex;
        ByteBuf slice = c.slice;
        if (slice != null) {
            // We must replace the cached slice with a derived one to ensure that
            // it can later be released properly in the case of PooledSlicedByteBuf.
            c.slice = slice.slice(trimmedBytes, c.length());
        }
        Component la = lastAccessed;
        if (la != null && la.endOffset <= readerIndex) {
            lastAccessed = null;
        }
        removeCompRange(0, firstComponentId);

        // Update indexes and markers.
        updateComponentOffsets(0);
        setIndex(0, writerIndex - readerIndex);
        adjustMarkers(readerIndex);
        return this;
    }

    // Allocates a scratch buffer matching this composite's direct/heap preference.
    private ByteBuf allocBuffer(int capacity) {
        return direct ? alloc().directBuffer(capacity) : alloc().heapBuffer(capacity);
    }

    @Override
    public String toString() {
        String result = super.toString();
        result = result.substring(0, result.length() - 1);
        return result + ", components=" + componentCount + ')';
    }

    // One slice of the composite. Tracks both the buffer as added (srcBuf) and its fully
    // unwrapped form (buf), plus the adjustments that map composite offsets to each.
    private static final class Component {
        final ByteBuf srcBuf; // the originally added buffer
        final ByteBuf buf; // srcBuf unwrapped zero or more times

        int srcAdjustment; // index of the start of this CompositeByteBuf relative to srcBuf
        int adjustment; // index of the start of this CompositeByteBuf relative to buf

        int offset; // offset of this component within this CompositeByteBuf
        int endOffset; // end offset of this component within this CompositeByteBuf

        private ByteBuf slice; // cached slice, may be null

        Component(ByteBuf srcBuf, int srcOffset, ByteBuf buf, int bufOffset,
                int offset, int len, ByteBuf slice) {
            this.srcBuf = srcBuf;
            this.srcAdjustment = srcOffset - offset;
            this.buf = buf;
            this.adjustment = bufOffset - offset;
            this.offset = offset;
            this.endOffset = offset + len;
            this.slice = slice;
        }

        // Maps a composite offset to an index into srcBuf.
        int srcIdx(int index) {
            return index + srcAdjustment;
        }

        // Maps a composite offset to an index into the unwrapped buf.
        int idx(int index) {
            return index + adjustment;
        }

        int length() {
            return endOffset - offset;
        }

        // Moves this component to a new composite offset, keeping both adjustments consistent.
        void reposition(int newOffset) {
            int move = newOffset - offset;
            endOffset += move;
            srcAdjustment -= move;
            adjustment -= move;
            offset = newOffset;
        }

        // copy then release
        void transferTo(ByteBuf dst) {
            dst.writeBytes(buf, idx(offset), length());
            free();
        }

        ByteBuf slice() {
            ByteBuf s = slice;
            if (s == null) {
                slice = s = srcBuf.slice(srcIdx(offset), length());
            }
            return s;
        }

        ByteBuf duplicate() {
            return srcBuf.duplicate();
        }

        ByteBuffer internalNioBuffer(int index, int length) {
            // Some buffers override this so we must use srcBuf
            return srcBuf.internalNioBuffer(srcIdx(index), length);
        }

        void free() {
            slice = null;
            // Release the original buffer since it may have a different
            // refcount to the unwrapped buf (e.g.
if PooledSlicedByteBuf) srcBuf.release(); } } @Override public CompositeByteBuf readerIndex(int readerIndex) { super.readerIndex(readerIndex); return this; } @Override public CompositeByteBuf writerIndex(int writerIndex) { super.writerIndex(writerIndex); return this; } @Override public CompositeByteBuf setIndex(int readerIndex, int writerIndex) { super.setIndex(readerIndex, writerIndex); return this; } @Override public CompositeByteBuf clear() { super.clear(); return this; } @Override public CompositeByteBuf markReaderIndex() { super.markReaderIndex(); return this; } @Override public CompositeByteBuf resetReaderIndex() { super.resetReaderIndex(); return this; } @Override public CompositeByteBuf markWriterIndex() { super.markWriterIndex(); return this; } @Override public CompositeByteBuf resetWriterIndex() { super.resetWriterIndex(); return this; } @Override public CompositeByteBuf ensureWritable(int minWritableBytes) { super.ensureWritable(minWritableBytes); return this; } @Override public CompositeByteBuf getBytes(int index, ByteBuf dst) { return getBytes(index, dst, dst.writableBytes()); } @Override public CompositeByteBuf getBytes(int index, ByteBuf dst, int length) { getBytes(index, dst, dst.writerIndex(), length); dst.writerIndex(dst.writerIndex() + length); return this; } @Override public CompositeByteBuf getBytes(int index, byte[] dst) { return getBytes(index, dst, 0, dst.length); } @Override public CompositeByteBuf setBoolean(int index, boolean value) { return setByte(index, value? 
1 : 0); } @Override public CompositeByteBuf setChar(int index, int value) { return setShort(index, value); } @Override public CompositeByteBuf setFloat(int index, float value) { return setInt(index, Float.floatToRawIntBits(value)); } @Override public CompositeByteBuf setDouble(int index, double value) { return setLong(index, Double.doubleToRawLongBits(value)); } @Override public CompositeByteBuf setBytes(int index, ByteBuf src) { super.setBytes(index, src, src.readableBytes()); return this; } @Override public CompositeByteBuf setBytes(int index, ByteBuf src, int length) { super.setBytes(index, src, length); return this; } @Override public CompositeByteBuf setBytes(int index, byte[] src) { return setBytes(index, src, 0, src.length); } @Override public CompositeByteBuf setZero(int index, int length) { super.setZero(index, length); return this; } @Override public CompositeByteBuf readBytes(ByteBuf dst) { super.readBytes(dst, dst.writableBytes()); return this; } @Override public CompositeByteBuf readBytes(ByteBuf dst, int length) { super.readBytes(dst, length); return this; } @Override public CompositeByteBuf readBytes(ByteBuf dst, int dstIndex, int length) { super.readBytes(dst, dstIndex, length); return this; } @Override public CompositeByteBuf readBytes(byte[] dst) { super.readBytes(dst, 0, dst.length); return this; } @Override public CompositeByteBuf readBytes(byte[] dst, int dstIndex, int length) { super.readBytes(dst, dstIndex, length); return this; } @Override public CompositeByteBuf readBytes(ByteBuffer dst) { super.readBytes(dst); return this; } @Override public CompositeByteBuf readBytes(OutputStream out, int length) throws IOException { super.readBytes(out, length); return this; } @Override public CompositeByteBuf skipBytes(int length) { super.skipBytes(length); return this; } @Override public CompositeByteBuf writeBoolean(boolean value) { writeByte(value ? 
1 : 0); return this; } @Override public CompositeByteBuf writeByte(int value) { ensureWritable0(1); _setByte(writerIndex++, value); return this; } @Override public CompositeByteBuf writeShort(int value) { super.writeShort(value); return this; } @Override public CompositeByteBuf writeMedium(int value) { super.writeMedium(value); return this; } @Override public CompositeByteBuf writeInt(int value) { super.writeInt(value); return this; } @Override public CompositeByteBuf writeLong(long value) { super.writeLong(value); return this; } @Override public CompositeByteBuf writeChar(int value) { super.writeShort(value); return this; } @Override public CompositeByteBuf writeFloat(float value) { super.writeInt(Float.floatToRawIntBits(value)); return this; } @Override public CompositeByteBuf writeDouble(double value) { super.writeLong(Double.doubleToRawLongBits(value)); return this; } @Override public CompositeByteBuf writeBytes(ByteBuf src) { super.writeBytes(src, src.readableBytes()); return this; } @Override public CompositeByteBuf writeBytes(ByteBuf src, int length) { super.writeBytes(src, length); return this; } @Override public CompositeByteBuf writeBytes(ByteBuf src, int srcIndex, int length) { super.writeBytes(src, srcIndex, length); return this; } @Override public CompositeByteBuf writeBytes(byte[] src) { super.writeBytes(src, 0, src.length); return this; } @Override public CompositeByteBuf writeBytes(byte[] src, int srcIndex, int length) { super.writeBytes(src, srcIndex, length); return this; } @Override public CompositeByteBuf writeBytes(ByteBuffer src) { super.writeBytes(src); return this; } @Override public CompositeByteBuf writeZero(int length) { super.writeZero(length); return this; } @Override public CompositeByteBuf retain(int increment) { super.retain(increment); return this; } @Override public CompositeByteBuf retain() { super.retain(); return this; } @Override public CompositeByteBuf touch() { return this; } @Override public CompositeByteBuf touch(Object 
hint) { return this; } @Override public ByteBuffer[] nioBuffers() { return nioBuffers(readerIndex(), readableBytes()); } @Override public CompositeByteBuf discardSomeReadBytes() { return discardReadComponents(); } @Override protected void deallocate() { if (freed) { return; } freed = true; // We're not using foreach to avoid creating an iterator. // see https://github.com/netty/netty/issues/2642 for (int i = 0, size = componentCount; i < size; i++) { components[i].free(); } } @Override boolean isAccessible() { return !freed; } @Override public ByteBuf unwrap() { return null; } private final class CompositeByteBufIterator implements Iterator<ByteBuf> { private final int size = numComponents(); private int index; @Override public boolean hasNext() { return size > index; } @Override public ByteBuf next() { if (size != numComponents()) { throw new ConcurrentModificationException(); } if (!hasNext()) { throw new NoSuchElementException(); } try { return components[index++].slice(); } catch (IndexOutOfBoundsException e) { throw new ConcurrentModificationException(); } } @Override public void remove() { throw new UnsupportedOperationException("Read-Only"); } } // Component array manipulation - range checking omitted private void clearComps() { removeCompRange(0, componentCount); } private void removeComp(int i) { removeCompRange(i, i + 1); } private void removeCompRange(int from, int to) { if (from >= to) { return; } final int size = componentCount; assert from >= 0 && to <= size; if (to < size) { System.arraycopy(components, to, components, from, size - to); } int newSize = size - to + from; for (int i = newSize; i < size; i++) { components[i] = null; } componentCount = newSize; } private void addComp(int i, Component c) { shiftComps(i, 1); components[i] = c; } private void shiftComps(int i, int count) { final int size = componentCount, newSize = size + count; assert i >= 0 && i <= size && count > 0; if (newSize > components.length) { // grow the array int newArrSize = 
Math.max(size + (size >> 1), newSize); Component[] newArr; if (i == size) { newArr = Arrays.copyOf(components, newArrSize, Component[].class); } else { newArr = new Component[newArrSize]; if (i > 0) { System.arraycopy(components, 0, newArr, 0, i); } if (i < size) { System.arraycopy(components, i, newArr, i + count, size - i); } } components = newArr; } else if (i < size) { System.arraycopy(components, i, components, i + count, size - i); } componentCount = newSize; } }
netty/netty
buffer/src/main/java/io/netty/buffer/CompositeByteBuf.java
931
/* * The MIT License * * Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi, Stephen Connolly, Tom Huybrechts * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package hudson; import static hudson.init.InitMilestone.COMPLETED; import static hudson.init.InitMilestone.PLUGINS_LISTED; import static hudson.init.InitMilestone.PLUGINS_PREPARED; import static hudson.init.InitMilestone.PLUGINS_STARTED; import static java.util.logging.Level.FINE; import static java.util.logging.Level.INFO; import static java.util.logging.Level.WARNING; import static java.util.stream.Collectors.toList; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.PluginWrapper.Dependency; import hudson.init.InitMilestone; import hudson.init.InitStrategy; import hudson.init.InitializerFinder; import hudson.lifecycle.Lifecycle; import hudson.model.AbstractItem; import hudson.model.AbstractModelObject; import hudson.model.AdministrativeMonitor; import hudson.model.Api; import hudson.model.Descriptor; import hudson.model.DownloadService; import hudson.model.Failure; import hudson.model.ItemGroupMixIn; import hudson.model.UpdateCenter; import hudson.model.UpdateCenter.DownloadJob; import hudson.model.UpdateCenter.InstallationJob; import hudson.model.UpdateSite; import hudson.security.ACL; import hudson.security.ACLContext; import hudson.security.Permission; import hudson.security.PermissionScope; import hudson.util.CyclicGraphDetector; import hudson.util.CyclicGraphDetector.CycleDetectedException; import hudson.util.FormValidation; import hudson.util.PersistedList; import hudson.util.Retrier; import hudson.util.Service; import hudson.util.VersionNumber; import hudson.util.XStream2; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; import java.lang.reflect.Method; import java.net.JarURLConnection; import java.net.MalformedURLException; import java.net.URI; import 
java.net.URISyntaxException; import java.net.URL; import java.net.URLClassLoader; import java.net.URLConnection; import java.net.http.HttpClient; import java.net.http.HttpRequest; import java.nio.file.Files; import java.nio.file.InvalidPathException; import java.nio.file.Paths; import java.nio.file.attribute.FileTime; import java.security.CodeSource; import java.time.Duration; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.ServiceLoader; import java.util.Set; import java.util.TreeMap; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.Future; import java.util.function.Function; import java.util.function.Supplier; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.jar.Manifest; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.xml.XMLConstants; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParserFactory; import jenkins.ClassLoaderReflectionToolkit; import jenkins.ExtensionRefreshException; import jenkins.InitReactorRunner; import jenkins.MissingDependencyException; import jenkins.RestartRequiredException; import jenkins.YesNoMaybe; import jenkins.install.InstallState; import jenkins.install.InstallUtil; import jenkins.model.Jenkins; import jenkins.plugins.DetachedPluginsUtil; import jenkins.security.CustomClassFilter; import jenkins.util.SystemProperties; import 
jenkins.util.io.OnMaster; import jenkins.util.xml.RestrictiveEntityResolver; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.apache.commons.fileupload.FileItem; import org.apache.commons.fileupload.FileUploadException; import org.apache.commons.fileupload.disk.DiskFileItemFactory; import org.apache.commons.fileupload.servlet.ServletFileUpload; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.logging.LogFactory; import org.jenkinsci.Symbol; import org.jvnet.hudson.reactor.Executable; import org.jvnet.hudson.reactor.Reactor; import org.jvnet.hudson.reactor.TaskBuilder; import org.jvnet.hudson.reactor.TaskGraphBuilder; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.DoNotUse; import org.kohsuke.accmod.restrictions.NoExternalUse; import org.kohsuke.stapler.HttpRedirect; import org.kohsuke.stapler.HttpResponse; import org.kohsuke.stapler.HttpResponses; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerFallback; import org.kohsuke.stapler.StaplerOverridable; import org.kohsuke.stapler.StaplerProxy; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import org.kohsuke.stapler.export.Exported; import org.kohsuke.stapler.export.ExportedBean; import org.kohsuke.stapler.interceptor.RequirePOST; import org.kohsuke.stapler.verb.POST; import org.springframework.security.core.Authentication; import org.xml.sax.Attributes; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; /** * Manages {@link PluginWrapper}s. * * <p> * <b>Setting default Plugin Managers</b>. The default plugin manager in {@code Jenkins} can be replaced by defining a * System Property ({@code hudson.PluginManager.className}). See {@link #createDefault(Jenkins)}. 
* This className should be available on early startup, so it cannot come only from a library * (e.g. Jenkins module or Extra library dependency in the WAR file project). * Plugins cannot be used for such purpose. * In order to be correctly instantiated, the class definition must have at least one constructor with the same * signature as the following ones: * <ol> * <li>{@link LocalPluginManager#LocalPluginManager(Jenkins)} </li> * <li>{@link LocalPluginManager#LocalPluginManager(ServletContext, File)} </li> * <li>{@link LocalPluginManager#LocalPluginManager(File)} </li> * </ol> * Constructors are searched in the order provided above and only the first found suitable constructor is * tried to build an instance. In the last two cases the {@link File} argument refers to the <i>Jenkins home directory</i>. * * @author Kohsuke Kawaguchi */ @ExportedBean public abstract class PluginManager extends AbstractModelObject implements OnMaster, StaplerOverridable, StaplerProxy { /** Custom plugin manager system property or context param. */ public static final String CUSTOM_PLUGIN_MANAGER = PluginManager.class.getName() + ".className"; private static final Logger LOGGER = Logger.getLogger(PluginManager.class.getName()); /** * Time elapsed between retries to check the updates sites. It's kind of constant, but let it so for tests */ /* private final */ static int CHECK_UPDATE_SLEEP_TIME_MILLIS; /** * Number of attempts to check the updates sites. It's kind of constant, but let it so for tests */ /* private final */ static int CHECK_UPDATE_ATTEMPTS; static { try { // Secure initialization CHECK_UPDATE_SLEEP_TIME_MILLIS = SystemProperties.getInteger(PluginManager.class.getName() + ".checkUpdateSleepTimeMillis", 1000); CHECK_UPDATE_ATTEMPTS = SystemProperties.getInteger(PluginManager.class.getName() + ".checkUpdateAttempts", 1); } catch (RuntimeException e) { LOGGER.warning(String.format("There was an error initializing the PluginManager. 
Exception: %s", e)); } finally { CHECK_UPDATE_ATTEMPTS = CHECK_UPDATE_ATTEMPTS > 0 ? CHECK_UPDATE_ATTEMPTS : 1; CHECK_UPDATE_SLEEP_TIME_MILLIS = CHECK_UPDATE_SLEEP_TIME_MILLIS > 0 ? CHECK_UPDATE_SLEEP_TIME_MILLIS : 1000; } } /** Accepted constructors for custom plugin manager, in the order they are tried. */ private enum PMConstructor { JENKINS { @Override @NonNull PluginManager doCreate(@NonNull Class<? extends PluginManager> klass, @NonNull Jenkins jenkins) throws ReflectiveOperationException { return klass.getConstructor(Jenkins.class).newInstance(jenkins); } }, SC_FILE { @Override @NonNull PluginManager doCreate(@NonNull Class<? extends PluginManager> klass, @NonNull Jenkins jenkins) throws ReflectiveOperationException { return klass.getConstructor(ServletContext.class, File.class).newInstance(jenkins.servletContext, jenkins.getRootDir()); } }, FILE { @Override @NonNull PluginManager doCreate(@NonNull Class<? extends PluginManager> klass, @NonNull Jenkins jenkins) throws ReflectiveOperationException { return klass.getConstructor(File.class).newInstance(jenkins.getRootDir()); } }; final @CheckForNull PluginManager create(@NonNull Class<? extends PluginManager> klass, @NonNull Jenkins jenkins) throws ReflectiveOperationException { try { return doCreate(klass, jenkins); } catch (NoSuchMethodException e) { // Constructor not found. Will try the remaining ones. return null; } } abstract @NonNull PluginManager doCreate(@NonNull Class<? extends PluginManager> klass, @NonNull Jenkins jenkins) throws ReflectiveOperationException; } /** * Creates the {@link PluginManager} to use if no one is provided to a {@link Jenkins} object. * This method will be called after creation of {@link Jenkins} object, but before it is fully initialized. * @param jenkins Jenkins Instance. * @return Plugin manager to use. If no custom class is configured or in case of any error, the default * {@link LocalPluginManager} is returned. 
*/ public static @NonNull PluginManager createDefault(@NonNull Jenkins jenkins) { String pmClassName = SystemProperties.getString(CUSTOM_PLUGIN_MANAGER); if (pmClassName != null && !pmClassName.isBlank()) { LOGGER.log(FINE, String.format("Use of custom plugin manager [%s] requested.", pmClassName)); try { final Class<? extends PluginManager> klass = Class.forName(pmClassName).asSubclass(PluginManager.class); // Iteration is in declaration order for (PMConstructor c : PMConstructor.values()) { PluginManager pm = c.create(klass, jenkins); if (pm != null) { return pm; } } LOGGER.log(WARNING, String.format("Provided custom plugin manager [%s] does not provide any of the suitable constructors. Using default.", pmClassName)); } catch (ClassCastException e) { LOGGER.log(WARNING, String.format("Provided class [%s] does not extend PluginManager. Using default.", pmClassName)); } catch (Exception e) { LOGGER.log(WARNING, String.format("Unable to instantiate custom plugin manager [%s]. Using default.", pmClassName), e); } } return new LocalPluginManager(jenkins); } /** * All discovered plugins. */ protected final List<PluginWrapper> plugins = new CopyOnWriteArrayList<>(); /** * All active plugins, topologically sorted so that when X depends on Y, Y appears in the list before X does. */ protected final List<PluginWrapper> activePlugins = new CopyOnWriteArrayList<>(); protected final List<FailedPlugin> failedPlugins = new ArrayList<>(); /** * Plug-in root directory. */ public final File rootDir; /** * Hold the status of the last try to check update centers. Consumed from the check.jelly to show an * error message if the last attempt failed. */ private String lastErrorCheckUpdateCenters = null; /** * If non-null, the base directory for all exploded .hpi/.jpi plugins. Controlled by the system property / servlet * context parameter {@literal hudson.PluginManager.workDir}. 
*/ @CheckForNull private final File workDir; /** * @deprecated as of 1.355 * {@link PluginManager} can now live longer than {@link jenkins.model.Jenkins} instance, so * use {@code Hudson.getInstance().servletContext} instead. */ @Deprecated public final ServletContext context; /** * {@link ClassLoader} that can load all the publicly visible classes from plugins * (and including the classloader that loads Hudson itself.) * */ // implementation is minimal --- just enough to run XStream // and load plugin-contributed classes. public final ClassLoader uberClassLoader = new UberClassLoader(activePlugins); /** * Once plugin is uploaded, this flag becomes true. * This is used to report a message that Jenkins needs to be restarted * for new plugins to take effect. */ public volatile boolean pluginUploaded = false; /** * The initialization of {@link PluginManager} splits into two parts; * one is the part about listing them, extracting them, and preparing classloader for them. * The 2nd part is about creating instances. Once the former completes this flags become true, * as the 2nd part can be repeated for each Hudson instance. */ private boolean pluginListed = false; /** * Strategy for creating and initializing plugins */ private final PluginStrategy strategy; protected PluginManager(ServletContext context, File rootDir) { this.context = context; this.rootDir = rootDir; try { Util.createDirectories(rootDir.toPath()); } catch (IOException e) { throw new UncheckedIOException(e); } String workDir = SystemProperties.getString(PluginManager.class.getName() + ".workDir"); this.workDir = workDir == null || workDir.isBlank() ? null : new File(workDir); strategy = createPluginStrategy(); } public Api getApi() { Jenkins.get().checkPermission(Jenkins.SYSTEM_READ); return new Api(this); } /** * If non-null, the base directory for all exploded .hpi/.jpi plugins. * @return the base directory for all exploded .hpi/.jpi plugins or {@code null} to leave this up to the strategy. 
*/ @CheckForNull public File getWorkDir() { return workDir; } /** * Find all registered overrides (intended to allow overriding/adding views) * @return List of extensions * @since 1.627 */ @Override public Collection<PluginManagerStaplerOverride> getOverrides() { return PluginManagerStaplerOverride.all(); } /** * Called immediately after the construction. * This is a separate method so that code executed from here will see a valid value in * {@link jenkins.model.Jenkins#pluginManager}. */ public TaskBuilder initTasks(final InitStrategy initStrategy) { TaskBuilder builder; if (!pluginListed) { builder = new TaskGraphBuilder() { List<File> archives; Collection<String> bundledPlugins; { Handle loadBundledPlugins = add("Loading bundled plugins", new Executable() { @Override public void run(Reactor session) throws Exception { bundledPlugins = loadBundledPlugins(); } }); Handle listUpPlugins = requires(loadBundledPlugins).add("Listing up plugins", new Executable() { @Override public void run(Reactor session) throws Exception { archives = initStrategy.listPluginArchives(PluginManager.this); } }); requires(listUpPlugins).attains(PLUGINS_LISTED).add("Preparing plugins", new Executable() { @Override public void run(Reactor session) throws Exception { // once we've listed plugins, we can fill in the reactor with plugin-specific initialization tasks TaskGraphBuilder g = new TaskGraphBuilder(); final Map<String, File> inspectedShortNames = new HashMap<>(); for (final File arc : archives) { g.followedBy().notFatal().attains(PLUGINS_LISTED).add("Inspecting plugin " + arc, new Executable() { @Override public void run(Reactor session1) throws Exception { try { PluginWrapper p = strategy.createPluginWrapper(arc); if (isDuplicate(p)) return; p.isBundled = containsHpiJpi(bundledPlugins, arc.getName()); plugins.add(p); } catch (IOException e) { failedPlugins.add(new FailedPlugin(arc.getName(), e)); throw e; } } /** * Inspects duplication. 
this happens when you run hpi:run on a bundled plugin, * as well as putting numbered jpi files, like "cobertura-1.0.jpi" and "cobertura-1.1.jpi" */ private boolean isDuplicate(PluginWrapper p) { String shortName = p.getShortName(); if (inspectedShortNames.containsKey(shortName)) { LOGGER.info("Ignoring " + arc + " because " + inspectedShortNames.get(shortName) + " is already loaded"); return true; } inspectedShortNames.put(shortName, arc); return false; } }); } g.followedBy().attains(PLUGINS_LISTED).add("Checking cyclic dependencies", new Executable() { /** * Makes sure there's no cycle in dependencies. */ @Override public void run(Reactor reactor) throws Exception { try { CyclicGraphDetector<PluginWrapper> cgd = new CyclicGraphDetector<>() { @Override protected List<PluginWrapper> getEdges(PluginWrapper p) { List<PluginWrapper> next = new ArrayList<>(); addTo(p.getDependencies(), next); addTo(p.getOptionalDependencies(), next); return next; } private void addTo(List<Dependency> dependencies, List<PluginWrapper> r) { for (Dependency d : dependencies) { PluginWrapper p = getPlugin(d.shortName); if (p != null) r.add(p); } } @Override protected void reactOnCycle(PluginWrapper q, List<PluginWrapper> cycle) { LOGGER.log(Level.SEVERE, "found cycle in plugin dependencies: (root=" + q + ", deactivating all involved) " + cycle.stream().map(Object::toString).collect(Collectors.joining(" -> "))); for (PluginWrapper pluginWrapper : cycle) { pluginWrapper.setHasCycleDependency(true); failedPlugins.add(new FailedPlugin(pluginWrapper, new CycleDetectedException(cycle))); } } }; cgd.run(getPlugins()); // obtain topologically sorted list and overwrite the list for (PluginWrapper p : cgd.getSorted()) { if (p.isActive()) { activePlugins.add(p); ((UberClassLoader) uberClassLoader).clearCacheMisses(); } } } catch (CycleDetectedException e) { // TODO this should be impossible, since we override reactOnCycle to not throw the exception stop(); // disable all plugins since classloading 
from them can lead to StackOverflow throw e; // let Hudson fail } } }); session.addAll(g.discoverTasks(session)); pluginListed = true; // technically speaking this is still too early, as at this point tasks are merely scheduled, not necessarily executed. } }); } }; } else { builder = TaskBuilder.EMPTY_BUILDER; } final InitializerFinder initializerFinder = new InitializerFinder(uberClassLoader); // misc. stuff // lists up initialization tasks about loading plugins. return TaskBuilder.union(initializerFinder, // this scans @Initializer in the core once builder, new TaskGraphBuilder() {{ requires(PLUGINS_LISTED).attains(PLUGINS_PREPARED).add("Loading plugins", new Executable() { /** * Once the plugins are listed, schedule their initialization. */ @Override public void run(Reactor session) throws Exception { Jenkins.get().lookup.set(PluginInstanceStore.class, new PluginInstanceStore()); TaskGraphBuilder g = new TaskGraphBuilder(); // schedule execution of loading plugins for (final PluginWrapper p : activePlugins.toArray(new PluginWrapper[0])) { g.followedBy().notFatal().attains(PLUGINS_PREPARED).add(String.format("Loading plugin %s v%s (%s)", p.getLongName(), p.getVersion(), p.getShortName()), new Executable() { @Override public void run(Reactor session) throws Exception { try { p.resolvePluginDependencies(); strategy.load(p); } catch (MissingDependencyException e) { failedPlugins.add(new FailedPlugin(p, e)); activePlugins.remove(p); plugins.remove(p); p.releaseClassLoader(); LOGGER.log(Level.SEVERE, "Failed to install {0}: {1}", new Object[] { p.getShortName(), e.getMessage() }); } catch (IOException e) { failedPlugins.add(new FailedPlugin(p, e)); activePlugins.remove(p); plugins.remove(p); p.releaseClassLoader(); throw e; } } }); } // schedule execution of initializing plugins for (final PluginWrapper p : activePlugins.toArray(new PluginWrapper[0])) { g.followedBy().notFatal().attains(PLUGINS_STARTED).add("Initializing plugin " + p.getShortName(), new Executable() { 
                        @Override
                        public void run(Reactor session) throws Exception {
                            // The plugin may have been dropped from the active set by an
                            // earlier failed load step; nothing to initialize in that case.
                            if (!activePlugins.contains(p)) {
                                return;
                            }
                            try {
                                p.getPluginOrFail().postInitialize();
                            } catch (Exception e) {
                                // Back the failed plugin out completely so it is not left half-started.
                                failedPlugins.add(new FailedPlugin(p, e));
                                activePlugins.remove(p);
                                plugins.remove(p);
                                p.releaseClassLoader();
                                throw e;
                            }
                        }
                    });
                }
                g.followedBy().attains(PLUGINS_STARTED).add("Discovering plugin initialization tasks", new Executable() {
                    @Override
                    public void run(Reactor reactor) throws Exception {
                        // rescan to find plugin-contributed @Initializer
                        reactor.addAll(initializerFinder.discoverTasks(reactor));
                    }
                });

                // register them all
                session.addAll(g.discoverTasks(session));
            }
        });

        // All plugins are loaded. Now we can figure out who depends on who.
        requires(PLUGINS_PREPARED).attains(COMPLETED).add("Resolving Dependent Plugins Graph", new Executable() {
            @Override
            public void run(Reactor reactor) throws Exception {
                resolveDependentPlugins();
            }
        });
    }});
    }

    /**
     * If the named plugin is not already present on disk in any supported archive form,
     * copy it out of the WAR's detached-plugins area into the plugin root directory and
     * register a wrapper for it. Used to satisfy dependencies on plugins split out of core.
     * NOTE(review): assumes {@code shortName} carries no file extension — confirm against callers.
     */
    void considerDetachedPlugin(String shortName) {
        // Any of the four archive spellings already on disk means it is installed; do nothing.
        if (new File(rootDir, shortName + ".jpi").isFile()
                || new File(rootDir, shortName + ".hpi").isFile()
                || new File(rootDir, shortName + ".jpl").isFile()
                || new File(rootDir, shortName + ".hpl").isFile()) {
            LOGGER.fine(() -> "not considering loading a detached dependency " + shortName + " as it is already on disk");
            return;
        }
        LOGGER.fine(() -> "considering loading a detached dependency " + shortName);
        // Extract only the matching archive(s) from the WAR, then wrap each one.
        for (String loadedFile : loadPluginsFromWar(getDetachedLocation(), (dir, name) -> normalisePluginName(name).equals(shortName))) {
            String loaded = normalisePluginName(loadedFile);
            File arc = new File(rootDir, loaded + ".jpi");
            LOGGER.info(() -> "Loading a detached plugin as a dependency: " + arc);
            try {
                plugins.add(strategy.createPluginWrapper(arc));
            } catch (IOException e) {
                failedPlugins.add(new FailedPlugin(arc.getName(), e));
            }
        }
    }

    /**
     * Defines the location of the detached plugins in the WAR.
     * @return by default, {@code /WEB-INF/detached-plugins}
     * @since 2.377
     */
    protected @NonNull String getDetachedLocation() {
        return "/WEB-INF/detached-plugins";
    }

    /** Convenience overload: copy every plugin under {@code fromPath} (no name filter). */
    protected @NonNull Set<String> loadPluginsFromWar(@NonNull String fromPath) {
        return loadPluginsFromWar(fromPath, null);
    }

    /**
     * Copies plugin archives (and their transitive non-optional dependencies) from a
     * directory inside the WAR into the plugin root directory.
     *
     * @param fromPath servlet-context path inside the WAR, e.g. {@code /WEB-INF/detached-plugins}
     * @param filter optional filename filter; when non-null only accepted entries are copied
     * @return simple file names of every plugin archive copied, dependencies included
     */
    //TODO: Consider refactoring in order to avoid DMI_COLLECTION_OF_URLS
    @SuppressFBWarnings(value = "DMI_COLLECTION_OF_URLS", justification = "Plugin loading happens only once on Jenkins startup")
    protected @NonNull Set<String> loadPluginsFromWar(@NonNull String fromPath, @CheckForNull FilenameFilter filter) {
        Set<String> names = new HashSet<>();

        ServletContext context = Jenkins.get().servletContext;
        Set<String> plugins = Util.fixNull(context.getResourcePaths(fromPath));
        Set<URL> copiedPlugins = new HashSet<>();
        Set<URL> dependencies = new HashSet<>();
        for (String pluginPath : plugins) {
            String fileName = pluginPath.substring(pluginPath.lastIndexOf('/') + 1);
            if (fileName.isEmpty()) {
                // see http://www.nabble.com/404-Not-Found-error-when-clicking-on-help-td24508544.html
                // I suspect some containers are returning directory names.
                continue;
            }
            try {
                URL url = context.getResource(pluginPath);
                if (filter != null && url != null) {
                    if (!filter.accept(new File(url.getFile()).getParentFile(), fileName)) {
                        continue;
                    }
                }

                names.add(fileName);
                copyBundledPlugin(Objects.requireNonNull(url), fileName);
                copiedPlugins.add(url);
                try {
                    // Collect (but do not yet copy) the URLs this plugin depends on.
                    addDependencies(url, fromPath, dependencies);
                } catch (Exception e) {
                    LOGGER.log(Level.SEVERE, "Failed to resolve dependencies for the bundled plugin " + fileName, e);
                }
            } catch (IOException e) {
                LOGGER.log(Level.SEVERE, "Failed to extract the bundled plugin " + fileName, e);
            }
        }

        // Copy dependencies. These are not detached plugins, but are required by them.
        for (URL dependency : dependencies) {
            if (copiedPlugins.contains(dependency)) {
                // Ignore. Already copied.
                continue;
            }

            String fileName = new File(dependency.getFile()).getName();
            try {
                names.add(fileName);
                copyBundledPlugin(dependency, fileName);
                copiedPlugins.add(dependency);
            } catch (IOException e) {
                LOGGER.log(Level.SEVERE, "Failed to extract the bundled dependency plugin " + fileName, e);
            }
        }

        return names;
    }

    /**
     * Recursively accumulates into {@code dependencySet} the WAR-internal URLs of the
     * non-optional dependencies of the plugin at {@code hpiResUrl}, skipping any
     * dependency whose installed version is already current.
     */
    //TODO: Consider refactoring in order to avoid DMI_COLLECTION_OF_URLS
    @SuppressFBWarnings(value = "DMI_COLLECTION_OF_URLS", justification = "Plugin loading happens only once on Jenkins startup")
    protected static void addDependencies(URL hpiResUrl, String fromPath, Set<URL> dependencySet) throws URISyntaxException, MalformedURLException {
        if (dependencySet.contains(hpiResUrl)) {
            return;
        }

        // NOTE(review): parsePluginManifest is @CheckForNull; a null return here would
        // NPE on the next line — confirm whether a bundled archive can lack a manifest.
        Manifest manifest = parsePluginManifest(hpiResUrl);
        String dependencySpec = manifest.getMainAttributes().getValue("Plugin-Dependencies");
        if (dependencySpec != null) {
            String[] dependencyTokens = dependencySpec.split(",");
            ServletContext context = Jenkins.get().servletContext;

            for (String dependencyToken : dependencyTokens) {
                if (dependencyToken.endsWith(";resolution:=optional")) {
                    // ignore optional dependencies
                    continue;
                }

                String[] artifactIdVersionPair = dependencyToken.split(":");
                String artifactId = artifactIdVersionPair[0];
                VersionNumber dependencyVersion = new VersionNumber(artifactIdVersionPair[1]);

                PluginManager manager = Jenkins.get().getPluginManager();
                VersionNumber installedVersion = manager.getPluginVersion(manager.rootDir, artifactId);
                if (installedVersion != null && !installedVersion.isOlderThan(dependencyVersion)) {
                    // Do not downgrade dependencies that are already installed.
                    continue;
                }

                URL dependencyURL = context.getResource(fromPath + "/" + artifactId + ".hpi");

                if (dependencyURL == null) {
                    // Maybe bundling has changed .jpi files
                    dependencyURL = context.getResource(fromPath + "/" + artifactId + ".jpi");
                }

                if (dependencyURL != null) {
                    // And transitive deps...
                    addDependencies(dependencyURL, fromPath, dependencySet);
                    // And then add the current plugin
                    dependencySet.add(dependencyURL);
                }
            }
        }
    }

    /**
     * Load detached plugins and their dependencies.
     * <p>
     * Only loads plugins that:
     * <ul>
     * <li>Have been detached since the last running version.</li>
     * <li>Are already installed and need to be upgraded. This can be the case if this Jenkins install has been running since before plugins were "unbundled".</li>
     * <li>Are dependencies of one of the above e.g. script-security is not one of the detached plugins but it must be loaded if matrix-project is loaded.</li>
     * </ul>
     */
    protected void loadDetachedPlugins() {
        VersionNumber lastExecVersion = new VersionNumber(InstallUtil.getLastExecVersion());
        // Upgrade path: only runs when this is an existing install moving to a newer core.
        if (lastExecVersion.isNewerThan(InstallUtil.NEW_INSTALL_VERSION) && lastExecVersion.isOlderThan(Jenkins.getVersion())) {
            LOGGER.log(INFO, "Upgrading Jenkins. The last running version was {0}. This Jenkins is version {1}.",
                    new Object[] {lastExecVersion, Jenkins.VERSION});

            final List<DetachedPluginsUtil.DetachedPlugin> detachedPlugins = DetachedPluginsUtil.getDetachedPlugins(lastExecVersion);

            Set<String> loadedDetached = loadPluginsFromWar(getDetachedLocation(), new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    name = normalisePluginName(name);

                    // If this was a plugin that was detached some time in the past i.e. not just one of the
                    // plugins that was bundled "for fun".
                    if (DetachedPluginsUtil.isDetachedPlugin(name)) {
                        VersionNumber installedVersion = getPluginVersion(rootDir, name);
                        VersionNumber bundledVersion = getPluginVersion(dir, name);
                        // If the plugin is already installed, we need to decide whether to replace it with the bundled version.
                        if (installedVersion != null && bundledVersion != null) {
                            // If the installed version is older than the bundled version, then it MUST be upgraded.
                            // If the installed version is newer than the bundled version, then it MUST NOT be upgraded.
                            // If the versions are equal we just keep the installed version.
                            return installedVersion.isOlderThan(bundledVersion);
                        }
                    }

                    // If it's a plugin that was detached since the last running version.
                    for (DetachedPluginsUtil.DetachedPlugin detachedPlugin : detachedPlugins) {
                        if (detachedPlugin.getShortName().equals(name)) {
                            return true;
                        }
                    }

                    // Otherwise skip this and do not install.
                    return false;
                }
            });

            LOGGER.log(INFO, "Upgraded Jenkins from version {0} to version {1}. Loaded detached plugins (and dependencies): {2}",
                    new Object[] {lastExecVersion, Jenkins.VERSION, loadedDetached});
        } else {
            // Not an upgrade: still force-refresh any detached plugin that is installed
            // below its required minimum version.
            final Set<DetachedPluginsUtil.DetachedPlugin> forceUpgrade = new HashSet<>();
            // TODO using getDetachedPlugins here seems wrong; should be forcing an upgrade when the installed version is older than that in getDetachedLocation()
            for (DetachedPluginsUtil.DetachedPlugin p : DetachedPluginsUtil.getDetachedPlugins()) {
                VersionNumber installedVersion = getPluginVersion(rootDir, p.getShortName());
                VersionNumber requiredVersion = p.getRequiredVersion();
                if (installedVersion != null && installedVersion.isOlderThan(requiredVersion)) {
                    LOGGER.log(Level.WARNING,
                            "Detached plugin {0} found at version {1}, required minimum version is {2}",
                            new Object[]{p.getShortName(), installedVersion, requiredVersion});
                    forceUpgrade.add(p);
                }
            }
            if (!forceUpgrade.isEmpty()) {
                Set<String> loadedDetached = loadPluginsFromWar(getDetachedLocation(), new FilenameFilter() {
                    @Override
                    public boolean accept(File dir, String name) {
                        name = normalisePluginName(name);
                        for (DetachedPluginsUtil.DetachedPlugin detachedPlugin : forceUpgrade) {
                            if (detachedPlugin.getShortName().equals(name)) {
                                return true;
                            }
                        }
                        return false;
                    }
                });
                LOGGER.log(INFO, "Upgraded detached plugins (and dependencies): {0}",
                        new Object[]{loadedDetached});
            }
        }
    }

    private String normalisePluginName(@NonNull String name) {
        // Normalise the name by stripping off the file extension (if present)...
        return name.replace(".jpi", "").replace(".hpi", "");
    }

    /** Looks up an installed plugin's version, trying the {@code .jpi} archive first, then {@code .hpi}. */
    private @CheckForNull VersionNumber getPluginVersion(@NonNull File dir, @NonNull String pluginId) {
        VersionNumber version = getPluginVersion(new File(dir, pluginId + ".jpi"));
        if (version == null) {
            version = getPluginVersion(new File(dir, pluginId + ".hpi"));
        }
        return version;
    }

    /** @return the version from the archive's manifest, or null if the file is missing or has a bad URL. */
    private @CheckForNull VersionNumber getPluginVersion(@NonNull File pluginFile) {
        if (!pluginFile.exists()) {
            return null;
        }
        try {
            return getPluginVersion(pluginFile.toURI().toURL());
        } catch (MalformedURLException e) {
            return null;
        }
    }

    private @CheckForNull VersionNumber getPluginVersion(@NonNull URL pluginURL) {
        Manifest manifest = parsePluginManifest(pluginURL);
        if (manifest == null) {
            return null;
        }
        String versionSpec = manifest.getMainAttributes().getValue("Plugin-Version");
        // NOTE(review): a manifest lacking Plugin-Version yields versionSpec == null here —
        // confirm VersionNumber tolerates that, or that the attribute is always present.
        return new VersionNumber(versionSpec);
    }

    /*
     * contains operation that considers xxx.hpi and xxx.jpi as equal
     * this is necessary since the bundled plugins are still called *.hpi
     */
    private boolean containsHpiJpi(Collection<String> bundledPlugins, String name) {
        return bundledPlugins.contains(name.replaceAll("\\.hpi", ".jpi"))
                || bundledPlugins.contains(name.replaceAll("\\.jpi", ".hpi"));
    }

    /**
     * Returns the manifest of a bundled but not-extracted plugin.
     * @deprecated removed without replacement
     */
    @Deprecated // See https://groups.google.com/d/msg/jenkinsci-dev/kRobm-cxFw8/6V66uhibAwAJ
    public @CheckForNull Manifest getBundledPluginManifest(String shortName) {
        return null;
    }

    /**
     * TODO: revisit where/how to expose this. This is an experiment.
     */
    public void dynamicLoad(File arc) throws IOException, InterruptedException, RestartRequiredException {
        dynamicLoad(arc, false, null);
    }

    /**
     * Try the dynamicLoad, removeExisting to attempt to dynamic load disabled plugins
     */
    @Restricted(NoExternalUse.class)
    public void dynamicLoad(File arc, boolean removeExisting, @CheckForNull List<PluginWrapper> batch) throws IOException, InterruptedException, RestartRequiredException {
        try (ACLContext context = ACL.as2(ACL.SYSTEM2)) {
            LOGGER.log(FINE, "Attempting to dynamic load {0}", arc);
            PluginWrapper p = null;
            String sn;
            try {
                sn = strategy.getShortName(arc);
            } catch (AbstractMethodError x) {
                // Custom PluginStrategy predating getShortName(File); create the wrapper early instead.
                LOGGER.log(WARNING, "JENKINS-12753 fix not active: {0}", x.getMessage());
                p = strategy.createPluginWrapper(arc);
                sn = p.getShortName();
            }
            PluginWrapper pw = getPlugin(sn);
            if (pw != null) {
                if (removeExisting) { // try to load disabled plugins
                    for (Iterator<PluginWrapper> i = plugins.iterator(); i.hasNext();) {
                        pw = i.next();
                        if (sn.equals(pw.getShortName())) {
                            i.remove();
                            break;
                        }
                    }
                } else {
                    throw new RestartRequiredException(Messages._PluginManager_PluginIsAlreadyInstalled_RestartRequired(sn));
                }
            }
            if (!Lifecycle.get().supportsDynamicLoad()) {
                throw new RestartRequiredException(Messages._PluginManager_LifecycleDoesNotSupportDynamicLoad_RestartRequired());
            }
            if (p == null) {
                p = strategy.createPluginWrapper(arc);
            }
            if (p.supportsDynamicLoad() == YesNoMaybe.NO)
                throw new RestartRequiredException(Messages._PluginManager_PluginDoesntSupportDynamicLoad_RestartRequired(sn));

            // there's no need to do cyclic dependency check, because we are deploying one at a time,
            // so existing plugins can't be depending on this newly deployed one.

            plugins.add(p);
            if (p.isActive()) {
                activePlugins.add(p);
                // Classes that previously failed to resolve may now exist; drop cached misses.
                ((UberClassLoader) uberClassLoader).clearCacheMisses();
            }

            // TODO antimodular; perhaps should have a PluginListener to complement ExtensionListListener?
            CustomClassFilter.Contributed.load();

            try {
                p.resolvePluginDependencies();
                strategy.load(p);

                if (batch != null) {
                    batch.add(p);
                } else {
                    start(List.of(p));
                }

            } catch (Exception e) {
                // Roll back completely so a failed dynamic load leaves no trace of the plugin.
                failedPlugins.add(new FailedPlugin(p, e));
                activePlugins.remove(p);
                plugins.remove(p);
                p.releaseClassLoader();
                throw new IOException("Failed to install " + sn + " plugin", e);
            }

            LOGGER.log(FINE, "Plugin {0}:{1} dynamically {2}", new Object[] {p.getShortName(), p.getVersion(), batch != null ? "loaded but not yet started" : "installed"});
        }
    }

    /** Starts previously loaded plugins: rewires optional dependencies, refreshes extensions, runs their initializers. */
    @Restricted(NoExternalUse.class)
    public void start(List<PluginWrapper> plugins) throws Exception {
        try (ACLContext context = ACL.as2(ACL.SYSTEM2)) {
            Map<String, PluginWrapper> pluginsByName = plugins.stream().collect(Collectors.toMap(PluginWrapper::getShortName, p -> p));

            // recalculate dependencies of plugins optionally depending the newly deployed ones.
            for (PluginWrapper depender : this.plugins) {
                if (plugins.contains(depender)) { // skip itself.
                    continue;
                }
                for (Dependency d : depender.getOptionalDependencies()) {
                    PluginWrapper dependee = pluginsByName.get(d.shortName);
                    if (dependee != null) {
                        // this plugin depends on the newly loaded one!
                        // recalculate dependencies!
                        getPluginStrategy().updateDependency(depender, dependee);
                        break;
                    }
                }
            }

            // Redo who depends on who.
            resolveDependentPlugins();

            try {
                Jenkins.get().refreshExtensions();
            } catch (ExtensionRefreshException e) {
                throw new IOException("Failed to refresh extensions after installing some plugins", e);
            }
            for (PluginWrapper p : plugins) {
                //TODO:According to the postInitialize() documentation, one may expect that
                //p.getPluginOrFail() NPE will continue the initialization.
                // Keeping the original behavior ATM
                p.getPluginOrFail().postInitialize();
            }

            // run initializers in the added plugins
            Reactor r = new Reactor(InitMilestone.ordering());
            Set<ClassLoader> loaders = plugins.stream().map(p -> p.classLoader).collect(Collectors.toSet());
            r.addAll(new InitializerFinder(uberClassLoader) {
                @Override
                protected boolean filter(Method e) {
                    // Run only initializers declared by the freshly started plugins;
                    // everything else was already executed during normal startup.
                    return !loaders.contains(e.getDeclaringClass().getClassLoader()) || super.filter(e);
                }
            }.discoverTasks(r));
            new InitReactorRunner().run(r);
        }
    }

    /**
     * Recomputes, for every plugin, the set of plugins that depend on it
     * (and the subset of those whose dependency is optional).
     */
    @Restricted(NoExternalUse.class)
    public synchronized void resolveDependentPlugins() {
        for (PluginWrapper plugin : plugins) {
            // Set of optional dependents plugins of plugin
            Set<String> optionalDependents = new HashSet<>();
            Set<String> dependents = new HashSet<>();
            for (PluginWrapper possibleDependent : plugins) {
                // No need to check if plugin is dependent of itself
                if (possibleDependent.getShortName().equals(plugin.getShortName())) {
                    continue;
                }

                // The plugin could have just been deleted. If so, it doesn't
                // count as a dependent.
                if (possibleDependent.isDeleted()) {
                    continue;
                }
                List<Dependency> dependencies = possibleDependent.getDependencies();
                for (Dependency dependency : dependencies) {
                    if (dependency.shortName.equals(plugin.getShortName())) {
                        dependents.add(possibleDependent.getShortName());

                        // If, in addition, the dependency is optional, add to the optionalDependents list
                        if (dependency.optional) {
                            optionalDependents.add(possibleDependent.getShortName());
                        }

                        // already know possibleDependent depends on plugin, no need to continue with the rest of
                        // dependencies. We continue with the next possibleDependent
                        break;
                    }
                }
            }
            plugin.setDependents(dependents);
            plugin.setOptionalDependents(optionalDependents);
        }
    }

    /**
     * If the war file has any "/WEB-INF/plugins/[*.jpi | *.hpi]", extract them into the plugin directory.
     *
     * @return
     *      File names of the bundled plugins. Normally empty (not to be confused with {@link #loadDetachedPlugins}) but OEM WARs may have some.
     * @throws Exception
     *      Any exception will be reported and halt the startup.
     */
    protected abstract Collection<String> loadBundledPlugins() throws Exception;

    /**
     * Copies the plugin from the given URL to the given destination.
     * Despite the name, this is used also from {@link #loadDetachedPlugins}.
     * Includes a reasonable up-to-date check.
     * A convenience method to be used by {@link #loadBundledPlugins()}.
     * @param fileName like {@code abc.jpi}
     */
    protected void copyBundledPlugin(URL src, String fileName) throws IOException {
        LOGGER.log(FINE, "Copying {0}", src);
        fileName = fileName.replace(".hpi", ".jpi"); // normalize fileNames to have the correct suffix
        String legacyName = fileName.replace(".jpi", ".hpi");
        long lastModified = getModificationDate(src);
        File file = new File(rootDir, fileName);

        // normalization first, if the old file exists.
        rename(new File(rootDir, legacyName), file);

        // update file if:
        //  - no file exists today
        //  - bundled version and current version differs (by timestamp).
        if (!file.exists() || file.lastModified() != lastModified) {
            FileUtils.copyURLToFile(src, file);
            Files.setLastModifiedTime(Util.fileToPath(file), FileTime.fromMillis(getModificationDate(src)));
            // lastModified is set for two reasons:
            // - to avoid unpacking as much as possible, but still do it on both upgrade and downgrade
            // - to make sure the value is not changed after each restart, so we can avoid
            //   unpacking the plugin itself in ClassicPluginStrategy.explode
        }

        // Plugin pinning has been deprecated.
        // See https://groups.google.com/d/msg/jenkinsci-dev/kRobm-cxFw8/6V66uhibAwAJ
    }

    /** Reads the given plugin archive's manifest, or returns null if it cannot be found or parsed. */
    /*package*/ static @CheckForNull Manifest parsePluginManifest(URL bundledJpi) {
        try (URLClassLoader cl = new URLClassLoader(new URL[]{bundledJpi})) {
            InputStream in = null;
            try {
                URL res = cl.findResource(PluginWrapper.MANIFEST_FILENAME);
                if (res != null) {
                    in = getBundledJpiManifestStream(res);
                    return new Manifest(in);
                }
            } finally {
                Util.closeAndLogFailures(in, LOGGER, PluginWrapper.MANIFEST_FILENAME, bundledJpi.toString());
            }
        } catch (IOException e) {
            LOGGER.log(WARNING, "Failed to parse manifest of " + bundledJpi, e);
        }
        return null;
    }

    /**
     * Retrieves input stream for the Manifest url.
     * The method intelligently handles the case of {@link JarURLConnection} pointing to files within JAR.
     * @param url Url of the manifest file
     * @return Input stream, which allows to retrieve manifest. This stream must be closed outside
     * @throws IOException Operation error
     */
    @NonNull
    /*package*/ static InputStream getBundledJpiManifestStream(@NonNull URL url) throws IOException {
        URLConnection uc = url.openConnection();
        InputStream in = null;
        // Magic, which allows to avoid using stream generated for JarURLConnection.
        // It prevents getting into JENKINS-37332 due to the file descriptor leak
        if (uc instanceof JarURLConnection) {
            final JarURLConnection jarURLConnection = (JarURLConnection) uc;
            final String entryName = jarURLConnection.getEntryName();
            try (JarFile jarFile = jarURLConnection.getJarFile()) {
                final JarEntry entry = entryName != null && jarFile != null
                        ? jarFile.getJarEntry(entryName) : null;
                if (entry != null) {
                    // Buffer the whole entry so the JarFile can be closed without leaking a descriptor.
                    try (InputStream i = jarFile.getInputStream(entry)) {
                        byte[] manifestBytes = i.readAllBytes();
                        in = new ByteArrayInputStream(manifestBytes);
                    }
                } else {
                    LOGGER.log(Level.WARNING, "Failed to locate the JAR file for {0}"
                            + "The default URLConnection stream access will be used, file descriptor may be leaked.",
                            url);
                }
            }
        }

        // If input stream is undefined, use the default implementation
        if (in == null) {
            in = url.openStream();
        }

        return in;
    }

    /**
     * Retrieves modification date of the specified file.
     * The method intelligently handles the case of {@link JarURLConnection} pointing to files within JAR.
     * @param url Url of the file
     * @return Modification date
     * @throws IOException Operation error
     */
    @NonNull
    /*package*/ static long getModificationDate(@NonNull URL url) throws IOException {
        URLConnection uc = url.openConnection();

        // It prevents file descriptor leak if the URL references a file within JAR
        // See JENKINS-37332 for more info
        // The code idea is taken from https://github.com/jknack/handlebars.java/pull/394
        if (uc instanceof JarURLConnection) {
            final JarURLConnection connection = (JarURLConnection) uc;
            final URL jarURL = connection.getJarFileURL();
            if (jarURL.getProtocol().equals("file")) {
                String file = jarURL.getFile();
                return new File(file).lastModified();
            } else {
                // We access the data without file protocol
                if (connection.getEntryName() != null) {
                    LOGGER.log(WARNING, "Accessing modification date of {0} file, which is an entry in JAR file. "
                            + "The access protocol is not file:, falling back to the default logic (risk of file descriptor leak).",
                            url);
                }
            }
        }

        // Fallback to the default implementation
        return uc.getLastModified();
    }

    /**
     * Rename a legacy file to a new name, with care to Windows where {@link File#renameTo(File)}
     * doesn't work if the destination already exists.
     */
    private void rename(File legacyFile, File newFile) throws IOException {
        if (!legacyFile.exists())
            return;
        if (newFile.exists()) {
            // Delete the destination first so renameTo succeeds on Windows.
            Util.deleteFile(newFile);
        }
        if (!legacyFile.renameTo(newFile)) {
            LOGGER.warning("Failed to rename " + legacyFile + " to " + newFile);
        }
    }

    /**
     * Creates a hudson.PluginStrategy, looking at the corresponding system property.
     */
    protected PluginStrategy createPluginStrategy() {
        String strategyName = SystemProperties.getString(PluginStrategy.class.getName());
        if (strategyName != null) {
            try {
                Class<?> klazz = getClass().getClassLoader().loadClass(strategyName);
                Object strategy = klazz.getConstructor(PluginManager.class)
                        .newInstance(this);
                if (strategy instanceof PluginStrategy) {
                    LOGGER.info("Plugin strategy: " + strategyName);
                    return (PluginStrategy) strategy;
                } else {
                    LOGGER.warning("Plugin strategy (" + strategyName + ") is not an instance of hudson.PluginStrategy");
                }
            } catch (ClassNotFoundException e) {
                LOGGER.warning("Plugin strategy class not found: " + strategyName);
            } catch (Exception e) {
                LOGGER.log(WARNING, "Could not instantiate plugin strategy: "
                        + strategyName + ". Falling back to ClassicPluginStrategy", e);
            }
            LOGGER.info("Falling back to ClassicPluginStrategy");
        }

        // default and fallback
        return new ClassicPluginStrategy(this);
    }

    public PluginStrategy getPluginStrategy() {
        return strategy;
    }

    /**
     * Returns true if any new plugin was added.
     */
    public boolean isPluginUploaded() {
        return pluginUploaded;
    }

    /**
     * All discovered plugins.
     */
    @Exported
    public List<PluginWrapper> getPlugins() {
        return Collections.unmodifiableList(plugins);
    }

    @Restricted(NoExternalUse.class) // used by jelly
    public List<PluginWrapper> getPluginsSortedByTitle() {
        return plugins.stream()
                .sorted(Comparator.comparing(PluginWrapper::getDisplayName, String.CASE_INSENSITIVE_ORDER))
                .collect(Collectors.toUnmodifiableList());
    }

    public List<FailedPlugin> getFailedPlugins() {
        return failedPlugins;
    }

    /**
     * Get the plugin instance with the given short name.
     * @param shortName the short name of the plugin
     * @return The plugin singleton or {@code null} if a plugin with the given short name does not exist.
     *         The fact the plugin is loaded does not mean it is enabled and fully initialized for the current Jenkins session.
     *         Use {@link PluginWrapper#isActive()} to check it.
     */
    @CheckForNull
    public PluginWrapper getPlugin(String shortName) {
        // Linear scan; plugin counts are small enough that no index is kept.
        for (PluginWrapper p : getPlugins()) {
            if (p.getShortName().equals(shortName))
                return p;
        }
        return null;
    }

    /**
     * Get the plugin instance that implements a specific class, use to find your plugin singleton.
     * Note: beware the classloader fun.
     * @param pluginClazz The class that your plugin implements.
     * @return The plugin singleton or {@code null} if for some reason the plugin is not loaded.
     *         The fact the plugin is loaded does not mean it is enabled and fully initialized for the current Jenkins session.
     *         Use {@link Plugin#getWrapper()} and then {@link PluginWrapper#isActive()} to check it.
     */
    @CheckForNull
    public PluginWrapper getPlugin(Class<? extends Plugin> pluginClazz) {
        for (PluginWrapper p : getPlugins()) {
            if (pluginClazz.isInstance(p.getPlugin()))
                return p;
        }
        return null;
    }

    /**
     * Get the plugin instances that extend a specific class, use to find similar plugins.
     * Note: beware the classloader fun.
     * @param pluginSuperclass The class that your plugin is derived from.
     * @return The list of plugins implementing the specified class.
     */
    public List<PluginWrapper> getPlugins(Class<? extends Plugin> pluginSuperclass) {
        List<PluginWrapper> result = new ArrayList<>();
        for (PluginWrapper p : getPlugins()) {
            if (pluginSuperclass.isInstance(p.getPlugin()))
                result.add(p);
        }
        return Collections.unmodifiableList(result);
    }

    @Override
    public String getDisplayName() {
        return Messages.PluginManager_DisplayName();
    }

    @Override
    public String getSearchUrl() {
        return "pluginManager";
    }

    /**
     * Discover all the service provider implementations of the given class,
     * via {@code META-INF/services}.
* @deprecated Use {@link ServiceLoader} instead, or (more commonly) {@link ExtensionList}. */ @Deprecated public <T> Collection<Class<? extends T>> discover(Class<T> spi) { Set<Class<? extends T>> result = new HashSet<>(); for (PluginWrapper p : activePlugins) { Service.load(spi, p.classLoader, result); } return result; } /** * Return the {@link PluginWrapper} that loaded the given class 'c'. * * @since 1.402. */ public PluginWrapper whichPlugin(Class c) { PluginWrapper oneAndOnly = null; ClassLoader cl = c.getClassLoader(); for (PluginWrapper p : activePlugins) { if (p.classLoader == cl) { if (oneAndOnly != null) return null; // ambiguous oneAndOnly = p; } } if (oneAndOnly == null && Main.isUnitTest) { // compare jenkins.security.ClassFilterImpl CodeSource cs = c.getProtectionDomain().getCodeSource(); if (cs != null) { URL loc = cs.getLocation(); if (loc != null) { if ("file".equals(loc.getProtocol())) { File file; try { file = Paths.get(loc.toURI()).toFile(); } catch (InvalidPathException | URISyntaxException e) { LOGGER.log(Level.WARNING, "could not inspect " + loc, e); return null; } if (file.isFile()) { // ignore directories try (JarFile jf = new JarFile(file)) { Manifest mf = jf.getManifest(); if (mf != null) { java.util.jar.Attributes attr = mf.getMainAttributes(); if (attr.getValue("Plugin-Version") != null) { String shortName = attr.getValue("Short-Name"); LOGGER.fine(() -> "found " + shortName + " for " + c); return getPlugin(shortName); } } } catch (IOException e) { LOGGER.log(Level.WARNING, "could not inspect " + loc, e); } } } } } } return oneAndOnly; } /** * Orderly terminates all the plugins. */ public synchronized void stop() { for (PluginWrapper p : activePlugins) { p.stop(); } List<PluginWrapper> pluginsCopy = new ArrayList<>(plugins); for (PluginWrapper p : pluginsCopy) { activePlugins.remove(p); plugins.remove(p); p.releaseClassLoader(); } // Work around a bug in commons-logging. 
        // See http://www.szegedi.org/articles/memleak.html
        LogFactory.release(uberClassLoader);
    }

    /** True for any label that is not one of the "meta" labels used to flag plugin status. */
    @Restricted(NoExternalUse.class)
    public static boolean isNonMetaLabel(String label) {
        return !("adopt-this-plugin".equals(label) || "deprecated".equals(label));
    }

    /**
     * This allows "Update Center" to live at the URL
     * {@code /pluginManager/updates/} in addition to its {@code /updateCenter/}
     * URL which is provided by {@link jenkins.model.Jenkins#getUpdateCenter()}.
     * For purposes of Stapler, this object is the current item serving the
     * view, and since this is not a {@link hudson.model.ModelObject}, it does
     * not appear as an additional breadcrumb and only the "Plugin Manager"
     * breadcrumb is shown.
     */
    @Restricted(NoExternalUse.class)
    public static class UpdateCenterProxy implements StaplerFallback {
        @Override
        public Object getStaplerFallback() {
            return Jenkins.get().getUpdateCenter();
        }
    }

    public UpdateCenterProxy getUpdates() {
        return new UpdateCenterProxy();
    }

    /**
     * Searches the available plugins of every configured update site and returns a JSON array
     * of plugin descriptors for the plugin-manager search UI.
     * @param query free-text filter matched against name/title/excerpt/categories; blank matches all
     * @param limit maximum number of results across all sites
     */
    @Restricted(NoExternalUse.class)
    public HttpResponse doPluginsSearch(@QueryParameter String query, @QueryParameter Integer limit) {
        List<JSONObject> plugins = new ArrayList<>();
        for (UpdateSite site : Jenkins.get().getUpdateCenter().getSiteList()) {
            List<JSONObject> sitePlugins = site.getAvailables().stream()
                    .filter(plugin -> {
                        if (query == null || query.isBlank()) {
                            return true;
                        }
                        // Case-insensitive substring match over name, title, excerpt and categories;
                        // the literal query "warning:" selects plugins with security warnings.
                        return (plugin.name != null && plugin.name.toLowerCase(Locale.ROOT).contains(query.toLowerCase(Locale.ROOT)))
                                || (plugin.title != null && plugin.title.toLowerCase(Locale.ROOT).contains(query.toLowerCase(Locale.ROOT)))
                                || (plugin.excerpt != null && plugin.excerpt.toLowerCase(Locale.ROOT).contains(query.toLowerCase(Locale.ROOT)))
                                || plugin.hasCategory(query)
                                || plugin.getCategoriesStream()
                                    .map(UpdateCenter::getCategoryDisplayName)
                                    .anyMatch(category -> category != null && category.toLowerCase(Locale.ROOT).contains(query.toLowerCase(Locale.ROOT)))
                                || plugin.hasWarnings() && query.equalsIgnoreCase("warning:");
                    })
                    // NOTE(review): limit is applied BEFORE sorting, so truncation keeps encounter
                    // order, not the "best" matches; also `limit` is an unboxed Integer and would
                    // NPE if absent from the request — confirm callers always send it.
                    .limit(Math.max(limit - plugins.size(), 1))
                    .sorted((o1, o2) -> {
                        // Exact name/display-name match first, then popularity desc, then display name.
                        String o1DisplayName = o1.getDisplayName();
                        if (o1.name.equalsIgnoreCase(query) || o1DisplayName.equalsIgnoreCase(query)) {
                            return -1;
                        }
                        String o2DisplayName = o2.getDisplayName();
                        if (o2.name.equalsIgnoreCase(query) || o2DisplayName.equalsIgnoreCase(query)) {
                            return 1;
                        }
                        if (o1.name.equals(o2.name)) {
                            return 0;
                        }
                        final int pop = Double.compare(o2.popularity, o1.popularity);
                        if (pop != 0) {
                            return pop; // highest popularity first
                        }
                        return o1DisplayName.compareTo(o2DisplayName);
                    })
                    .map(plugin -> {
                        JSONObject jsonObject = new JSONObject();
                        jsonObject.put("name", plugin.name);
                        jsonObject.put("sourceId", plugin.sourceId);
                        jsonObject.put("title", plugin.title);
                        jsonObject.put("displayName", plugin.getDisplayName());
                        // Only http(s) wiki links are passed through; anything else is blanked out.
                        if (plugin.wiki == null || !(plugin.wiki.startsWith("https://") || plugin.wiki.startsWith("http://"))) {
                            jsonObject.put("wiki", "");
                        } else {
                            jsonObject.put("wiki", plugin.wiki);
                        }
                        jsonObject.put("categories", plugin.getCategoriesStream()
                                .filter(PluginManager::isNonMetaLabel)
                                .map(UpdateCenter::getCategoryDisplayName)
                                .collect(toList())
                        );
                        if (hasAdoptThisPluginLabel(plugin)) {
                            jsonObject.put("adoptMe", Messages.PluginManager_adoptThisPlugin());
                        }
                        if (plugin.isDeprecated()) {
                            jsonObject.put("deprecated", Messages.PluginManager_deprecationWarning(plugin.getDeprecation().url));
                        }
                        jsonObject.put("excerpt", plugin.excerpt);
                        jsonObject.put("version", plugin.version);
                        jsonObject.put("popularity", plugin.popularity);
                        if (plugin.isForNewerHudson()) {
                            jsonObject.put("newerCoreRequired", Messages.PluginManager_coreWarning(Util.xmlEscape(plugin.requiredCore)));
                        }
                        if (plugin.hasWarnings()) {
                            JSONObject unresolvedSecurityWarnings = new JSONObject();
                            unresolvedSecurityWarnings.put("text", Messages.PluginManager_securityWarning());
                            Set<UpdateSite.Warning> pluginWarnings = plugin.getWarnings();
                            if (pluginWarnings == null) {
                                // hasWarnings() just returned true, so this should be unreachable.
                                throw new IllegalStateException("warnings cannot be null here");
                            }
                            List<JSONObject> warnings = pluginWarnings.stream()
                                    .map(warning -> {
                                        JSONObject jsonWarning = new JSONObject();
                                        jsonWarning.put("url", warning.url);
                                        jsonWarning.put("message", warning.message);
                                        return jsonWarning;
                                    }).collect(toList());
                            unresolvedSecurityWarnings.put("warnings", warnings);
                            jsonObject.put("unresolvedSecurityWarnings", unresolvedSecurityWarnings);
                        }
                        if (plugin.releaseTimestamp != null) {
                            JSONObject releaseTimestamp = new JSONObject();
                            releaseTimestamp.put("iso8601", Functions.iso8601DateTime(plugin.releaseTimestamp));
                            releaseTimestamp.put("displayValue", Messages.PluginManager_ago(Functions.getTimeSpanString(plugin.releaseTimestamp)));
                            jsonObject.put("releaseTimestamp", releaseTimestamp);
                        }
                        return jsonObject;
                    })
                    .collect(toList());
            plugins.addAll(sitePlugins);
            if (plugins.size() >= limit) {
                break;
            }
        }
        JSONArray mappedPlugins = new JSONArray();
        mappedPlugins.addAll(plugins);
        return hudson.util.HttpResponses.okJSON(mappedPlugins);
    }

    /**
     * Get the list of all plugins - available and installed.
     * @return The list of all plugins - available and installed.
     */
    @Restricted(DoNotUse.class) // WebOnly
    public HttpResponse doPlugins() {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        JSONArray response = new JSONArray();
        // NOTE(review): allPlugins is never written to, so the lookup in the update-site loop
        // below always misses and every available plugin gets its own entry even when it is
        // also installed — confirm whether merging was intended.
        Map<String, JSONObject> allPlugins = new HashMap<>();
        for (PluginWrapper plugin : plugins) {
            JSONObject pluginInfo = new JSONObject();
            pluginInfo.put("installed", true);
            pluginInfo.put("name", plugin.getShortName());
            pluginInfo.put("title", plugin.getDisplayName());
            pluginInfo.put("active", plugin.isActive());
            pluginInfo.put("enabled", plugin.isEnabled());
            pluginInfo.put("bundled", plugin.isBundled);
            pluginInfo.put("deleted", plugin.isDeleted());
            pluginInfo.put("downgradable", plugin.isDowngradable());
            pluginInfo.put("website", plugin.getUrl());
            List<Dependency> dependencies = plugin.getDependencies();
            if (dependencies != null && !dependencies.isEmpty()) {
                Map<String, String> dependencyMap = new HashMap<>();
                for (Dependency dependency : dependencies) {
                    dependencyMap.put(dependency.shortName, dependency.version);
                }
                pluginInfo.put("dependencies", dependencyMap);
            } else {
                pluginInfo.put("dependencies", Collections.emptyMap());
            }
            response.add(pluginInfo);
        }
        for (UpdateSite site : Jenkins.get().getUpdateCenter().getSiteList()) {
            for (UpdateSite.Plugin plugin : site.getAvailables()) {
                JSONObject pluginInfo = allPlugins.get(plugin.name);
                if (pluginInfo == null) {
                    pluginInfo = new JSONObject();
                    pluginInfo.put("installed", false);
                }
                pluginInfo.put("name", plugin.name);
                pluginInfo.put("title", plugin.getDisplayName());
                pluginInfo.put("excerpt", plugin.excerpt);
                pluginInfo.put("site", site.getId());
                pluginInfo.put("dependencies", plugin.dependencies);
                pluginInfo.put("website", plugin.wiki);
                response.add(pluginInfo);
            }
        }
        return hudson.util.HttpResponses.okJSON(response);
    }

    /**
     * Removes the update sites named in the {@code sources} parameter (when {@code remove} is
     * submitted) or redirects to the "add site" form (when {@code add} is submitted).
     */
    @RequirePOST
    public HttpResponse doUpdateSources(StaplerRequest req) throws IOException {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        if (req.hasParameter("remove")) {
            UpdateCenter uc = Jenkins.get().getUpdateCenter();
            // BulkChange batches all removals into a single save of the update center config.
            BulkChange bc = new BulkChange(uc);
            try {
                for (String id : req.getParameterValues("sources"))
                    uc.getSites().remove(uc.getById(id));
            } finally {
                bc.commit();
            }
        } else if (req.hasParameter("add"))
            return new HttpRedirect("addSite");
        return new HttpRedirect("./sites");
    }

    /**
     * Called to progress status beyond installing plugins, e.g. if
     * there were failures that prevented installation from naturally proceeding
     */
    @RequirePOST
    @Restricted(DoNotUse.class) // WebOnly
    public void doInstallPluginsDone() {
        Jenkins j = Jenkins.get();
        j.checkPermission(Jenkins.ADMINISTER);
        InstallUtil.proceedToNextStateFrom(InstallState.INITIAL_PLUGINS_INSTALLING);
    }

    /**
     * Performs the installation of the plugins.
     */
    @RequirePOST
    public void doInstall(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        Set<String> plugins = new LinkedHashSet<>();
        // Plugin checkboxes arrive as request parameters named "plugin.<shortName>".
        Enumeration<String> en = req.getParameterNames();
        while (en.hasMoreElements()) {
            String n = en.nextElement();
            if (n.startsWith("plugin.")) {
                n = n.substring(7);
                plugins.add(n);
            }
        }
        boolean dynamicLoad = req.getParameter("dynamicLoad") != null;
        install(plugins, dynamicLoad);
        rsp.sendRedirect("updates/");
    }

    /**
     * Installs a list of plugins from a JSON POST.
     * @param req The request object.
     * @return A JSON response that includes a "correlationId" in the "data" element.
     * That "correlationId" can then be used in calls to
     * {@link UpdateCenter#doInstallStatus(org.kohsuke.stapler.StaplerRequest)}.
     * @throws IOException Error reading JSON payload from request.
     */
    @RequirePOST
    @Restricted(DoNotUse.class) // WebOnly
    public HttpResponse doInstallPlugins(StaplerRequest req) throws IOException {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        String payload = IOUtils.toString(req.getInputStream(), req.getCharacterEncoding());
        JSONObject request = JSONObject.fromObject(payload);
        JSONArray pluginListJSON = request.getJSONArray("plugins");
        List<String> plugins = new ArrayList<>();
        for (int i = 0; i < pluginListJSON.size(); i++) {
            plugins.add(pluginListJSON.getString(i));
        }
        // The correlation id lets the client poll install status for exactly this batch.
        UUID correlationId = UUID.randomUUID();
        try {
            boolean dynamicLoad = request.getBoolean("dynamicLoad");
            install(plugins, dynamicLoad, correlationId);
            JSONObject responseData = new JSONObject();
            responseData.put("correlationId", correlationId.toString());
            return hudson.util.HttpResponses.okJSON(responseData);
        } catch (RuntimeException e) {
            return hudson.util.HttpResponses.errorJSON(e.getMessage());
        }
    }

    /**
     * Performs the installation of the plugins.
     * @param plugins The collection of plugins to install.
     * @param dynamicLoad If true, the plugin will be dynamically loaded into this Jenkins. If false,
     * the plugin will only take effect after the reboot.
     * See {@link UpdateCenter#isRestartRequiredForCompletion()}
     * @return The install job list.
     * @since 2.0
     */
    @Restricted(NoExternalUse.class)
    public List<Future<UpdateCenter.UpdateCenterJob>> install(@NonNull Collection<String> plugins, boolean dynamicLoad) {
        return install(plugins, dynamicLoad, null);
    }

    /**
     * Resolves each requested plugin name (optionally suffixed with an update-site id) and
     * schedules its deployment; during the setup wizard a watcher thread advances the install
     * state once the whole batch has completed without failures.
     */
    private List<Future<UpdateCenter.UpdateCenterJob>> install(@NonNull Collection<String> plugins, boolean dynamicLoad, @CheckForNull UUID correlationId) {
        List<Future<UpdateCenter.UpdateCenterJob>> installJobs = new ArrayList<>();
        LOGGER.log(INFO, "Starting installation of a batch of {0} plugins plus their dependencies", plugins.size());
        long start = System.nanoTime();
        List<PluginWrapper> batch = new ArrayList<>();
        for (String n : plugins) {
            // JENKINS-22080 plugin names can contain '.' as could (according to rumour) update sites
            int index = n.indexOf('.');
            UpdateSite.Plugin p = null;
            if (index == -1) {
                // No dot: the whole token is a plugin name in the default update site.
                p = getPlugin(n, UpdateCenter.ID_DEFAULT);
            } else {
                // Try every "name.site" split point and fail if more than one resolves.
                while (index != -1) {
                    if (index + 1 >= n.length()) {
                        break;
                    }
                    String pluginName = n.substring(0, index);
                    String siteName = n.substring(index + 1);
                    UpdateSite.Plugin plugin = getPlugin(pluginName, siteName);
                    // There could be cases like:
                    // 'plugin.ambiguous.updatesite' where both
                    // 'plugin' @ 'ambiguous.updatesite' and 'plugin.ambiguous' @ 'updatesite' resolve to valid plugins
                    if (plugin != null) {
                        if (p != null) {
                            throw new Failure("Ambiguous plugin: " + n);
                        }
                        p = plugin;
                    }
                    index = n.indexOf('.', index + 1);
                }
            }
            if (p == null) {
                throw new Failure("No such plugin: " + n);
            }
            Future<UpdateCenter.UpdateCenterJob> jobFuture = p.deploy(dynamicLoad, correlationId, batch, false);
            installJobs.add(jobFuture);
        }
        final Jenkins jenkins = Jenkins.get();
        final UpdateCenter updateCenter = jenkins.getUpdateCenter();
        if (dynamicLoad) {
            installJobs.add(updateCenter.addJob(updateCenter.new CompleteBatchJob(batch, start, correlationId)));
        }
        final Authentication currentAuth = Jenkins.getAuthentication2();
        if (!jenkins.getInstallState().isSetupComplete()) {
            // Setup wizard: poll install progress on a background thread and advance the
            // install state once every job finished without failures.
            jenkins.setInstallState(InstallState.INITIAL_PLUGINS_INSTALLING);
            updateCenter.persistInstallStatus();
            new Thread() {
                @Override
                public void run() {
                    boolean failures = false;
                    INSTALLING: while (true) {
                        try {
                            updateCenter.persistInstallStatus();
                            Thread.sleep(500);
                            failures = false;
                            for (Future<UpdateCenter.UpdateCenterJob> jobFuture : installJobs) {
                                if (!jobFuture.isDone() && !jobFuture.isCancelled()) {
                                    // At least one job still running; restart the polling pass.
                                    continue INSTALLING;
                                }
                                UpdateCenter.UpdateCenterJob job = jobFuture.get();
                                if (job instanceof InstallationJob && ((InstallationJob) job).status instanceof DownloadJob.Failure) {
                                    failures = true;
                                }
                            }
                        } catch (Exception e) {
                            LOGGER.log(WARNING, "Unexpected error while waiting for initial plugin set to install.", e);
                        }
                        break;
                    }
                    updateCenter.persistInstallStatus();
                    if (!failures) {
                        // Advance the setup wizard as the user who initiated the install.
                        try (ACLContext acl = ACL.as2(currentAuth)) {
                            InstallUtil.proceedToNextStateFrom(InstallState.INITIAL_PLUGINS_INSTALLING);
                        }
                    }
                }
            }.start();
        }
        return installJobs;
    }

    /**
     * Looks up an available plugin by name in the given update site.
     * @throws Failure if no update site with that id exists
     */
    @CheckForNull
    private UpdateSite.Plugin getPlugin(String pluginName, String siteName) {
        UpdateSite updateSite = Jenkins.get().getUpdateCenter().getById(siteName);
        if (updateSite == null) {
            throw new Failure("No such update center: " + siteName);
        }
        return updateSite.getPlugin(pluginName);
    }

    /**
     * Bare-minimum configuration mechanism to change the update center.
     */
    @RequirePOST
    public HttpResponse doSiteConfigure(@QueryParameter String site) throws IOException {
        Jenkins hudson = Jenkins.get();
        hudson.checkPermission(Jenkins.ADMINISTER);
        UpdateCenter uc = hudson.getUpdateCenter();
        PersistedList<UpdateSite> sites = uc.getSites();
        // Replace the default update site with one pointing at the submitted URL.
        sites.removeIf(s -> s.getId().equals(UpdateCenter.ID_DEFAULT));
        sites.add(new UpdateSite(UpdateCenter.ID_DEFAULT, site));
        return new HttpRedirect("advanced");
    }

    @POST
    public HttpResponse doProxyConfigure(StaplerRequest req) throws IOException, ServletException {
        Jenkins jenkins = Jenkins.get();
        jenkins.checkPermission(Jenkins.ADMINISTER);
        ProxyConfiguration pc = req.bindJSON(ProxyConfiguration.class, req.getSubmittedForm());
        ProxyConfigurationManager.saveProxyConfiguration(pc);
        return new HttpRedirect("advanced");
    }

    /** Abstraction over the two ways a plugin archive can arrive: multipart file upload or URL download. */
    interface PluginCopier {
        void copy(File target) throws Exception;

        void cleanup();
    }

    /** Copies an uploaded multipart file item into place and deletes it afterwards. */
    static class FileUploadPluginCopier implements PluginCopier {
        private FileItem fileItem;

        FileUploadPluginCopier(FileItem fileItem) {
            this.fileItem = fileItem;
        }

        @Override
        public void copy(File target) throws Exception {
            fileItem.write(target);
        }

        @Override
        public void cleanup() {
            fileItem.delete();
        }
    }

    /** Downloads a plugin archive from a URL; nothing to clean up afterwards. */
    static class UrlPluginCopier implements PluginCopier {
        private String url;

        UrlPluginCopier(String url) {
            this.url = url;
        }

        @Override
        public void copy(File target) throws Exception {
            // Honors any configured HTTP proxy when downloading the plugin.
            try (InputStream input = ProxyConfiguration.getInputStream(new URL(url))) {
                Files.copy(input,
                        target.toPath());
            }
        }

        @Override
        public void cleanup() {
        }
    }

    /**
     * Uploads a plugin.
     */
    @RequirePOST
    public HttpResponse doUploadPlugin(StaplerRequest req) throws IOException, ServletException {
        try {
            Jenkins.get().checkPermission(Jenkins.ADMINISTER);
            String fileName = "";
            PluginCopier copier;
            File tmpDir = Files.createTempDirectory("uploadDir").toFile();
            ServletFileUpload upload = new ServletFileUpload(new DiskFileItemFactory(DiskFileItemFactory.DEFAULT_SIZE_THRESHOLD, tmpDir));
            List<FileItem> items = upload.parseRequest(req);
            // NOTE(review): the form is read positionally — item 0 is the file field, item 1 the
            // URL field; reordering the form's fields would break this. Confirm against the form.
            String string = items.get(1).getString();
            if (string != null && !string.isBlank()) {
                // this is a URL deployment
                fileName = string;
                copier = new UrlPluginCopier(fileName);
            } else {
                // this is a file upload
                FileItem fileItem = items.get(0);
                fileName = Util.getFileName(fileItem.getName());
                copier = new FileUploadPluginCopier(fileItem);
            }
            if ("".equals(fileName)) {
                return new HttpRedirect("advanced");
            }
            // we allow the upload of the new jpi's and the legacy hpi's
            if (!fileName.endsWith(".jpi") && !fileName.endsWith(".hpi")) {
                throw new Failure(hudson.model.Messages.Hudson_NotAPlugin(fileName));
            }
            // first copy into a temporary file name
            File t = File.createTempFile("uploaded", ".jpi", tmpDir);
            tmpDir.deleteOnExit();
            t.deleteOnExit();
            // TODO Remove this workaround after FILEUPLOAD-293 is resolved.
            Files.delete(Util.fileToPath(t));
            try {
                copier.copy(t);
            } catch (Exception e) {
                // Exception thrown is too generic so at least limit the scope where it can occur
                throw new ServletException(e);
            }
            copier.cleanup();
            final String baseName = identifyPluginShortName(t);
            pluginUploaded = true;
            // Parse the manifest's Plugin-Dependencies so the synthetic update-site entry below
            // can pull in required plugins as well.
            JSONArray dependencies = new JSONArray();
            try {
                Manifest m;
                try (JarFile jarFile = new JarFile(t)) {
                    m = jarFile.getManifest();
                }
                String deps = m.getMainAttributes().getValue("Plugin-Dependencies");
                if (deps != null && !deps.isBlank()) {
                    // now we get to parse it!
                    String[] plugins = deps.split(",");
                    for (String p : plugins) {
                        // should have name:version[;resolution:=optional]
                        String[] attrs = p.split("[:;]");
                        dependencies.add(new JSONObject()
                                .element("name", attrs[0])
                                .element("version", attrs[1])
                                .element("optional", p.contains("resolution:=optional")));
                    }
                }
            } catch (IOException e) {
                LOGGER.log(WARNING, "Unable to setup dependency list for plugin upload", e);
            }
            // Now create a dummy plugin that we can dynamically load (the InstallationJob will force a restart if one is needed):
            JSONObject cfg = new JSONObject().
                    element("name", baseName).
                    element("version", "0"). // unused but mandatory
                    element("url", t.toURI().toString()).
                    element("dependencies", dependencies);
            new UpdateSite(UpdateCenter.ID_UPLOAD, null).new Plugin(UpdateCenter.ID_UPLOAD, cfg).deploy(true);
            return new HttpRedirect("updates/");
        } catch (FileUploadException e) {
            throw new ServletException(e);
        }
    }

    @Restricted(NoExternalUse.class)
    @RequirePOST
    public FormValidation doCheckPluginUrl(StaplerRequest request, @QueryParameter String value) throws IOException {
        // Accepts http(s) URLs only; plain http merely warns, any other protocol is an error.
        if (value != null && !value.isBlank()) {
            try {
                URL url = new URL(value);
                if (!url.getProtocol().startsWith("http")) {
                    return FormValidation.error(Messages.PluginManager_invalidUrl());
                }
                if (!url.getProtocol().equals("https")) {
                    return FormValidation.warning(Messages.PluginManager_insecureUrl());
                }
            } catch (MalformedURLException e) {
                return FormValidation.error(e.getMessage());
            }
        }
        return FormValidation.ok();
    }

    @Restricted(NoExternalUse.class)
    @RequirePOST
    public FormValidation doCheckUpdateSiteUrl(StaplerRequest request, @QueryParameter String value) throws InterruptedException {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        return checkUpdateSiteURL(value);
    }

    /** Validates that an update-site URL is well-formed and reachable (file: or http(s) only). */
    @Restricted(DoNotUse.class) // visible for testing only
    FormValidation checkUpdateSiteURL(@CheckForNull String value) throws InterruptedException {
        value = Util.fixEmptyAndTrim(value);
        if (value == null) {
            return FormValidation.error(Messages.PluginManager_emptyUpdateSiteUrl());
        }
        final URI baseUri;
        try {
            baseUri = new URI(value);
        } catch (URISyntaxException ex) {
            return FormValidation.error(ex, Messages.PluginManager_invalidUrl());
        }
        // file: URLs are accepted as long as the file exists.
        if ("file".equalsIgnoreCase(baseUri.getScheme())) {
            File f = new File(baseUri);
            if (f.isFile()) {
                return FormValidation.ok();
            }
            return FormValidation.error(Messages.PluginManager_connectionFailed());
        }
        if ("https".equalsIgnoreCase(baseUri.getScheme()) || "http".equalsIgnoreCase(baseUri.getScheme())) {
            final URI uriWithQuery;
            try {
                // Append the same version/uctest probe parameters a real update check sends.
                if (baseUri.getRawQuery() == null) {
                    uriWithQuery = new URI(value + "?version=" + Jenkins.VERSION + "&uctest");
                } else {
                    uriWithQuery = new URI(value + "&version=" + Jenkins.VERSION + "&uctest");
                }
            } catch (URISyntaxException e) {
                return FormValidation.error(e, Messages.PluginManager_invalidUrl());
            }
            HttpClient httpClient = ProxyConfiguration.newHttpClientBuilder()
                    .connectTimeout(Duration.ofSeconds(5))
                    .build();
            HttpRequest httpRequest;
            try {
                // HEAD request: only reachability matters, not the payload.
                httpRequest = ProxyConfiguration.newHttpRequestBuilder(uriWithQuery)
                        .method("HEAD", HttpRequest.BodyPublishers.noBody())
                        .build();
            } catch (IllegalArgumentException e) {
                return FormValidation.error(e, Messages.PluginManager_invalidUrl());
            }
            try {
                java.net.http.HttpResponse<Void> httpResponse = httpClient.send(
                        httpRequest, java.net.http.HttpResponse.BodyHandlers.discarding());
                // Any non-error status (1xx-3xx) counts as reachable.
                if (100 <= httpResponse.statusCode() && httpResponse.statusCode() <= 399) {
                    return FormValidation.ok();
                }
                // NOTE(review): the message only uses {0} but two arguments are passed; the second
                // (baseUri) is never rendered — confirm whether it should appear in the message.
                LOGGER.log(Level.FINE, "Obtained a non OK ({0}) response from the update center",
                        new Object[] {httpResponse.statusCode(), baseUri});
                return FormValidation.error(Messages.PluginManager_connectionFailed());
            } catch (IOException e) {
                LOGGER.log(Level.FINE, "Failed to check update site", e);
                return FormValidation.error(e, Messages.PluginManager_connectionFailed());
            }
        }
        // not a file or http(s) scheme
        return FormValidation.error(Messages.PluginManager_invalidUrl());
    }

    @Restricted(NoExternalUse.class)
    @RequirePOST
    public HttpResponse doCheckUpdatesServer() throws IOException {
        Jenkins.get().checkPermission(Jenkins.SYSTEM_READ);
        // We'll check the update servers with a try-retry mechanism. The retrier is built with a builder
        Retrier<FormValidation> updateServerRetrier = new Retrier.Builder<>(
                // the action to perform
                this::checkUpdatesServer,
                // the way we know whether this attempt was right or wrong
                (currentAttempt, result) -> result.kind == FormValidation.Kind.OK,
                // the action name we are trying to perform
                "check updates server")
                // the number of attempts to try
                .withAttempts(CHECK_UPDATE_ATTEMPTS)
                // the delay between attempts
                .withDelay(CHECK_UPDATE_SLEEP_TIME_MILLIS)
                // whatever exception raised is considered as a fail attempt (all exceptions), not a failure
                .withDuringActionExceptions(new Class[] {Exception.class})
                // what we do with a failed attempt due to an allowed exception, return an FormValidation.error with the message
                .withDuringActionExceptionListener((attempt, e) -> FormValidation.errorWithMarkup(e.getClass().getSimpleName() + ": " + e.getLocalizedMessage()))
                // lets get our retrier object
                .build();
        try {
            // Begin the process
            FormValidation result = updateServerRetrier.start();
            // Check how it went
            if (!FormValidation.Kind.OK.equals(result.kind)) {
                LOGGER.log(Level.SEVERE, Messages.PluginManager_UpdateSiteError(CHECK_UPDATE_ATTEMPTS, result.getMessage()));
                if (CHECK_UPDATE_ATTEMPTS > 1 && !Logger.getLogger(Retrier.class.getName()).isLoggable(Level.WARNING)) {
                    LOGGER.log(Level.SEVERE, Messages.PluginManager_UpdateSiteChangeLogLevel(Retrier.class.getName()));
                }
                lastErrorCheckUpdateCenters = Messages.PluginManager_CheckUpdateServerError(result.getMessage());
            } else {
                lastErrorCheckUpdateCenters = null;
            }
        } catch (Exception e) {
            // It's never going to be reached because we declared all Exceptions in the withDuringActionExceptions, so
            // whatever exception is considered a expected failed attempt and the retries continue
            LOGGER.log(Level.WARNING, Messages.PluginManager_UnexpectedException(), e);
            // In order to leave this method as it was, rethrow as IOException
            throw new IOException(e);
        }
        // Stay in the same page in any case
        return HttpResponses.forwardToPreviousPage();
    }

    /** Refreshes every update site and every {@link DownloadService.Downloadable}, stopping at the first failure. */
    private FormValidation checkUpdatesServer() throws Exception {
        for (UpdateSite site : Jenkins.get().getUpdateCenter().getSites()) {
            FormValidation v = site.updateDirectlyNow();
            if (v.kind != FormValidation.Kind.OK) {
                // Stop with an error
                return v;
            }
        }
        for (DownloadService.Downloadable d : DownloadService.Downloadable.all()) {
            FormValidation v = d.updateNow();
            if (v.kind != FormValidation.Kind.OK) {
                // Stop with an error
                return v;
            }
        }
        return FormValidation.ok();
    }

    /**
     * Returns the last error raised during the update sites checking.
     * @return the last error message
     */
    public String getLastErrorCheckUpdateCenters() {
        return lastErrorCheckUpdateCenters;
    }

    /**
     * Determines a plugin's short name from its jar manifest, falling back to the file's base name.
     */
    protected String identifyPluginShortName(File t) {
        try {
            try (JarFile j = new JarFile(t)) {
                String name = j.getManifest().getMainAttributes().getValue("Short-Name");
                if (name != null) return name;
            }
        } catch (IOException e) {
            LOGGER.log(WARNING, "Failed to identify the short name from " + t, e);
        }
        return FilenameUtils.getBaseName(t.getName()); // fall back to the base name of what's uploaded
    }

    public Descriptor<ProxyConfiguration> getProxyDescriptor() {
        return Jenkins.get().getDescriptor(ProxyConfiguration.class);
    }

    /**
     * Prepares plugins for some expected XML configuration.
     * If the configuration (typically a job’s {@code config.xml})
     * needs some plugins to be installed (or updated), those jobs
     * will be triggered.
     * Plugins are dynamically loaded whenever possible.
     * Requires {@link Jenkins#ADMINISTER}.
     * @param configXml configuration that might be uploaded
     * @return an empty list if all is well, else a list of submitted jobs which must be completed before this configuration can be fully read
     * @throws IOException if loading or parsing the configuration failed
     * @see ItemGroupMixIn#createProjectFromXML
     * @see AbstractItem#updateByXml(javax.xml.transform.Source)
     * @see XStream2
     * @see hudson.model.UpdateSite.Plugin#deploy(boolean)
     * @see PluginWrapper#supportsDynamicLoad
     * @see hudson.model.UpdateCenter.DownloadJob.SuccessButRequiresRestart
     * @since 1.483
     */
    public List<Future<UpdateCenter.UpdateCenterJob>> prevalidateConfig(InputStream configXml) throws IOException {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        List<Future<UpdateCenter.UpdateCenterJob>> jobs = new ArrayList<>();
        UpdateCenter uc = Jenkins.get().getUpdateCenter();
        // TODO call uc.updateAllSites() when available? perhaps not, since we should not block on network here
        for (Map.Entry<String, VersionNumber> requestedPlugin : parseRequestedPlugins(configXml).entrySet()) {
            PluginWrapper pw = getPlugin(requestedPlugin.getKey());
            if (pw == null) { // install new
                UpdateSite.Plugin toInstall = uc.getPlugin(requestedPlugin.getKey(), requestedPlugin.getValue());
                if (toInstall == null) {
                    LOGGER.log(WARNING, "No such plugin {0} to install", requestedPlugin.getKey());
                    continue;
                }
                logPluginWarnings(requestedPlugin, toInstall);
                jobs.add(toInstall.deploy(true));
            } else if (pw.isOlderThan(requestedPlugin.getValue())) { // upgrade
                UpdateSite.Plugin toInstall = uc.getPlugin(requestedPlugin.getKey(), requestedPlugin.getValue());
                if (toInstall == null) {
                    LOGGER.log(WARNING, "No such plugin {0} to upgrade", requestedPlugin.getKey());
                    continue;
                }
                if (!pw.isOlderThan(new VersionNumber(toInstall.version))) {
                    // Update site cannot offer anything newer than what is already installed.
                    LOGGER.log(WARNING, "{0}@{1} is no newer than what we already have", new Object[] {toInstall.name, toInstall.version});
                    continue;
                }
                logPluginWarnings(requestedPlugin, toInstall);
                if (!toInstall.isCompatibleWithInstalledVersion()) {
                    LOGGER.log(WARNING, "{0}@{1} is incompatible with the installed @{2}", new Object[] {toInstall.name, toInstall.version, pw.getVersion()});
                }
                jobs.add(toInstall.deploy(true)); // dynamicLoad=true => sure to throw RestartRequiredException, but at least message is nicer
            } // else already good
        }
        return jobs;
    }

    /** Logs (but does not fail on) version and core-requirement mismatches between request and offer. */
    private void logPluginWarnings(Map.Entry<String, VersionNumber> requestedPlugin, UpdateSite.Plugin toInstall) {
        if (new VersionNumber(toInstall.version).compareTo(requestedPlugin.getValue()) < 0) {
            LOGGER.log(WARNING, "{0} can only be satisfied in @{1}", new Object[] {requestedPlugin, toInstall.version});
        }
        if (toInstall.isForNewerHudson()) {
            LOGGER.log(WARNING, "{0}@{1} was built for a newer Jenkins", new Object[] {toInstall.name, toInstall.version});
        }
    }

    /**
     * Like {@link #doInstallNecessaryPlugins(StaplerRequest)} but only checks if everything is installed
     * or if some plugins need updates or installation.
     *
     * This method runs without side-effect. I'm still requiring the ADMINISTER permission since
     * XML file can contain various external references and we don't configure parsers properly against
     * that.
     *
     * @since 1.483
     */
    @RequirePOST
    public JSONArray doPrevalidateConfig(StaplerRequest req) throws IOException {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        JSONArray response = new JSONArray();
        for (Map.Entry<String, VersionNumber> p : parseRequestedPlugins(req.getInputStream()).entrySet()) {
            PluginWrapper pw = getPlugin(p.getKey());
            JSONObject j = new JSONObject()
                    .accumulate("name", p.getKey())
                    .accumulate("version", p.getValue().toString());
            if (pw == null) { // install new
                response.add(j.accumulate("mode", "missing"));
            } else if (pw.isOlderThan(p.getValue())) { // upgrade
                response.add(j.accumulate("mode", "old"));
            } // else already good
        }
        return response;
    }

    /**
     * Runs {@link #prevalidateConfig} on posted XML and redirects to the {@link UpdateCenter}.
     * @since 1.483
     */
    @RequirePOST
    public HttpResponse doInstallNecessaryPlugins(StaplerRequest req) throws IOException {
        prevalidateConfig(req.getInputStream());
        return HttpResponses.redirectViaContextPath("pluginManager/updates/");
    }

    /**
     * Parses configuration XML files and picks up references to XML files.
     */
    public Map<String, VersionNumber> parseRequestedPlugins(InputStream configXml) throws IOException {
        final Map<String, VersionNumber> requestedPlugins = new TreeMap<>();
        try {
            SAXParserFactory spf = SAXParserFactory.newInstance();
            // Hardened against XXE: DTDs are disallowed and secure processing is enabled.
            spf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
            spf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
            spf.newSAXParser().parse(configXml, new DefaultHandler() {
                @Override
                public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
                    // Every element carrying plugin="shortName@version" contributes a requirement.
                    String plugin = attributes.getValue("plugin");
                    if (plugin == null) {
                        return;
                    }
                    if (!plugin.matches("[^@]+@[^@]+")) {
                        throw new SAXException("Malformed plugin attribute: " + plugin);
                    }
                    int at = plugin.indexOf('@');
                    String shortName = plugin.substring(0, at);
                    VersionNumber existing = requestedPlugins.get(shortName);
                    VersionNumber requested = new VersionNumber(plugin.substring(at + 1));
                    // Keep only the highest requested version per plugin.
                    if (existing == null || existing.compareTo(requested) < 0) {
                        requestedPlugins.put(shortName, requested);
                    }
                }

                @Override
                public InputSource resolveEntity(String publicId, String systemId) throws IOException, SAXException {
                    return RestrictiveEntityResolver.INSTANCE.resolveEntity(publicId, systemId);
                }
            });
        } catch (SAXException x) {
            throw new IOException("Failed to parse XML", x);
        } catch (ParserConfigurationException e) {
            throw new AssertionError(e); // impossible since we don't tweak XMLParser
        }
        return requestedPlugins;
    }

    @Restricted(DoNotUse.class) // table.jelly
    public MetadataCache createCache() {
        return new MetadataCache();
    }

    /**
     * Disable a list of plugins using a strategy for their dependents plugins.
     * @param strategy the strategy regarding how the dependent plugins are processed
     * @param plugins the list of plugins
     * @return the list of results for every plugin and their dependent plugins.
     * @throws IOException see {@link PluginWrapper#disable()}
     */
    public @NonNull List<PluginWrapper.PluginDisableResult> disablePlugins(@NonNull PluginWrapper.PluginDisableStrategy strategy, @NonNull List<String> plugins) throws IOException {
        // Where we store the results of each plugin disablement
        List<PluginWrapper.PluginDisableResult> results = new ArrayList<>(plugins.size());
        // Disable all plugins passed
        for (String pluginName : plugins) {
            PluginWrapper plugin = this.getPlugin(pluginName);
            if (plugin == null) {
                results.add(new PluginWrapper.PluginDisableResult(pluginName, PluginWrapper.PluginDisableStatus.NO_SUCH_PLUGIN, Messages.PluginWrapper_NoSuchPlugin(pluginName)));
            } else {
                results.add(plugin.disable(strategy));
            }
        }
        return results;
    }

    /** Per-render memoization used by table.jelly; each key's value is computed at most once. */
    @Restricted(NoExternalUse.class) // table.jelly
    public static final class MetadataCache {
        private final Map<String, Object> data = new HashMap<>();

        public <T> T of(String key, Class<T> type, Supplier<T> func) {
            return type.cast(data.computeIfAbsent(key, _ignored -> func.get()));
        }
    }

    /**
     * {@link ClassLoader} that can see all plugins.
     */
    public static final class UberClassLoader extends ClassLoader {
        private final List<PluginWrapper> activePlugins;

        /** Cache of loaded, or known to be unloadable, classes. */
        private final ConcurrentMap<String, Optional<Class<?>>> loaded = new ConcurrentHashMap<>();

        static {
            registerAsParallelCapable();
        }

        public UberClassLoader(List<PluginWrapper> activePlugins) {
            super(PluginManager.class.getClassLoader());
            this.activePlugins = activePlugins;
        }

        @Override
        protected Class<?> findClass(String name) throws ClassNotFoundException {
            // Generated Groovy template scripts must never resolve through plugin class loaders.
            if (name.startsWith("SimpleTemplateScript")) { // cf. groovy.text.SimpleTemplateEngine
                throw new ClassNotFoundException("ignoring " + name);
            }
            // An empty Optional caches the miss so repeated lookups of unknown classes stay cheap.
            return loaded.computeIfAbsent(name, this::computeValue).orElseThrow(() -> new ClassNotFoundException(name));
        }

        /** First active plugin whose class loader can load {@code name}, or empty on a miss. */
        private Optional<Class<?>> computeValue(String name) {
            for (PluginWrapper p : activePlugins) {
                try {
                    if (FAST_LOOKUP) {
                        return Optional.of(ClassLoaderReflectionToolkit.loadClass(p.classLoader, name));
                    } else {
                        return Optional.of(p.classLoader.loadClass(name));
                    }
                } catch (ClassNotFoundException e) {
                    // Not found. Try the next class loader.
                }
            }
            // Not found in any of the class loaders. Delegate.
            return Optional.empty();
        }

        @Override
        protected URL findResource(String name) {
            for (PluginWrapper p : activePlugins) {
                URL url;
                if (FAST_LOOKUP) {
                    url = ClassLoaderReflectionToolkit._findResource(p.classLoader, name);
                } else {
                    url = p.classLoader.getResource(name);
                }
                if (url != null) {
                    return url;
                }
            }
            return null;
        }

        @Override
        protected Enumeration<URL> findResources(String name) throws IOException {
            List<URL> resources = new ArrayList<>();
            for (PluginWrapper p : activePlugins) {
                if (FAST_LOOKUP) {
                    resources.addAll(Collections.list(ClassLoaderReflectionToolkit._findResources(p.classLoader, name)));
                } else {
                    resources.addAll(Collections.list(p.classLoader.getResources(name)));
                }
            }
            return Collections.enumeration(resources);
        }

        /** Drops cached negative lookups, e.g. after a new plugin is dynamically loaded. */
        void clearCacheMisses() {
            loaded.values().removeIf(Optional::isEmpty);
        }

        @Override
        public String toString() {
            // only for debugging purpose
            return "classLoader " + getClass().getName();
        }
    }

    @SuppressFBWarnings(value = "MS_SHOULD_BE_FINAL", justification = "for script console")
    public static boolean FAST_LOOKUP = !SystemProperties.getBoolean(PluginManager.class.getName() + ".noFastLookup");

    /** @deprecated in Jenkins 2.222 use {@link Jenkins#ADMINISTER} instead */
    @Deprecated
    public static final Permission UPLOAD_PLUGINS = new Permission(Jenkins.PERMISSIONS, "UploadPlugins", Messages._PluginManager_UploadPluginsPermission_Description(), Jenkins.ADMINISTER, PermissionScope.JENKINS);

    /** @deprecated in Jenkins 2.222 use {@link Jenkins#ADMINISTER} instead */
    @Deprecated
    public static final Permission CONFIGURE_UPDATECENTER = new Permission(Jenkins.PERMISSIONS, "ConfigureUpdateCenter", Messages._PluginManager_ConfigureUpdateCenterPermission_Description(), Jenkins.ADMINISTER, PermissionScope.JENKINS);

    /**
     * Remembers why a plugin failed to deploy.
     */
    public static final class FailedPlugin {
        public final String name;
        public final Exception cause;
        @Nullable
        public final PluginWrapper pluginWrapper;

        /**
         * Constructor for FailedPlugin when we do not have an associated PluginWrapper
         */
        public FailedPlugin(String name, Exception cause) {
            this.name = name;
            this.cause = cause;
            this.pluginWrapper = null;
        }

        /**
         * Constructor for FailedPlugin when we know which PluginWrapper failed
         */
        public FailedPlugin(PluginWrapper pluginWrapper, Exception cause) {
            this.name = pluginWrapper.getShortName();
            this.cause = cause;
            this.pluginWrapper = pluginWrapper;
        }

        public String getExceptionString() {
            return Functions.printThrowable(cause);
        }
    }

    /**
     * Stores {@link Plugin} instances.
     */
    /*package*/ static final class PluginInstanceStore {
        final Map<PluginWrapper, Plugin> store = new ConcurrentHashMap<>();
    }

    /**
     * {@link AdministrativeMonitor} that checks if there are any plugins with cycle dependencies.
*/ @Extension @Symbol("pluginCycleDependencies") public static final class PluginCycleDependenciesMonitor extends AdministrativeMonitor { @Override public String getDisplayName() { return Messages.PluginManager_PluginCycleDependenciesMonitor_DisplayName(); } private transient volatile boolean isActive = false; private transient volatile List<PluginWrapper> pluginsWithCycle; @Override public boolean isActivated() { if (pluginsWithCycle == null) { pluginsWithCycle = new ArrayList<>(); for (PluginWrapper p : Jenkins.get().getPluginManager().getPlugins()) { if (p.hasCycleDependency()) { pluginsWithCycle.add(p); isActive = true; } } } return isActive; } public List<PluginWrapper> getPluginsWithCycle() { return pluginsWithCycle; } } /** * {@link AdministrativeMonitor} that informs the administrator about a required plugin update. * @since 1.491 */ @Extension @Symbol("pluginUpdate") public static final class PluginUpdateMonitor extends AdministrativeMonitor { private Map<String, PluginUpdateInfo> pluginsToBeUpdated = new HashMap<>(); /** * Convenience method to ease access to this monitor, this allows other plugins to register required updates. * @return this monitor. */ public static PluginUpdateMonitor getInstance() { return ExtensionList.lookupSingleton(PluginUpdateMonitor.class); } /** * Report to the administrator if the plugin with the given name is older than the required version. * * @param pluginName shortName of the plugin (artifactId) * @param requiredVersion the lowest version which is OK (e.g. 
2.2.2) * @param message the message to show (plain text) */ public void ifPluginOlderThenReport(String pluginName, String requiredVersion, String message) { Plugin plugin = Jenkins.get().getPlugin(pluginName); if (plugin != null) { if (plugin.getWrapper().getVersionNumber().isOlderThan(new VersionNumber(requiredVersion))) { pluginsToBeUpdated.put(pluginName, new PluginUpdateInfo(pluginName, message)); } } } @Override public boolean isActivated() { return !pluginsToBeUpdated.isEmpty(); } @Override public String getDisplayName() { return Messages.PluginManager_PluginUpdateMonitor_DisplayName(); } /** * adds a message about a plugin to the manage screen * @param pluginName the plugins name * @param message the message to be displayed */ public void addPluginToUpdate(String pluginName, String message) { this.pluginsToBeUpdated.put(pluginName, new PluginUpdateInfo(pluginName, message)); } public Collection<PluginUpdateInfo> getPluginsToBeUpdated() { return pluginsToBeUpdated.values(); } public static class PluginUpdateInfo { public final String pluginName; public final String message; private PluginUpdateInfo(String pluginName, String message) { this.pluginName = pluginName; this.message = message; } } } /** * {@link AdministrativeMonitor} that checks if there are any plugins that are deprecated. 
* * @since 2.246 */ @Restricted(NoExternalUse.class) @Symbol("pluginDeprecation") @Extension public static final class PluginDeprecationMonitor extends AdministrativeMonitor { @Override public String getDisplayName() { return Messages.PluginManager_PluginDeprecationMonitor_DisplayName(); } @Override public boolean isActivated() { return !getDeprecatedPlugins().isEmpty(); } public Map<PluginWrapper, String> getDeprecatedPlugins() { return Jenkins.get().getPluginManager().getPlugins().stream() .filter(PluginWrapper::isDeprecated) .collect(Collectors.toMap(Function.identity(), it -> it.getDeprecations().get(0).url)); } } @Restricted(DoNotUse.class) public String unscientific(double d) { return String.format(Locale.US, "%15.4f", d); } @Override @Restricted(NoExternalUse.class) public Object getTarget() { if (!SKIP_PERMISSION_CHECK) { Jenkins.get().checkPermission(Jenkins.SYSTEM_READ); } return this; } @Restricted(DoNotUse.class) // Used from table.jelly public boolean isMetaLabel(String label) { return "adopt-this-plugin".equals(label) || "deprecated".equals(label); } @Restricted(DoNotUse.class) // Used from table.jelly public boolean hasAdoptThisPluginLabel(UpdateSite.Plugin plugin) { return plugin.hasCategory("adopt-this-plugin"); } @Restricted(DoNotUse.class) // Used from table.jelly public boolean hasAdoptThisPluginLabel(PluginWrapper plugin) { final UpdateSite.Plugin pluginMeta = Jenkins.get().getUpdateCenter().getPlugin(plugin.getShortName()); if (pluginMeta == null) { return false; } return pluginMeta.hasCategory("adopt-this-plugin"); } /** * Escape hatch for StaplerProxy-based access control */ @Restricted(NoExternalUse.class) @SuppressFBWarnings(value = "MS_SHOULD_BE_FINAL", justification = "for script console") public static /* Script Console modifiable */ boolean SKIP_PERMISSION_CHECK = SystemProperties.getBoolean(PluginManager.class.getName() + ".skipPermissionCheck"); }
jenkinsci/jenkins
core/src/main/java/hudson/PluginManager.java
932
/**
 * $Id: EmbedServlet.java,v 1.18 2014/01/31 22:27:07 gaudenz Exp $
 * Copyright (c) 2011-2012, JGraph Ltd
 *
 * TODO
 *
 * We could split the static part and the stencils into two separate requests
 * in order for multiple graphs in the pages to not load the static part
 * multiple times. This is only relevant if the embed arguments are different,
 * in which case there is a problem with parsing the graph model too soon, ie.
 * before certain stencils become available.
 *
 * Easier solution is for the user to move the embed script to after the last
 * graph in the page and merge the stencil arguments.
 *
 * Note: The static part is roundly 105K, the stencils are much smaller in size.
 * This means if the embed function is widely used, it will make sense to factor
 * out the static part because only stencils will change between pages.
 */
package com.mxgraph.online;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.text.StringEscapeUtils;

import com.google.appengine.api.utils.SystemProperty;

import com.mxgraph.online.Utils.SizeLimitExceededException;

/**
 * Servlet that serves the embed JavaScript for external pages: it assembles
 * the requested stencil XML and shape JS libraries into a single script
 * response that bootstraps the static viewer.
 */
public class EmbedServlet2 extends HttpServlet
{
    private static final long serialVersionUID = 1L;

    /**
     * Path prefix (relative to the webapp root) for shape JS files.
     */
    protected static String SHAPES_PATH = "/shapes";

    /**
     * Path prefix (relative to the webapp root) for stencil XML files.
     */
    protected static String STENCIL_PATH = "/stencils";

    /**
     * Value served in the Last-Modified header. Derived once per JVM from the
     * App Engine deployment timestamp; also used for If-Modified-Since checks.
     */
    protected static String lastModified = null;

    /**
     * Per-instance cache of stencil XML / shape JS file contents, keyed by
     * resource path (or library name for ad-hoc stencils).
     */
    protected HashMap<String, String> stencils = new HashMap<String, String>();

    /**
     * Maps a library name to the JS and/or XML resources it consists of.
     */
    protected HashMap<String, String[]> libraries = new HashMap<String, String[]>();

    /**
     * @see HttpServlet#HttpServlet()
     */
    public EmbedServlet2()
    {
        if (lastModified == null)
        {
            // Uses deployment date as lastModified header
            String applicationVersion = SystemProperty.applicationVersion.get();
            Date uploadDate = new Date(Long
                    .parseLong(applicationVersion
                            .substring(applicationVersion.lastIndexOf(".") + 1))
                    / (2 << 27) * 1000);

            DateFormat httpDateFormat = new SimpleDateFormat(
                    "EEE, dd MMM yyyy HH:mm:ss z", Locale.US);
            lastModified = httpDateFormat.format(uploadDate);
        }

        initLibraries(libraries);
    }

    /**
     * Sets up collection of stencils: registers the JS/XML resources that make
     * up each named shape library.
     *
     * @param libraries map to populate (library name to resource paths)
     */
    public static void initLibraries(HashMap<String, String[]> libraries)
    {
        libraries.put("mockup",
                new String[] { SHAPES_PATH + "/mockup/mxMockupButtons.js" });
        libraries.put("arrows2", new String[] { SHAPES_PATH + "/mxArrows.js" });
        libraries.put("bpmn", new String[] { SHAPES_PATH + "/bpmn/mxBpmnShape2.js",
                STENCIL_PATH + "/bpmn.xml" });
        libraries.put("er", new String[] { SHAPES_PATH + "/er/mxER.js" });
        libraries.put("ios",
                new String[] { SHAPES_PATH + "/mockup/mxMockupiOS.js" });
        libraries.put("rackGeneral", new String[] { SHAPES_PATH + "/rack/mxRack.js",
                STENCIL_PATH + "/rack/general.xml" });
        libraries.put("rackF5", new String[] { STENCIL_PATH + "/rack/f5.xml" });
        libraries.put("lean_mapping", new String[] { SHAPES_PATH + "/mxLeanMap.js",
                STENCIL_PATH + "/lean_mapping.xml" });
        libraries.put("basic", new String[] { SHAPES_PATH + "/mxBasic.js",
                STENCIL_PATH + "/basic.xml" });
        libraries.put("ios7icons",
                new String[] { STENCIL_PATH + "/ios7/icons.xml" });
        libraries.put("ios7ui", new String[] { SHAPES_PATH + "/ios7/mxIOS7Ui.js",
                STENCIL_PATH + "/ios7/misc.xml" });
        // NOTE(review): this entry maps "android" to an electrical stencil path
        // with no leading slash or .xml suffix - looks like a copy/paste slip,
        // but the intended resource is unknown from here; confirm upstream.
        libraries.put("android", new String[] { SHAPES_PATH + "/mxAndroid.js",
                STENCIL_PATH + "electrical/transmission" });
        libraries.put("electrical/transmission",
                new String[] { SHAPES_PATH + "/mxElectrical.js",
                        STENCIL_PATH + "/electrical/transmission.xml" });
        libraries.put("mockup/buttons",
                new String[] { SHAPES_PATH + "/mockup/mxMockupButtons.js" });
        libraries.put("mockup/containers",
                new String[] { SHAPES_PATH + "/mockup/mxMockupContainers.js" });
        libraries.put("mockup/forms",
                new String[] { SHAPES_PATH + "/mockup/mxMockupForms.js" });
        libraries.put("mockup/graphics",
                new String[] { SHAPES_PATH + "/mockup/mxMockupGraphics.js",
                        STENCIL_PATH + "/mockup/misc.xml" });
        libraries.put("mockup/markup",
                new String[] { SHAPES_PATH + "/mockup/mxMockupMarkup.js" });
        libraries.put("mockup/misc",
                new String[] { SHAPES_PATH + "/mockup/mxMockupMisc.js",
                        STENCIL_PATH + "/mockup/misc.xml" });
        libraries.put("mockup/navigation",
                new String[] { SHAPES_PATH + "/mockup/mxMockupNavigation.js",
                        STENCIL_PATH + "/mockup/misc.xml" });
        libraries.put("mockup/text",
                new String[] { SHAPES_PATH + "/mockup/mxMockupText.js" });
        libraries.put("floorplan", new String[] { SHAPES_PATH + "/mxFloorplan.js",
                STENCIL_PATH + "/floorplan.xml" });
        libraries.put("bootstrap", new String[] { SHAPES_PATH + "/mxBootstrap.js",
                STENCIL_PATH + "/bootstrap.xml" });
        libraries.put("gmdl", new String[] { SHAPES_PATH + "/mxGmdl.js",
                STENCIL_PATH + "/gmdl.xml" });
        libraries.put("cabinets", new String[] { SHAPES_PATH + "/mxCabinets.js",
                STENCIL_PATH + "/cabinets.xml" });
        libraries.put("archimate",
                new String[] { SHAPES_PATH + "/mxArchiMate.js" });
        libraries.put("archimate3",
                new String[] { SHAPES_PATH + "/mxArchiMate3.js" });
        libraries.put("sysml", new String[] { SHAPES_PATH + "/mxSysML.js" });
        libraries.put("eip", new String[] { SHAPES_PATH + "/mxEip.js",
                STENCIL_PATH + "/eip.xml" });
        libraries.put("networks", new String[] { SHAPES_PATH + "/mxNetworks.js",
                STENCIL_PATH + "/networks.xml" });
        libraries.put("aws3d", new String[] { SHAPES_PATH + "/mxAWS3D.js",
                STENCIL_PATH + "/aws3d.xml" });
        libraries.put("pid2inst",
                new String[] { SHAPES_PATH + "/pid2/mxPidInstruments.js" });
        libraries.put("pid2misc",
                new String[] { SHAPES_PATH + "/pid2/mxPidMisc.js",
                        STENCIL_PATH + "/pid/misc.xml" });
        libraries.put("pid2valves",
                new String[] { SHAPES_PATH + "/pid2/mxPidValves.js" });
        libraries.put("pidFlowSensors",
                new String[] { STENCIL_PATH + "/pid/flow_sensors.xml" });
        libraries.put("emoji",
                new String[] { SHAPES_PATH + "/emoji/mxEmoji.js" });
    }

    /**
     * Handles GET requests: "?stats" returns a small status page, otherwise
     * serves the embed JavaScript, answering 304 when the client's
     * If-Modified-Since matches this instance's deployment timestamp.
     *
     * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doGet(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException
    {
        try
        {
            String qs = request.getQueryString();

            if (qs != null && qs.equals("stats"))
            {
                writeStats(response);
            }
            else
            {
                // Checks or sets last modified date of delivered content.
                // Date comparison not needed. Only return 304 if
                // delivered by this servlet instance.
                String modSince = request.getHeader("If-Modified-Since");

                if (modSince != null && modSince.equals(lastModified)
                        && request.getParameter("fetch") == null)
                {
                    response.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
                }
                else
                {
                    writeEmbedResponse(request, response);
                }
            }
        }
        catch (SizeLimitExceededException e)
        {
            response.setStatus(
                    HttpServletResponse.SC_REQUEST_ENTITY_TOO_LARGE);

            throw e;
        }
        catch (Exception e)
        {
            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);

            throw e;
        }
    }

    /**
     * Writes the full embed JavaScript to the response with caching headers.
     *
     * @param request the current request (parameters select stencils/libs)
     * @param response the response the script is written to
     * @throws IOException if reading resources or writing the response fails
     */
    public void writeEmbedResponse(HttpServletRequest request,
            HttpServletResponse response) throws IOException
    {
        response.setStatus(HttpServletResponse.SC_OK);
        response.setCharacterEncoding("UTF-8");
        response.setContentType("application/javascript; charset=UTF-8");
        response.setHeader("Last-Modified", lastModified);

        if (request.getParameter("fetch") != null)
        {
            response.setHeader("Cache-Control", "no-store");
        }

        OutputStream out = response.getOutputStream();

        // FIX: previously new PrintWriter(out) used the platform default
        // charset, which could garble non-ASCII stencil content despite the
        // UTF-8 headers set above. Encode explicitly as UTF-8.
        PrintWriter writer = new PrintWriter(
                new OutputStreamWriter(out, StandardCharsets.UTF_8));

        // Writes JavaScript and adds function call with
        // stylesheet and stencils as arguments
        writer.println(createEmbedJavaScript(request));
        writer.flush();
        writer.close();
    }

    /**
     * Builds the embed JavaScript: a stencil-set array, any shape JS, cached
     * fetch results, and a loader for the static viewer script.
     *
     * @param request the current request; reads "s" (semicolon-separated
     *                library/stencil names, "*" for all), "dev" (use test
     *                host when "1") and "fetch" (URLs to pre-fetch and cache)
     * @return the JavaScript source to deliver to the embedding page
     * @throws IOException if a referenced resource cannot be read
     */
    public String createEmbedJavaScript(HttpServletRequest request)
            throws IOException
    {
        String sparam = request.getParameter("s");
        String dev = request.getParameter("dev");
        StringBuilder result = new StringBuilder("[");
        StringBuilder js = new StringBuilder("");

        // Processes each stencil only once
        HashSet<String> done = new HashSet<String>();

        // Processes each lib only once
        HashSet<String> libsLoaded = new HashSet<String>();

        if (sparam != null)
        {
            String[] names = sparam.split(";");

            for (int i = 0; i < names.length; i++)
            {
                // ".." is rejected to block path traversal in resource names
                if (names[i].indexOf("..") < 0 && !done.contains(names[i])
                        && names[i].length() > 0)
                {
                    if (names[i].equals("*"))
                    {
                        js.append(readXmlFile("/js/shapes-14-6-5.min.js",
                                false));
                        result.append(
                                "'" + readXmlFile("/stencils.xml", true)
                                        + "'");
                    }
                    else
                    {
                        // Makes name canonical
                        names[i] = new File("/" + names[i]).getCanonicalPath()
                                .substring(1);

                        // Checks if any JS files are associated with the library
                        // name and injects the JS into the page
                        String[] libs = libraries.get(names[i]);

                        if (libs != null)
                        {
                            for (int j = 0; j < libs.length; j++)
                            {
                                if (!libsLoaded.contains(libs[j]))
                                {
                                    String tmp = stencils.get(libs[j]);
                                    libsLoaded.add(libs[j]);

                                    if (tmp == null)
                                    {
                                        try
                                        {
                                            tmp = readXmlFile(libs[j],
                                                    !libs[j].toLowerCase()
                                                            .endsWith(".js"));

                                            // Cache for later use
                                            if (tmp != null)
                                            {
                                                stencils.put(libs[j], tmp);
                                            }
                                        }
                                        catch (NullPointerException e)
                                        {
                                            // This seems possible according to access log so ignore stencil
                                        }
                                    }

                                    if (tmp != null)
                                    {
                                        // TODO: Add JS to Javascript code inline. This had to be done to quickly
                                        // add JS-based dynamic loading to the existing embed setup where everything
                                        // dynamic is passed via function call, so an indirection via eval must be
                                        // used even though the JS could be parsed directly by adding it to JS.
                                        if (libs[j].toLowerCase()
                                                .endsWith(".js"))
                                        {
                                            js.append(tmp);
                                        }
                                        else
                                        {
                                            if (result.length() > 1)
                                            {
                                                result.append(",");
                                            }

                                            result.append("'" + tmp + "'");
                                        }
                                    }
                                }
                            }
                        }
                        else
                        {
                            String tmp = stencils.get(names[i]);

                            if (tmp == null)
                            {
                                try
                                {
                                    tmp = readXmlFile(
                                            "/stencils/" + names[i] + ".xml",
                                            true);

                                    // Cache for later use
                                    if (tmp != null)
                                    {
                                        stencils.put(names[i], tmp);
                                    }
                                }
                                catch (NullPointerException e)
                                {
                                    // This seems possible according to access log so ignore stencil
                                }
                            }

                            if (tmp != null)
                            {
                                if (result.length() > 1)
                                {
                                    result.append(",");
                                }

                                result.append("'" + tmp + "'");
                            }
                        }
                    }

                    done.add(names[i]);
                }
            }
        }

        result.append("]");

        // LATER: Detect protocol of request in dev
        // mode to avoid security errors
        String proto = "https://";

        // Accumulated in a StringBuilder (was String +=) to avoid O(n^2)
        // copying; the emitted text is identical.
        StringBuilder setCachedUrls = new StringBuilder();
        String[] urls = request.getParameterValues("fetch");

        if (urls != null)
        {
            HashSet<String> completed = new HashSet<String>();
            int sizeLimit = Utils.MAX_SIZE;

            for (int i = 0; i < urls.length; i++)
            {
                // Checks if URL already fetched to avoid duplicates
                if (!completed.contains(urls[i]) && Utils.sanitizeUrl(urls[i]))
                {
                    completed.add(urls[i]);
                    URL url = new URL(urls[i]);
                    URLConnection connection = url.openConnection();
                    ((HttpURLConnection) connection)
                            .setInstanceFollowRedirects(false);
                    connection.setRequestProperty("User-Agent", "draw.io");
                    ByteArrayOutputStream stream = new ByteArrayOutputStream();

                    String contentLength = connection
                            .getHeaderField("Content-Length");

                    // If content length is available, use it to enforce maximum size
                    if (contentLength != null
                            && Long.parseLong(contentLength) > sizeLimit)
                    {
                        break;
                    }

                    sizeLimit -= Utils.copyRestricted(
                            connection.getInputStream(), stream);
                    setCachedUrls.append("GraphViewer.cachedUrls['"
                            + StringEscapeUtils.escapeEcmaScript(urls[i])
                            + "'] = decodeURIComponent('"
                            + StringEscapeUtils.escapeEcmaScript(
                                    Utils.encodeURIComponent(
                                            stream.toString("UTF-8"),
                                            Utils.CHARSET_FOR_URL_ENCODING))
                            + "');");
                }
            }
        }

        // Installs a callback to load the stencils after the viewer was injected
        return "window.onDrawioViewerLoad = function() {" + setCachedUrls
                + "mxStencilRegistry.parseStencilSets(" + result.toString()
                + ");" + js + "GraphViewer.processElements(); };"
                + "var t = document.getElementsByTagName('script');"
                + "if (t != null && t.length > 0) {"
                + "var script = document.createElement('script');"
                + "script.type = 'text/javascript';" + "script.src = '" + proto
                + ((dev != null && dev.equals("1")) ? "test" : "www")
                + ".draw.io/js/viewer-static.min.js';"
                + "t[0].parentNode.appendChild(script);}";
    }

    /**
     * Writes a minimal HTML status page showing the deployment timestamp.
     *
     * @param response the response the page is written to
     * @throws IOException if writing the response fails
     */
    public void writeStats(HttpServletResponse response) throws IOException
    {
        // Explicit UTF-8 (was platform-default) to match servlet conventions.
        PrintWriter writer = new PrintWriter(new OutputStreamWriter(
                response.getOutputStream(), StandardCharsets.UTF_8));
        writer.println("<html>");
        writer.println("<body>");
        writer.println("Deployed: " + lastModified);
        writer.println("</body>");
        writer.println("</html>");
        writer.flush();
    }

    /**
     * Reads a webapp resource, optionally normalizing XML content for
     * embedding in a single-quoted JavaScript string literal.
     *
     * @param filename resource path relative to the webapp root
     * @param xmlContent when true, escapes single quotes and strips tabs and
     *                   newlines so the content is JS-string-safe
     * @return the (possibly normalized) file contents
     * @throws IOException if the resource cannot be read
     */
    public String readXmlFile(String filename, boolean xmlContent)
            throws IOException
    {
        String result = readFile(filename);

        if (xmlContent)
        {
            result = result.replaceAll("'", "\\\\'").replaceAll("\t", "")
                    .replaceAll("\n", "");
        }

        return result;
    }

    /**
     * Reads a webapp resource into a string.
     *
     * @param filename resource path relative to the webapp root
     * @return the raw file contents
     * @throws IOException if reading fails
     */
    public String readFile(String filename) throws IOException
    {
        // FIX: try-with-resources closes the stream (previously leaked).
        // A missing resource yields is == null; try-with-resources skips the
        // close, and Utils.readInputStream(null) still throws the NPE that
        // createEmbedJavaScript deliberately catches to skip unknown stencils.
        try (InputStream is = getServletContext().getResourceAsStream(filename))
        {
            return Utils.readInputStream(is);
        }
    }
}
jgraph/drawio
src/main/java/com/mxgraph/online/EmbedServlet2.java
933
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- * * The contents of this file are subject to the Netscape Public * License Version 1.1 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at http://www.mozilla.org/NPL/ * * Software distributed under the License is distributed on an "AS * IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or * implied. See the License for the specific language governing * rights and limitations under the License. * * The Original Code is Rhino code, released * May 6, 1999. * * The Initial Developer of the Original Code is Netscape * Communications Corporation. Portions created by Netscape are * Copyright (C) 1997-1999 Netscape Communications Corporation. All * Rights Reserved. * * Contributor(s): * Mike Ang * Mike McCabe * * Alternatively, the contents of this file may be used under the * terms of the GNU Public License (the "GPL"), in which case the * provisions of the GPL are applicable instead of those above. * If you wish to allow use of your version of this file only * under the terms of the GPL and not to allow others to use your * version of this file under the NPL, indicate your decision by * deleting the provisions above and replace them with the notice * and other provisions required by the GPL. If you do not delete * the provisions above, a recipient may use your version of this * file under either the NPL or the GPL. */ // Modified by Google package com.google.gwt.dev.js.rhino; import org.jetbrains.annotations.NotNull; import java.io.IOException; import java.util.ArrayList; import java.util.List; /** * This class implements the JavaScript parser. * <p> * It is based on the C source files jsparse.c and jsparse.h in the jsref * package. 
* * @see TokenStream */ public class Parser { private List<ParserListener> listeners; public Parser(IRFactory nf, boolean insideFunction) { this.nf = nf; this.insideFunction = insideFunction; } public void addListener(ParserListener listener) { if (listeners == null) { listeners = new ArrayList<>(); } listeners.add(listener); } private void mustMatchToken(TokenStream ts, int toMatch, String messageId) throws IOException, JavaScriptException { int tt; if ((tt = ts.getToken()) != toMatch) { reportError(ts, messageId); ts.ungetToken(tt); // In case the parser decides to continue } } private void reportError(TokenStream ts, String messageId) throws JavaScriptException { this.ok = false; ts.reportSyntaxError(messageId, null); /* * Throw an exception to unwind the recursive descent parse. We use * JavaScriptException here even though it is really a different use of the * exception than it is usually used for. */ throw new JavaScriptException(messageId); } /* * Build a parse tree from the given TokenStream. * * @param ts the TokenStream to parse * * @return an Object representing the parsed program. If the parse fails, null * will be returned. (The parse failure will result in a call to the current * Context's ErrorReporter.) */ public Node parse(TokenStream ts) throws IOException { this.ok = true; sourceTop = 0; functionNumber = 0; int tt; // last token from getToken(); /* * so we have something to add nodes to until we've collected all the source */ Node tempBlock = nf.createLeaf(TokenStream.BLOCK, ts.tokenPosition); while (true) { ts.flags |= TokenStream.TSF_REGEXP; tt = ts.getToken(); ts.flags &= ~TokenStream.TSF_REGEXP; if (tt <= TokenStream.EOF) { break; } if (tt == TokenStream.FUNCTION) { try { tempBlock.addChildToBack(function(ts, false)); } catch (JavaScriptException e) { this.ok = false; break; } } else { ts.ungetToken(tt); tempBlock.addChildToBack(statement(ts)); } } if (!this.ok) { // XXX ts.clearPushback() call here? 
return null; } return nf.createScript(tempBlock); } /* * The C version of this function takes an argument list, which doesn't seem * to be needed for tree generation... it'd only be useful for checking * argument hiding, which I'm not doing anyway... */ private Node parseFunctionBody(TokenStream ts) throws IOException { int oldflags = ts.flags; ts.flags &= ~(TokenStream.TSF_RETURN_EXPR | TokenStream.TSF_RETURN_VOID); ts.flags |= TokenStream.TSF_FUNCTION; Node pn = nf.createBlock(ts.tokenPosition); try { int tt; while ((tt = ts.peekToken()) > TokenStream.EOF && tt != TokenStream.RC) { if (tt == TokenStream.FUNCTION) { ts.getToken(); pn.addChildToBack(function(ts, false)); } else { pn.addChildToBack(statement(ts)); } } } catch (JavaScriptException e) { this.ok = false; } finally { // also in finally block: // flushNewLines, clearPushback. ts.flags = oldflags; } return pn; } private Node function(TokenStream ts, boolean isExpr) throws IOException, JavaScriptException { if (listeners != null) { for (ParserListener listener : listeners) { listener.functionStarted(); } } CodePosition basePosition = ts.tokenPosition; Node nameNode; Node memberExprNode = null; // For generators boolean isGenerator = ts.matchToken(TokenStream.MUL); if (ts.matchToken(TokenStream.NAME)) { nameNode = nf.createName(ts.getString(), basePosition); if (!ts.matchToken(TokenStream.LP)) { if (Context.getContext().hasFeature(Context.FEATURE_MEMBER_EXPR_AS_FUNCTION_NAME)) { // Extension to ECMA: if 'function <name>' does not follow // by '(', assume <name> starts memberExpr Node memberExprHead = nameNode; nameNode = null; memberExprNode = memberExprTail(ts, false, memberExprHead); } mustMatchToken(ts, TokenStream.LP, "msg.no.paren.parms"); } } else if (ts.matchToken(TokenStream.LP)) { // Anonymous function nameNode = null; } else { if (Context.getContext().hasFeature(Context.FEATURE_MEMBER_EXPR_AS_FUNCTION_NAME)) { // Note that memberExpr can not start with '(' like // in (1+2).toString, because 
'function (' already // processed as anonymous function memberExprNode = memberExpr(ts, false); } mustMatchToken(ts, TokenStream.LP, "msg.no.paren.parms"); nameNode = null; } ++functionNumber; // Save current source top to restore it on exit not to include // function to parent source int savedSourceTop = sourceTop; int savedFunctionNumber = functionNumber; Node args; Node body; try { functionNumber = 0; args = nf.createLeaf(TokenStream.LP, ts.tokenPosition); if (!ts.matchToken(TokenStream.GWT)) { do { mustMatchToken(ts, TokenStream.NAME, "msg.no.parm"); String s = ts.getString(); args.addChildToBack(nf.createName(s, ts.tokenPosition)); } while (ts.matchToken(TokenStream.COMMA)); mustMatchToken(ts, TokenStream.GWT, "msg.no.paren.after.parms"); } mustMatchToken(ts, TokenStream.LC, "msg.no.brace.body"); body = parseFunctionBody(ts); body.setPosition(ts.tokenPosition); mustMatchToken(ts, TokenStream.RC, "msg.no.brace.after.body"); // skip the last EOL so nested functions work... } finally { sourceTop = savedSourceTop; functionNumber = savedFunctionNumber; } Node pn = nf.createFunction(nameNode, args, body, isGenerator, basePosition); if (memberExprNode != null) { pn = nf.createBinary(TokenStream.ASSIGN, TokenStream.NOP, memberExprNode, pn, basePosition); } // Add EOL but only if function is not part of expression, in which // case it gets SEMI + EOL from Statement. 
if (!isExpr) { wellTerminated(ts, TokenStream.FUNCTION); } if (listeners != null) { for (ParserListener listener : listeners) { listener.functionEnded(ts); } } return pn; } private Node statements(TokenStream ts) throws IOException { Node pn = nf.createBlock(ts.tokenPosition); int tt; while ((tt = ts.peekToken()) > TokenStream.EOF && tt != TokenStream.RC) { pn.addChildToBack(statement(ts)); } return pn; } private Node condition(TokenStream ts) throws IOException, JavaScriptException { Node pn; mustMatchToken(ts, TokenStream.LP, "msg.no.paren.cond"); pn = expr(ts, false); mustMatchToken(ts, TokenStream.GWT, "msg.no.paren.after.cond"); // there's a check here in jsparse.c that corrects = to == return pn; } private void wellTerminated(TokenStream ts, int lastExprType) throws IOException, JavaScriptException { int tt = ts.peekTokenSameLine(); if (tt == TokenStream.ERROR) { return; } if (tt != TokenStream.EOF && tt != TokenStream.EOL && tt != TokenStream.SEMI && tt != TokenStream.RC) { int version = Context.getContext().getLanguageVersion(); if ((tt == TokenStream.FUNCTION || lastExprType == TokenStream.FUNCTION) && (version < Context.VERSION_1_2)) { /* * Checking against version < 1.2 and version >= 1.0 in the above line * breaks old javascript, so we keep it this way for now... XXX warning * needed? */ } else { reportError(ts, "msg.no.semi.stmt"); } } } // match a NAME; return null if no match. private Node matchLabel(TokenStream ts) throws IOException, JavaScriptException { CodePosition position = ts.tokenPosition; int lineno = ts.getLineno(); String label = null; int tt; tt = ts.peekTokenSameLine(); if (tt == TokenStream.NAME) { ts.getToken(); label = ts.getString(); } if (lineno == ts.getLineno()) { wellTerminated(ts, TokenStream.ERROR); } return label != null ? 
nf.createString(label, position) : null; } private Node statement(TokenStream ts) throws IOException { CodePosition position = ts.lastPosition; try { Comment commentsBefore = getComments(ts); ts.collectCommentsAfter(); Node result = statementHelper(ts); result.setCommentsBeforeNode(commentsBefore); ts.collectCommentsAfter(); result.setCommentsAfterNode(getComments(ts)); return result; } catch (JavaScriptException e) { // skip to end of statement int t; do { t = ts.getToken(); } while (t != TokenStream.SEMI && t != TokenStream.EOL && t != TokenStream.EOF && t != TokenStream.ERROR); return nf.createExprStatement(nf.createName("error", position), position); } } /** * Whether the "catch (e: e instanceof Exception) { ... }" syntax is * implemented. */ private Node statementHelper(TokenStream ts) throws IOException, JavaScriptException { Node pn; int tt; int lastExprType; // For wellTerminated tt = ts.getToken(); CodePosition position = ts.tokenPosition; switch (tt) { case TokenStream.IF: { Node cond = condition(ts); Node ifTrue = statement(ts); Node ifFalse = null; if (ts.matchToken(TokenStream.ELSE)) { ifFalse = statement(ts); } pn = nf.createIf(cond, ifTrue, ifFalse, position); break; } case TokenStream.SWITCH: { pn = nf.createSwitch(position); Node curCase = null; // to kill warning Node caseStatements; mustMatchToken(ts, TokenStream.LP, "msg.no.paren.switch"); pn.addChildToBack(expr(ts, false)); mustMatchToken(ts, TokenStream.GWT, "msg.no.paren.after.switch"); mustMatchToken(ts, TokenStream.LC, "msg.no.brace.switch"); while ((tt = ts.getToken()) != TokenStream.RC && tt != TokenStream.EOF) { switch (tt) { case TokenStream.CASE: curCase = nf.createUnary(TokenStream.CASE, expr(ts, false), ts.tokenPosition); break; case TokenStream.DEFAULT: curCase = nf.createLeaf(TokenStream.DEFAULT, ts.tokenPosition); // XXX check that there isn't more than one default break; default: reportError(ts, "msg.bad.switch"); break; } mustMatchToken(ts, TokenStream.COLON, 
"msg.no.colon.case");
        caseStatements = nf.createLeaf(TokenStream.BLOCK, null);
        while ((tt = ts.peekToken()) != TokenStream.RC
            && tt != TokenStream.CASE
            && tt != TokenStream.DEFAULT
            && tt != TokenStream.EOF) {
          caseStatements.addChildToBack(statement(ts));
        }
        // assert cur_case
        if (curCase != null) {
          curCase.addChildToBack(caseStatements);
        }
        pn.addChildToBack(curCase);
      }
      break;
    }

    case TokenStream.WHILE: {
      Node cond = condition(ts);
      Node body = statement(ts);
      pn = nf.createWhile(cond, body, position);
      break;
    }

    case TokenStream.DO: {
      Node body = statement(ts);
      mustMatchToken(ts, TokenStream.WHILE, "msg.no.while.do");
      Node cond = condition(ts);
      pn = nf.createDoWhile(body, cond, position);
      break;
    }

    case TokenStream.FOR: {
      Node init; // Node init is also foo in 'foo in Object'
      Node cond; // Node cond is also object in 'foo in Object'
      Node incr = null; // to kill warning
      Node body;

      mustMatchToken(ts, TokenStream.LP, "msg.no.paren.for");
      tt = ts.peekToken();
      if (tt == TokenStream.SEMI) {
        // Empty init clause: "for (; ...".
        init = nf.createLeaf(TokenStream.VOID, null);
      } else {
        if (tt == TokenStream.VAR) {
          // set init to a var list or initial
          ts.getToken(); // throw away the 'var' token
          init = variables(ts, true, ts.tokenPosition);
        } else {
          init = expr(ts, true);
        }
      }

      // A RELOP whose operator is IN distinguishes "for (x in obj)" from an
      // ordinary three-clause for loop.
      tt = ts.peekToken();
      if (tt == TokenStream.RELOP && ts.getOp() == TokenStream.IN) {
        ts.matchToken(TokenStream.RELOP);
        // 'cond' is the object over which we're iterating
        cond = expr(ts, false);
      } else {
        // ordinary for loop
        mustMatchToken(ts, TokenStream.SEMI, "msg.no.semi.for");
        if (ts.peekToken() == TokenStream.SEMI) {
          // no loop condition
          cond = nf.createLeaf(TokenStream.VOID, null);
        } else {
          cond = expr(ts, false);
        }

        mustMatchToken(ts, TokenStream.SEMI, "msg.no.semi.for.cond");
        if (ts.peekToken() == TokenStream.GWT) {
          incr = nf.createLeaf(TokenStream.VOID, null);
        } else {
          incr = expr(ts, false);
        }
      }

      mustMatchToken(ts, TokenStream.GWT, "msg.no.paren.for.ctrl");
      body = statement(ts);

      if (incr == null) {
        // cond could be null if 'in obj' got eaten by the init node.
        pn = nf.createForIn(init, cond, body, position);
      } else {
        pn = nf.createFor(init, cond, incr, body, position);
      }
      break;
    }

    case TokenStream.TRY: {
      Node tryblock;
      Node catchblocks;
      Node finallyblock = null;

      tryblock = statement(ts);
      catchblocks = nf.createLeaf(TokenStream.BLOCK, null);

      // Only one unconditional ("default") catch is reachable; any catch after
      // it is reported as unreachable.
      boolean sawDefaultCatch = false;
      int peek = ts.peekToken();
      if (peek == TokenStream.CATCH) {
        while (ts.matchToken(TokenStream.CATCH)) {
          if (sawDefaultCatch) {
            reportError(ts, "msg.catch.unreachable");
          }
          CodePosition catchPosition = ts.tokenPosition;
          mustMatchToken(ts, TokenStream.LP, "msg.no.paren.catch");
          mustMatchToken(ts, TokenStream.NAME, "msg.bad.catchcond");
          Node varName = nf.createName(ts.getString(), ts.tokenPosition);
          Node catchCond = null;
          if (ts.matchToken(TokenStream.IF)) {
            // Conditional catch: "catch (e if cond)".
            catchCond = expr(ts, false);
          } else {
            sawDefaultCatch = true;
          }
          mustMatchToken(ts, TokenStream.GWT, "msg.bad.catchcond");
          mustMatchToken(ts, TokenStream.LC, "msg.no.brace.catchblock");
          catchblocks.addChildToBack(nf.createCatch(varName, catchCond, statements(ts), catchPosition));
          mustMatchToken(ts, TokenStream.RC, "msg.no.brace.after.body");
        }
      } else if (peek != TokenStream.FINALLY) {
        mustMatchToken(ts, TokenStream.FINALLY, "msg.try.no.catchfinally");
      }

      if (ts.matchToken(TokenStream.FINALLY)) {
        finallyblock = statement(ts);
      }
      pn = nf.createTryCatchFinally(tryblock, catchblocks, finallyblock, position);
      break;
    }

    case TokenStream.THROW: {
      int lineno = ts.getLineno();
      pn = nf.createThrow(expr(ts, false), position);
      // Same-line check enforces that the thrown expression is terminated
      // (automatic-semicolon-insertion handling).
      if (lineno == ts.getLineno()) {
        wellTerminated(ts, TokenStream.ERROR);
      }
      break;
    }

    case TokenStream.BREAK: {
      // matchLabel only matches if there is one
      Node label = matchLabel(ts);
      pn = nf.createBreak(label, position);
      break;
    }

    case TokenStream.CONTINUE: {
      // matchLabel only matches if there is one
      Node label = matchLabel(ts);
      pn = nf.createContinue(label, position);
      break;
    }

    case TokenStream.DEBUGGER: {
      pn = nf.createDebugger(position);
      break;
    }

    case TokenStream.WITH: {
      // bruce: we don't support this in JSNI code because it's impossible
      // to identify bindings even passably well
      // reportError(ts, "msg.jsni.unsupported.with");
      mustMatchToken(ts, TokenStream.LP, "msg.no.paren.with");
      Node obj = expr(ts, false);
      mustMatchToken(ts, TokenStream.GWT, "msg.no.paren.after.with");
      Node body = statement(ts);
      pn = nf.createWith(obj, body, position);
      break;
    }

    case TokenStream.VAR: {
      int lineno = ts.getLineno();
      pn = variables(ts, false, position);
      if (ts.getLineno() == lineno) {
        wellTerminated(ts, TokenStream.ERROR);
      }
      break;
    }

    case TokenStream.RETURN: {
      Node retExpr = null;
      int lineno;

      // bail if we're not in a (toplevel) function
      if ((!insideFunction) && ((ts.flags & TokenStream.TSF_FUNCTION) == 0)) {
        reportError(ts, "msg.bad.return");
      }

      /* This is ugly, but we don't want to require a semicolon. */
      ts.flags |= TokenStream.TSF_REGEXP;
      tt = ts.peekTokenSameLine();
      ts.flags &= ~TokenStream.TSF_REGEXP;

      if (tt != TokenStream.EOF && tt != TokenStream.EOL && tt != TokenStream.SEMI && tt != TokenStream.RC) {
        lineno = ts.getLineno();
        retExpr = expr(ts, false);
        if (ts.getLineno() == lineno) {
          wellTerminated(ts, TokenStream.ERROR);
        }
        ts.flags |= TokenStream.TSF_RETURN_EXPR;
      } else {
        ts.flags |= TokenStream.TSF_RETURN_VOID;
      }

      // XXX ASSERT pn
      pn = nf.createReturn(retExpr, position);
      break;
    }

    case TokenStream.LC:
      pn = statements(ts);
      mustMatchToken(ts, TokenStream.RC, "msg.no.brace.block");
      break;

    case TokenStream.ERROR:
      // Fall thru, to have a node for error recovery to work on
    case TokenStream.EOL:
    case TokenStream.SEMI:
      pn = nf.createLeaf(TokenStream.VOID, ts.tokenPosition);
      break;

    default: {
      // Expression statement, or a label if the expression is a bare NAME
      // followed by ':'.
      lastExprType = tt;
      int tokenno = ts.getTokenno();
      ts.ungetToken(tt);
      int lineno = ts.getLineno();

      pn = expr(ts, false);

      if (ts.peekToken() == TokenStream.COLON) {
        /*
         * check that the last thing the tokenizer returned was a NAME and
         * that only one token was consumed.
         */
        if (lastExprType != TokenStream.NAME || (ts.getTokenno() != tokenno)) {
          reportError(ts, "msg.bad.label");
        }
        ts.getToken(); // eat the COLON

        /*
         * in the C source, the label is associated with the statement that
         * follows: nf.addChildToBack(pn, statement(ts));
         */
        String name = ts.getString();
        pn = nf.createLabel(nf.createString(name, position), position);

        // bruce: added to make it easier to bind labels to the
        // statements they modify
        //
        pn.addChildToBack(statement(ts));

        // depend on decompiling lookahead to guess that that
        // last name was a label.
        return pn;
      }

      if (lastExprType == TokenStream.FUNCTION) {
        if (nf.getLeafType(pn) != TokenStream.FUNCTION) {
          reportError(ts, "msg.syntax");
        }
      }

      pn = nf.createExprStatement(pn, position);

      /*
       * Check explicitly against (multi-line) function statement.
       *
       * lastExprEndLine is a hack to fix an automatic semicolon insertion
       * problem with function expressions; the ts.getLineno() == lineno check
       * was firing after a function definition even though the next statement
       * was on a new line, because speculative getToken calls advanced the
       * line number even when they didn't succeed.
       */
      if (ts.getLineno() == lineno
          || (lastExprType == TokenStream.FUNCTION && ts.getLineno() == lastExprEndLine)) {
        wellTerminated(ts, lastExprType);
      }
      break;
    }
  }

  // Optional trailing semicolon after any statement.
  ts.matchToken(TokenStream.SEMI);
  return pn;
}

/**
 * Parses a comma-separated list of variable declarations ("a", "a = expr", ...).
 * The leading 'var' token must already have been consumed by the caller.
 */
private Node variables(TokenStream ts, boolean inForInit, CodePosition position)
    throws IOException, JavaScriptException {
  Node pn = nf.createVariables(position);

  while (true) {
    Node name;
    Node init;
    mustMatchToken(ts, TokenStream.NAME, "msg.bad.var");
    String s = ts.getString();
    name = nf.createName(s, ts.tokenPosition);

    // omitted check for argument hiding

    if (ts.matchToken(TokenStream.ASSIGN)) {
      // Compound-assignment operators ("+=" etc.) are invalid initializers.
      if (ts.getOp() != TokenStream.NOP) {
        reportError(ts, "msg.bad.var.init");
      }
      init = assignExpr(ts, inForInit);
      name.addChildToBack(init);
    }
    pn.addChildToBack(name);
    if (!ts.matchToken(TokenStream.COMMA)) {
      break;
    }
  }
  return pn;
}

/** Parses a full expression, including the comma operator. */
public Node expr(TokenStream ts, boolean inForInit) throws IOException, JavaScriptException {
  Node pn = assignExpr(ts, inForInit);
  while (ts.matchToken(TokenStream.COMMA)) {
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(TokenStream.COMMA, pn, assignExpr(ts, inForInit), position);
  }
  return pn;
}

/**
 * Parses an assignment expression (right-associative via the recursive call)
 * and attaches the comments collected before/after it to the resulting node.
 */
private Node assignExpr(TokenStream ts, boolean inForInit) throws IOException, JavaScriptException {
  Comment commentBeforeNode = getComments(ts);
  Node pn = condExpr(ts, inForInit);
  pn.setCommentsBeforeNode(commentBeforeNode);
  if (ts.matchToken(TokenStream.ASSIGN)) {
    // omitted: "invalid assignment left-hand side" check.
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(TokenStream.ASSIGN, ts.getOp(), pn, assignExpr(ts, inForInit), position);
  }
  ts.collectCommentsAfter();
  pn.setCommentsAfterNode(getComments(ts));
  return pn;
}

/** Parses a ternary conditional ("cond ? ifTrue : ifFalse"). */
private Node condExpr(TokenStream ts, boolean inForInit) throws IOException, JavaScriptException {
  Node pn = orExpr(ts, inForInit);
  if (ts.matchToken(TokenStream.HOOK)) {
    CodePosition position = ts.tokenPosition;
    Node ifTrue = assignExpr(ts, false);
    mustMatchToken(ts, TokenStream.COLON, "msg.no.colon.cond");
    Node ifFalse = assignExpr(ts, inForInit);
    return nf.createTernary(pn, ifTrue, ifFalse, position);
  }
  return pn;
}

// The following methods form the usual recursive-descent precedence ladder:
// || -> && -> | -> ^ -> & -> equality -> relational -> shift -> additive ->
// multiplicative -> unary.

/** Parses the '||' precedence level. */
private Node orExpr(TokenStream ts, boolean inForInit) throws IOException, JavaScriptException {
  Node pn = andExpr(ts, inForInit);
  while (ts.matchToken(TokenStream.OR)) {
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(TokenStream.OR, pn, andExpr(ts, inForInit), position);
  }
  return pn;
}

/** Parses the '&&' precedence level. */
private Node andExpr(TokenStream ts, boolean inForInit) throws IOException, JavaScriptException {
  Node pn = bitOrExpr(ts, inForInit);
  while (ts.matchToken(TokenStream.AND)) {
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(TokenStream.AND, pn, bitOrExpr(ts, inForInit), position);
  }
  return pn;
}

/** Parses the '|' precedence level. */
private Node bitOrExpr(TokenStream ts, boolean inForInit) throws IOException, JavaScriptException {
  Node pn = bitXorExpr(ts, inForInit);
  while (ts.matchToken(TokenStream.BITOR)) {
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(TokenStream.BITOR, pn, bitXorExpr(ts, inForInit), position);
  }
  return pn;
}

/** Parses the '^' precedence level. */
private Node bitXorExpr(TokenStream ts, boolean inForInit) throws IOException, JavaScriptException {
  Node pn = bitAndExpr(ts, inForInit);
  while (ts.matchToken(TokenStream.BITXOR)) {
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(TokenStream.BITXOR, pn, bitAndExpr(ts, inForInit), position);
  }
  return pn;
}

/** Parses the '&' precedence level. */
private Node bitAndExpr(TokenStream ts, boolean inForInit) throws IOException, JavaScriptException {
  Node pn = eqExpr(ts, inForInit);
  while (ts.matchToken(TokenStream.BITAND)) {
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(TokenStream.BITAND, pn, eqExpr(ts, inForInit), position);
  }
  return pn;
}

/** Parses the equality precedence level ('==', '!=', '===', '!=='). */
private Node eqExpr(TokenStream ts, boolean inForInit) throws IOException, JavaScriptException {
  Node pn = relExpr(ts, inForInit);
  while (ts.matchToken(TokenStream.EQOP)) {
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(TokenStream.EQOP, ts.getOp(), pn, relExpr(ts, inForInit), position);
  }
  return pn;
}

/**
 * Parses the relational precedence level. Inside a for-loop init clause the
 * 'in' operator is pushed back so the caller can recognize a for-in loop.
 */
private Node relExpr(TokenStream ts, boolean inForInit) throws IOException, JavaScriptException {
  Node pn = shiftExpr(ts);
  CodePosition position = ts.tokenPosition;
  while (ts.matchToken(TokenStream.RELOP)) {
    int op = ts.getOp();
    if (inForInit && op == TokenStream.IN) {
      ts.ungetToken(TokenStream.RELOP);
      break;
    }
    pn = nf.createBinary(TokenStream.RELOP, op, pn, shiftExpr(ts), position);
    position = ts.tokenPosition;
  }
  return pn;
}

/** Parses the shift precedence level ('<<', '>>', '>>>'). */
private Node shiftExpr(TokenStream ts) throws IOException, JavaScriptException {
  Node pn = addExpr(ts);
  while (ts.matchToken(TokenStream.SHOP)) {
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(TokenStream.SHOP, ts.getOp(), pn, addExpr(ts), position);
  }
  return pn;
}

/** Parses the additive precedence level ('+', '-'). */
private Node addExpr(TokenStream ts) throws IOException, JavaScriptException {
  int tt;
  Node pn = mulExpr(ts);
  while ((tt = ts.getToken()) == TokenStream.ADD || tt == TokenStream.SUB) {
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(tt, pn, mulExpr(ts), position);
  }
  // The loop always consumes one token too many; push it back.
  ts.ungetToken(tt);
  return pn;
}

/** Parses the multiplicative precedence level ('*', '/', '%'). */
private Node mulExpr(TokenStream ts) throws IOException, JavaScriptException {
  int tt;
  Node pn = unaryExpr(ts);
  while ((tt = ts.peekToken()) == TokenStream.MUL || tt == TokenStream.DIV || tt == TokenStream.MOD) {
    tt = ts.getToken();
    CodePosition position = ts.tokenPosition;
    pn = nf.createBinary(tt, pn, unaryExpr(ts), position);
  }
  return pn;
}

/** Parses prefix operators, delete, and pre/postfix increment/decrement. */
private Node unaryExpr(TokenStream ts) throws IOException, JavaScriptException {
  int tt;

  ts.flags |= TokenStream.TSF_REGEXP;
  tt = ts.getToken();
  ts.flags &= ~TokenStream.TSF_REGEXP;
  CodePosition position = ts.tokenPosition;

  switch (tt) {
    case TokenStream.YIELD:
      return nf.createUnary(TokenStream.YIELD, ts.getOp(), unaryExpr(ts), position);

    case TokenStream.UNARYOP:
      return nf.createUnary(TokenStream.UNARYOP, ts.getOp(), unaryExpr(ts), position);

    case TokenStream.ADD:
    case TokenStream.SUB:
      // Unary plus/minus are represented as UNARYOP nodes carrying the token.
      return nf.createUnary(TokenStream.UNARYOP, tt, unaryExpr(ts), position);

    case TokenStream.INC:
    case TokenStream.DEC:
      return nf.createUnary(tt, TokenStream.PRE, memberExpr(ts, true), position);

    case TokenStream.DELPROP: {
      Node argument = unaryExpr(ts);
      if (!isValidDeleteArgument(argument)) {
        // NOTE(review): the message key "msg.wrong.delete argument" contains a
        // space — verify against the message bundle (other keys are dotted).
        Context.reportError("msg.wrong.delete argument", argument.getPosition(), ts.lastPosition);
      }
      return nf.createUnary(TokenStream.DELPROP, argument, position);
    }

    case TokenStream.ERROR:
      break;

    default:
      ts.ungetToken(tt);

      int lineno = ts.getLineno();

      Node pn = memberExpr(ts, true);

      /*
       * don't look across a newline boundary for a postfix incop.
       *
       * the rhino scanner seems to work differently than the js scanner here;
       * in js, it works to have the line number check precede the peekToken
       * calls. It'd be better if they had similar behavior...
       */
      int peeked;
      if (((peeked = ts.peekToken()) == TokenStream.INC || peeked == TokenStream.DEC)
          && ts.getLineno() == lineno) {
        int pf = ts.getToken();
        position = ts.tokenPosition;
        return nf.createUnary(pf, TokenStream.POST, pn, position);
      }
      return pn;
  }
  return nf.createName("err", position); // Only reached on error. Try to continue.
}

/** True when the delete operand is a property or element access. */
private static boolean isValidDeleteArgument(@NotNull Node node) {
  return node.type == TokenStream.GETPROP || node.type == TokenStream.GETELEM;
}

/**
 * Parses a call/new argument list into listNode. The opening parenthesis has
 * already been consumed; TokenStream.GWT appears to act as the closing
 * parenthesis here (see the "msg.no.paren.*" messages) — confirm against
 * TokenStream.
 */
private Node argumentList(TokenStream ts, Node listNode) throws IOException, JavaScriptException {
  boolean matched;
  ts.flags |= TokenStream.TSF_REGEXP;
  matched = ts.matchToken(TokenStream.GWT);
  ts.flags &= ~TokenStream.TSF_REGEXP;
  if (!matched) {
    do {
      listNode.addChildToBack(assignExpr(ts, false));
    } while (ts.matchToken(TokenStream.COMMA));
    mustMatchToken(ts, TokenStream.GWT, "msg.no.paren.arg");
  }
  return listNode;
}

/** Parses a member expression, handling 'new' (optionally with arguments). */
private Node memberExpr(TokenStream ts, boolean allowCallSyntax) throws IOException, JavaScriptException {
  int tt;
  Node pn;
  CodePosition position = ts.tokenPosition;

  /* Check for new expressions. */
  ts.flags |= TokenStream.TSF_REGEXP;
  tt = ts.peekToken();
  ts.flags &= ~TokenStream.TSF_REGEXP;
  if (tt == TokenStream.NEW) {
    /* Eat the NEW token. */
    ts.getToken();

    /* Make a NEW node to append to. */
    pn = nf.createLeaf(TokenStream.NEW, position);
    pn.addChildToBack(memberExpr(ts, false));

    if (ts.matchToken(TokenStream.LP)) {
      /* Add the arguments to pn, if any are supplied. */
      pn = argumentList(ts, pn);
    }

    /*
     * XXX there's a check in the C source against "too many constructor
     * arguments" - how many do we claim to support?
     */

    /*
     * Experimental syntax: allow an object literal to follow a new
     * expression, which will mean a kind of anonymous class built with the
     * JavaAdapter. the object literal will be passed as an additional
     * argument to the constructor.
     */
    tt = ts.peekToken();
    if (tt == TokenStream.LC) {
      pn.addChildToBack(primaryExpr(ts));
    }
  } else {
    pn = primaryExpr(ts);
  }

  return memberExprTail(ts, allowCallSyntax, pn);
}

/** Parses the '.name', '[expr]' and '(args)' suffixes of a member expression. */
private Node memberExprTail(
    TokenStream ts,
    boolean allowCallSyntax,
    Node pn
) throws IOException, JavaScriptException {
  lastExprEndLine = ts.getLineno();
  int tt;
  while ((tt = ts.getToken()) > TokenStream.EOF) {
    CodePosition position = ts.tokenPosition;
    if (tt == TokenStream.DOT) {
      // Allow reserved words as property names after '.'.
      ts.treatKeywordAsIdentifier = true;
      mustMatchToken(ts, TokenStream.NAME, "msg.no.name.after.dot");
      ts.treatKeywordAsIdentifier = false;
      pn = nf.createBinary(TokenStream.DOT, pn, nf.createName(ts.getString(), ts.tokenPosition), position);
      /*
       * pn = nf.createBinary(ts.DOT, pn, memberExpr(ts)) is the version in
       * Brendan's IR C version. Not in ECMA... does it reflect the 'new'
       * operator syntax he mentioned?
       */
      lastExprEndLine = ts.getLineno();
    } else if (tt == TokenStream.LB) {
      pn = nf.createBinary(TokenStream.LB, pn, expr(ts, false), position);
      mustMatchToken(ts, TokenStream.RB, "msg.no.bracket.index");
      lastExprEndLine = ts.getLineno();
    } else if (allowCallSyntax && tt == TokenStream.LP) {
      /* make a call node */
      pn = nf.createUnary(TokenStream.CALL, pn, position);

      /* Add the arguments to pn, if any are supplied. */
      pn = argumentList(ts, pn);
      lastExprEndLine = ts.getLineno();
    } else {
      ts.ungetToken(tt);
      break;
    }
  }
  return pn;
}

/** Parses a primary expression and attaches surrounding comments to it. */
public Node primaryExpr(TokenStream ts) throws IOException, JavaScriptException {
  Comment commentsBeforeNode = getComments(ts);
  Node node = primaryExprHelper(ts);
  node.setCommentsBeforeNode(commentsBeforeNode);
  ts.collectCommentsAfter();
  node.setCommentsAfterNode(getComments(ts));
  return node;
}

/** Parses literals, names, parenthesized expressions and function expressions. */
private Node primaryExprHelper(TokenStream ts) throws IOException, JavaScriptException {
  int tt;
  Node pn;

  ts.flags |= TokenStream.TSF_REGEXP;
  tt = ts.getToken();
  CodePosition position = ts.tokenPosition;
  ts.flags &= ~TokenStream.TSF_REGEXP;

  switch (tt) {
    case TokenStream.FUNCTION:
      return function(ts, true);

    case TokenStream.LB: {
      pn = nf.createLeaf(TokenStream.ARRAYLIT, position);

      ts.flags |= TokenStream.TSF_REGEXP;
      boolean matched = ts.matchToken(TokenStream.RB);
      ts.flags &= ~TokenStream.TSF_REGEXP;

      if (!matched) {
        do {
          ts.flags |= TokenStream.TSF_REGEXP;
          tt = ts.peekToken();
          ts.flags &= ~TokenStream.TSF_REGEXP;

          if (tt == TokenStream.RB) { // to fix [,,,].length behavior...
            break;
          }

          if (tt == TokenStream.COMMA) {
            // Elision: a hole in the array becomes an explicit 'undefined'.
            pn.addChildToBack(nf.createLeaf(TokenStream.PRIMARY, TokenStream.UNDEFINED, position));
          } else {
            pn.addChildToBack(assignExpr(ts, false));
          }
        } while (ts.matchToken(TokenStream.COMMA));
        mustMatchToken(ts, TokenStream.RB, "msg.no.bracket.arg");
      }

      return nf.createArrayLiteral(pn);
    }

    case TokenStream.LC: {
      pn = nf.createLeaf(TokenStream.OBJLIT, position);

      if (!ts.matchToken(TokenStream.RC)) {
        commaloop:
        do {
          Node property;

          tt = ts.getToken();
          switch (tt) {
            // map NAMEs to STRINGs in object literal context.
            case TokenStream.NAME:
            case TokenStream.STRING:
              property = nf.createString(ts.getString(), ts.tokenPosition);
              break;
            case TokenStream.NUMBER_INT:
              property = nf.createIntNumber(ts.getNumber(), ts.tokenPosition);
              break;
            case TokenStream.NUMBER:
              double d = ts.getNumber();
              property = nf.createNumber(d, ts.tokenPosition);
              break;
            case TokenStream.RC:
              // trailing comma is OK.
              ts.ungetToken(tt);
              break commaloop;
            default:
              reportError(ts, "msg.bad.prop");
              break commaloop;
          }
          mustMatchToken(ts, TokenStream.COLON, "msg.no.colon.prop");

          // OBJLIT is used as ':' in object literal for
          // decompilation to solve spacing ambiguity.
          pn.addChildToBack(property);
          pn.addChildToBack(assignExpr(ts, false));
        } while (ts.matchToken(TokenStream.COMMA));

        mustMatchToken(ts, TokenStream.RC, "msg.no.brace.prop");
      }
      return nf.createObjectLiteral(pn);
    }

    case TokenStream.LP:
      /*
       * Brendan's IR-jsparse.c makes a new node tagged with TOK_LP here...
       * I'm not sure I understand why. Isn't the grouping already implicit in
       * the structure of the parse tree? also TOK_LP is already overloaded (I
       * think) in the C IR as 'function call.'
       */
      pn = expr(ts, false);
      mustMatchToken(ts, TokenStream.GWT, "msg.no.paren");
      return pn;

    case TokenStream.IMPORT:
      // for import() and import.meta syntax
      if (ts.peekToken() != TokenStream.LP && ts.peekToken() != TokenStream.DOT) {
        reportError(ts, "msg.syntax");
      }
      return nf.createName(TokenStream.tokenToName(TokenStream.IMPORT), position);

    case TokenStream.NAME:
      String name = ts.getString();
      return nf.createName(name, position);

    case TokenStream.NUMBER_INT:
      return nf.createIntNumber(ts.getNumber(), position);

    case TokenStream.NUMBER:
      double d = ts.getNumber();
      return nf.createNumber(d, position);

    case TokenStream.STRING:
      String s = ts.getString();
      return nf.createString(s, position);

    case TokenStream.REGEXP: {
      String flags = ts.regExpFlags;
      ts.regExpFlags = null;
      String re = ts.getString();
      return nf.createRegExp(re, flags, position);
    }

    case TokenStream.PRIMARY:
      return nf.createLeaf(TokenStream.PRIMARY, ts.getOp(), position);

    case TokenStream.ERROR:
      /* the scanner or one of its subroutines reported the error. */
      break;

    default:
      reportError(ts, "msg.syntax");
      break;
  }
  return null; // should never reach here
}

/** Takes and releases the head comment queued on the token stream, if any. */
private Comment getComments(TokenStream ts) {
  Comment comment = ts.getHeadComment();
  if (comment != null) {
    ts.releaseComments();
  }
  return comment;
}

private int lastExprEndLine; // Hack to handle function expr termination.
private final IRFactory nf;
private boolean ok; // Did the parse encounter an error?
private int sourceTop;
private int functionNumber;
private final boolean insideFunction;
}
/* ——— end of com.google.gwt.dev.js.rhino.Parser fragment ———
 * Extraction artifact (repository metadata), preserved verbatim:
 *   JetBrains/kotlin
 *   js/js.parser/src/com/google/gwt/dev/js/rhino/Parser.java
 *   934
 * A new compilation unit (com.bumptech.glide.GlideBuilder) begins below. */
package com.bumptech.glide;

import android.content.Context;
import android.graphics.Bitmap;
import android.os.Build;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.collection.ArrayMap;
import com.bumptech.glide.Glide.RequestOptionsFactory;
import com.bumptech.glide.GlideExperiments.Experiment;
import com.bumptech.glide.load.DataSource;
import com.bumptech.glide.load.engine.Engine;
import com.bumptech.glide.load.engine.GlideException;
import com.bumptech.glide.load.engine.bitmap_recycle.ArrayPool;
import com.bumptech.glide.load.engine.bitmap_recycle.BitmapPool;
import com.bumptech.glide.load.engine.bitmap_recycle.BitmapPoolAdapter;
import com.bumptech.glide.load.engine.bitmap_recycle.LruArrayPool;
import com.bumptech.glide.load.engine.bitmap_recycle.LruBitmapPool;
import com.bumptech.glide.load.engine.cache.DiskCache;
import com.bumptech.glide.load.engine.cache.InternalCacheDiskCacheFactory;
import com.bumptech.glide.load.engine.cache.LruResourceCache;
import com.bumptech.glide.load.engine.cache.MemoryCache;
import com.bumptech.glide.load.engine.cache.MemorySizeCalculator;
import com.bumptech.glide.load.engine.executor.GlideExecutor;
import com.bumptech.glide.manager.ConnectivityMonitorFactory;
import com.bumptech.glide.manager.DefaultConnectivityMonitorFactory;
import com.bumptech.glide.manager.RequestManagerRetriever;
import com.bumptech.glide.manager.RequestManagerRetriever.RequestManagerFactory;
import com.bumptech.glide.module.AppGlideModule;
import com.bumptech.glide.module.GlideModule;
import com.bumptech.glide.request.BaseRequestOptions;
import com.bumptech.glide.request.RequestListener;
import com.bumptech.glide.request.RequestOptions;
import com.bumptech.glide.request.target.Target;
import com.bumptech.glide.util.Preconditions;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/** A builder class for setting default structural classes for Glide to use. */
@SuppressWarnings("PMD.ImmutableField")
public final class GlideBuilder {
  private final Map<Class<?>, TransitionOptions<?, ?>> defaultTransitionOptions = new ArrayMap<>();
  private final GlideExperiments.Builder glideExperimentsBuilder = new GlideExperiments.Builder();
  // All components below are optional; fields left null here are presumably
  // replaced with defaults when the builder is built — TODO confirm against
  // the (out-of-view) build method.
  private Engine engine;
  private BitmapPool bitmapPool;
  private ArrayPool arrayPool;
  private MemoryCache memoryCache;
  private GlideExecutor sourceExecutor;
  private GlideExecutor diskCacheExecutor;
  private DiskCache.Factory diskCacheFactory;
  private MemorySizeCalculator memorySizeCalculator;
  private ConnectivityMonitorFactory connectivityMonitorFactory;
  // Defaults to Log.INFO; setLogLevel validates the range VERBOSE..ERROR.
  private int logLevel = Log.INFO;
  // Default factory produces a fresh, empty RequestOptions per call.
  private RequestOptionsFactory defaultRequestOptionsFactory =
      new RequestOptionsFactory() {
        @NonNull
        @Override
        public RequestOptions build() {
          return new RequestOptions();
        }
      };
  @Nullable private RequestManagerFactory requestManagerFactory;
  private GlideExecutor animationExecutor;
  private boolean isActiveResourceRetentionAllowed;
  @Nullable private List<RequestListener<Object>> defaultRequestListeners;

  /**
   * Sets the {@link com.bumptech.glide.load.engine.bitmap_recycle.BitmapPool} implementation to use
   * to store and retrieve reused {@link android.graphics.Bitmap}s.
   *
   * @param bitmapPool The pool to use.
   * @return This builder.
   */
  @NonNull
  public GlideBuilder setBitmapPool(@Nullable BitmapPool bitmapPool) {
    this.bitmapPool = bitmapPool;
    return this;
  }

  /**
   * Sets the {@link ArrayPool} implementation to allow variable sized arrays to be stored and
   * retrieved as needed.
   *
   * @param arrayPool The pool to use.
   * @return This builder.
   */
  @NonNull
  public GlideBuilder setArrayPool(@Nullable ArrayPool arrayPool) {
    this.arrayPool = arrayPool;
    return this;
  }

  /**
   * Sets the {@link com.bumptech.glide.load.engine.cache.MemoryCache} implementation to store
   * {@link com.bumptech.glide.load.engine.Resource}s that are not currently in use.
   *
   * @param memoryCache The cache to use.
   * @return This builder.
   */
  // Public API.
  @SuppressWarnings("WeakerAccess")
  @NonNull
  public GlideBuilder setMemoryCache(@Nullable MemoryCache memoryCache) {
    this.memoryCache = memoryCache;
    return this;
  }

  /**
   * Sets the {@link com.bumptech.glide.load.engine.cache.DiskCache.Factory} implementation to use
   * to construct the {@link com.bumptech.glide.load.engine.cache.DiskCache} to use to store {@link
   * com.bumptech.glide.load.engine.Resource} data on disk.
   *
   * @param diskCacheFactory The disk cache factory to use.
   * @return This builder.
   */
  // Public API.
  @SuppressWarnings("WeakerAccess")
  @NonNull
  public GlideBuilder setDiskCache(@Nullable DiskCache.Factory diskCacheFactory) {
    this.diskCacheFactory = diskCacheFactory;
    return this;
  }

  /**
   * Sets the {@link GlideExecutor} to use when retrieving {@link
   * com.bumptech.glide.load.engine.Resource}s that are not already in the cache.
   *
   * <p>The thread count defaults to the number of cores available on the device, with a maximum of
   * 4.
   *
   * <p>Use the {@link GlideExecutor#newSourceExecutor()} methods if you'd like to specify options
   * for the source executor.
   *
   * @param service The ExecutorService to use.
   * @return This builder.
   * @see #setDiskCacheExecutor(GlideExecutor)
   * @see GlideExecutor
   * @deprecated Use {@link #setSourceExecutor(GlideExecutor)}
   */
  @Deprecated
  public GlideBuilder setResizeExecutor(@Nullable GlideExecutor service) {
    return setSourceExecutor(service);
  }

  /**
   * Sets the {@link GlideExecutor} to use when retrieving {@link
   * com.bumptech.glide.load.engine.Resource}s that are not already in the cache.
   *
   * <p>The thread count defaults to the number of cores available on the device, with a maximum of
   * 4.
   *
   * <p>Use the {@link GlideExecutor#newSourceExecutor()} methods if you'd like to specify options
   * for the source executor.
   *
   * @param service The ExecutorService to use.
   * @return This builder.
   * @see #setDiskCacheExecutor(GlideExecutor)
   * @see GlideExecutor
   */
  // Public API.
  @SuppressWarnings("WeakerAccess")
  @NonNull
  public GlideBuilder setSourceExecutor(@Nullable GlideExecutor service) {
    this.sourceExecutor = service;
    return this;
  }

  /**
   * Sets the {@link GlideExecutor} to use when retrieving {@link
   * com.bumptech.glide.load.engine.Resource}s that are currently in Glide's disk caches.
   *
   * <p>Defaults to a single thread which is usually the best combination of memory usage, jank, and
   * performance, even on high end devices.
   *
   * <p>Use the {@link GlideExecutor#newDiskCacheExecutor()} if you'd like to specify options for
   * the disk cache executor.
   *
   * @param service The {@link GlideExecutor} to use.
   * @return This builder.
   * @see #setSourceExecutor(GlideExecutor)
   * @see GlideExecutor
   */
  // Public API.
  @SuppressWarnings("WeakerAccess")
  @NonNull
  public GlideBuilder setDiskCacheExecutor(@Nullable GlideExecutor service) {
    this.diskCacheExecutor = service;
    return this;
  }

  /**
   * Sets the {@link GlideExecutor} to use when loading frames of animated images and particularly
   * of {@link com.bumptech.glide.load.resource.gif.GifDrawable}s.
   *
   * <p>Defaults to one or two threads, depending on the number of cores available.
   *
   * <p>Use the {@link GlideExecutor#newAnimationExecutor()} methods if you'd like to specify
   * options for the animation executor.
   *
   * @param service The {@link GlideExecutor} to use.
   * @return This builder.
   */
  // Public API.
  @SuppressWarnings("WeakerAccess")
  @NonNull
  public GlideBuilder setAnimationExecutor(@Nullable GlideExecutor service) {
    this.animationExecutor = service;
    return this;
  }

  /**
   * Sets the default {@link RequestOptions} to use for all loads across the app.
   *
   * <p>Applying additional options with {@link RequestBuilder#apply(BaseRequestOptions)} will
   * override defaults set here.
   *
   * @see #setDefaultRequestOptions(RequestOptionsFactory)
   * @param requestOptions The options to use by default.
   * @return This builder.
   */
  @NonNull
  public GlideBuilder setDefaultRequestOptions(@Nullable final RequestOptions requestOptions) {
    return setDefaultRequestOptions(
        new RequestOptionsFactory() {
          @NonNull
          @Override
          public RequestOptions build() {
            // Null argument falls back to a fresh default RequestOptions.
            return requestOptions != null ? requestOptions : new RequestOptions();
          }
        });
  }

  /**
   * Sets a factory for the default {@link RequestOptions} to use for all loads across the app and
   * returns this {@code GlideBuilder}.
   *
   * <p>This factory will <em>NOT</em> be called once per load. Instead it will be called a handful
   * of times and memoized. It's not safe to assume that this factory will be called again for every
   * new load.
   *
   * <p>Applying additional options with {@link RequestBuilder#apply(BaseRequestOptions)} will
   * override defaults set here.
   *
   * @see #setDefaultRequestOptions(RequestOptionsFactory)
   */
  @NonNull
  public GlideBuilder setDefaultRequestOptions(@NonNull RequestOptionsFactory factory) {
    this.defaultRequestOptionsFactory = Preconditions.checkNotNull(factory);
    return this;
  }

  /**
   * Sets the default {@link TransitionOptions} to use when starting a request that will load a
   * resource with the given {@link Class}.
   *
   * <p>It's preferable but not required for the requested resource class to match the resource
   * class applied here as long as the resource class applied here is assignable from the requested
   * resource class. For example you can set a default transition for {@link
   * android.graphics.drawable.Drawable} and that default transition will be used if you
   * subsequently start requests for specific {@link android.graphics.drawable.Drawable} types like
   * {@link com.bumptech.glide.load.resource.gif.GifDrawable} or {@link
   * android.graphics.drawable.BitmapDrawable}. Specific types are always preferred so if you
   * register a default transition for both {@link android.graphics.drawable.Drawable} and {@link
   * android.graphics.drawable.BitmapDrawable} and then start a request for {@link
   * android.graphics.drawable.BitmapDrawable}s, the transition you registered for {@link
   * android.graphics.drawable.BitmapDrawable}s will be used.
   */
  // Public API.
  @SuppressWarnings("unused")
  @NonNull
  public <T> GlideBuilder setDefaultTransitionOptions(
      @NonNull Class<T> clazz, @Nullable TransitionOptions<?, T> options) {
    defaultTransitionOptions.put(clazz, options);
    return this;
  }

  /**
   * Sets the {@link MemorySizeCalculator} to use to calculate maximum sizes for default {@link
   * MemoryCache MemoryCaches} and/or default {@link BitmapPool BitmapPools}.
   *
   * @see #setMemorySizeCalculator(MemorySizeCalculator)
   * @param builder The builder to use (will not be modified).
   * @return This builder.
   */
  // Public API.
  @SuppressWarnings("unused")
  @NonNull
  public GlideBuilder setMemorySizeCalculator(@NonNull MemorySizeCalculator.Builder builder) {
    return setMemorySizeCalculator(builder.build());
  }

  /**
   * Sets the {@link MemorySizeCalculator} to use to calculate maximum sizes for default {@link
   * MemoryCache MemoryCaches} and/or default {@link BitmapPool BitmapPools}.
   *
   * <p>The given {@link MemorySizeCalculator} will not affect custom pools or caches provided via
   * {@link #setBitmapPool(BitmapPool)} or {@link #setMemoryCache(MemoryCache)}.
   *
   * @param calculator The calculator to use.
   * @return This builder.
   */
  // Public API.
  @SuppressWarnings("WeakerAccess")
  @NonNull
  public GlideBuilder setMemorySizeCalculator(@Nullable MemorySizeCalculator calculator) {
    this.memorySizeCalculator = calculator;
    return this;
  }

  /**
   * Sets the {@link com.bumptech.glide.manager.ConnectivityMonitorFactory} to use to notify {@link
   * com.bumptech.glide.RequestManager} of connectivity events. If not set {@link
   * com.bumptech.glide.manager.DefaultConnectivityMonitorFactory} would be used.
   *
   * @param factory The factory to use
   * @return This builder.
   */
  // Public API.
  @SuppressWarnings("unused")
  @NonNull
  public GlideBuilder setConnectivityMonitorFactory(@Nullable ConnectivityMonitorFactory factory) {
    this.connectivityMonitorFactory = factory;
    return this;
  }

  /**
   * Sets a log level constant from those in {@link Log} to indicate the desired log verbosity.
   *
   * <p>The level must be one of {@link Log#VERBOSE}, {@link Log#DEBUG}, {@link Log#INFO}, {@link
   * Log#WARN}, or {@link Log#ERROR}.
   *
   * <p>{@link Log#VERBOSE} means one or more lines will be logged per request, including timing
   * logs and failures. {@link Log#DEBUG} means at most one line will be logged per successful
   * request, including timing logs, although many lines may be logged for failures including
   * multiple complete stack traces. {@link Log#INFO} means failed loads will be logged including
   * multiple complete stack traces, but successful loads will not be logged at all. {@link
   * Log#WARN} means only summaries of failed loads will be logged. {@link Log#ERROR} means only
   * exceptional cases will be logged.
   *
   * <p>All logs will be logged using the 'Glide' tag.
   *
   * <p>Many other debugging logs are available in individual classes. The log level supplied here
   * only controls a small set of informative and well formatted logs. Users wishing to debug
   * certain aspects of the library can look for individual <code>TAG</code> variables at the tops
   * of classes and use <code>adb shell setprop log.tag.TAG</code> to enable or disable any relevant
   * tags.
   *
   * @param logLevel The log level to use from {@link Log}.
   * @return This builder.
   */
  // Public API.
  @SuppressWarnings("unused")
  @NonNull
  public GlideBuilder setLogLevel(int logLevel) {
    // Relies on android.util.Log ordering: VERBOSE(2) < ... < ERROR(6).
    if (logLevel < Log.VERBOSE || logLevel > Log.ERROR) {
      throw new IllegalArgumentException(
          "Log level must be one of Log.VERBOSE, Log.DEBUG," + " Log.INFO, Log.WARN, or Log.ERROR");
    }
    this.logLevel = logLevel;
    return this;
  }

  /**
   * If set to {@code true}, allows Glide to re-capture resources that are loaded into {@link
   * com.bumptech.glide.request.target.Target}s which are subsequently de-referenced and garbage
   * collected without being cleared.
   *
   * <p>Defaults to {@code false}.
   *
   * <p>Glide's resource re-use system is permissive, which means that's acceptable for callers to
   * load resources into {@link com.bumptech.glide.request.target.Target}s and then never clear the
   * {@link com.bumptech.glide.request.target.Target}. To do so, Glide uses {@link
   * java.lang.ref.WeakReference}s to track resources that belong to {@link
   * com.bumptech.glide.request.target.Target}s that haven't yet been cleared. Setting this method
   * to {@code true} allows Glide to also maintain a hard reference to the underlying resource so
   * that if the {@link com.bumptech.glide.request.target.Target} is garbage collected, Glide can
   * return the underlying resource to it's memory cache so that subsequent requests will not
   * unexpectedly re-load the resource from disk or source. As a side affect, it will take the
   * system slightly longer to garbage collect the underlying resource because the weak reference
   * has to be cleared and processed before the hard reference is removed. As a result, setting this
   * method to {@code true} may transiently increase the memory usage of an application.
   *
   * <p>Leaving this method at the default {@code false} value will allow the platform to garbage
   * collect resources more quickly, but will lead to unexpected memory cache misses if callers load
   * resources into {@link com.bumptech.glide.request.target.Target}s but never clear them.
   *
   * <p>If you set this method to {@code true} you <em>must not</em> call {@link Bitmap#recycle()}
   * or mutate any Bitmaps returned by Glide. If this method is set to {@code false}, recycling or
   * mutating Bitmaps is inefficient but safe as long as you do not clear the corresponding {@link
   * com.bumptech.glide.request.target.Target} used to load the {@link Bitmap}. However, if you set
   * this method to {@code true} and recycle or mutate any returned {@link Bitmap}s or other mutable
   * resources, Glide may recover those resources and attempt to use them later on, resulting in
   * crashes, graphical corruption or undefined behavior.
   *
   * <p>Regardless of what value this method is set to, it's always good practice to clear {@link
   * com.bumptech.glide.request.target.Target}s when you're done with the corresponding resource.
   * Clearing {@link com.bumptech.glide.request.target.Target}s allows Glide to maximize resource
   * re-use, minimize memory overhead and minimize unexpected behavior resulting from edge cases. If
   * you use {@link RequestManager#clear(Target)}, calling {@link Bitmap#recycle()} or mutating
   * {@link Bitmap}s is not only unsafe, it's also totally unnecessary and should be avoided. In all
   * cases, prefer {@link RequestManager#clear(Target)} to {@link Bitmap#recycle()}.
   *
   * @return This builder.
   */
  // Public API.
  @SuppressWarnings("unused")
  @NonNull
  public GlideBuilder setIsActiveResourceRetentionAllowed(
      boolean isActiveResourceRetentionAllowed) {
    this.isActiveResourceRetentionAllowed = isActiveResourceRetentionAllowed;
    return this;
  }

  /**
   * Adds a global {@link RequestListener} that will be added to every request started with Glide.
   *
   * <p>Multiple {@link RequestListener}s can be added here, in {@link RequestManager} scopes or to
   * individual {@link RequestBuilder}s. {@link RequestListener}s are called in the order they're
   * added.
Even if an earlier {@link RequestListener} returns {@code true} from {@link * RequestListener#onLoadFailed(GlideException, Object, Target, boolean)} or {@link * RequestListener#onResourceReady(Object, Object, Target, DataSource, boolean)}, it will not * prevent subsequent {@link RequestListener}s from being called. * * <p>Because Glide requests can be started for any number of individual resource types, any * listener added here has to accept any generic resource type in {@link * RequestListener#onResourceReady(Object, Object, Target, DataSource, boolean)}. If you must base * the behavior of the listener on the resource type, you will need to use {@code instanceof} to * do so. It's not safe to cast resource types without first checking with {@code instanceof}. */ @NonNull public GlideBuilder addGlobalRequestListener(@NonNull RequestListener<Object> listener) { if (defaultRequestListeners == null) { defaultRequestListeners = new ArrayList<>(); } defaultRequestListeners.add(listener); return this; } /** * Set to {@code true} to make Glide populate {@link * com.bumptech.glide.load.engine.GlideException#setOrigin(Exception)} for failed requests. * * <p>The exception set by this method is not printed by {@link GlideException} and can only be * viewed via a {@link RequestListener} that reads the field via {@link * GlideException#getOrigin()}. * * <p>This is an experimental API that may be removed in the future. */ public GlideBuilder setLogRequestOrigins(boolean isEnabled) { glideExperimentsBuilder.update(new LogRequestOrigins(), isEnabled); return this; } /** * Set to {@code true} to make Glide use {@link android.graphics.ImageDecoder} when decoding * {@link Bitmap}s on Android P and higher. * * <p>Calls to this method on versions of Android less than Q are ignored. Although ImageDecoder * was added in Android O a bug prevents it from scaling images with exif orientations until Q. * See b/136096254. 
* * <p>Specifically {@link android.graphics.ImageDecoder} will be used in place of {@link * com.bumptech.glide.load.resource.bitmap.Downsampler} and {@link android.graphics.BitmapFactory} * to decode {@link Bitmap}s. GIFs, resources, and all other types of {@link * android.graphics.drawable.Drawable}s are not affected by this flag. * * <p>This flag is experimental and may be removed without deprecation in a future version. * * <p>When this flag is enabled, Bitmap's will not be re-used when decoding images, though they * may still be used as part of {@link com.bumptech.glide.load.Transformation}s because {@link * android.graphics.ImageDecoder} does not support Bitmap re-use. * * <p>When this flag is enabled {@link * com.bumptech.glide.load.resource.bitmap.Downsampler#FIX_BITMAP_SIZE_TO_REQUESTED_DIMENSIONS} is * ignored. All other {@link com.bumptech.glide.load.resource.bitmap.Downsampler} flags are * obeyed, although there may be subtle behavior differences because many options are subject to * the whims of {@link android.graphics.BitmapFactory} and {@link android.graphics.ImageDecoder} * which may not agree. */ public GlideBuilder setImageDecoderEnabledForBitmaps(boolean isEnabled) { glideExperimentsBuilder.update( new EnableImageDecoderForBitmaps(), /* isEnabled= */ isEnabled && Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q); return this; } /** * @deprecated This method does nothing. It will be hard coded and removed in a future release * without further warning. */ @Deprecated public GlideBuilder setPreserveGainmapAndColorSpaceForTransformations(boolean isEnabled) { return this; } /** * @deprecated This method does nothing. It will be hard coded and removed in a future release * without further warning. */ @Deprecated public GlideBuilder setEnableHardwareGainmapFixOnU(boolean isEnabled) { return this; } /** * @deprecated This method does nothing. It will be hard coded and removed in a future release * without further warning. 
*/ @Deprecated public GlideBuilder setDisableHardwareBitmapsOnO(boolean disableHardwareBitmapsOnO) { return this; } void setRequestManagerFactory(@Nullable RequestManagerFactory factory) { this.requestManagerFactory = factory; } // For testing. GlideBuilder setEngine(Engine engine) { this.engine = engine; return this; } @NonNull Glide build( @NonNull Context context, List<GlideModule> manifestModules, AppGlideModule annotationGeneratedGlideModule) { if (sourceExecutor == null) { sourceExecutor = GlideExecutor.newSourceExecutor(); } if (diskCacheExecutor == null) { diskCacheExecutor = GlideExecutor.newDiskCacheExecutor(); } if (animationExecutor == null) { animationExecutor = GlideExecutor.newAnimationExecutor(); } if (memorySizeCalculator == null) { memorySizeCalculator = new MemorySizeCalculator.Builder(context).build(); } if (connectivityMonitorFactory == null) { connectivityMonitorFactory = new DefaultConnectivityMonitorFactory(); } if (bitmapPool == null) { int size = memorySizeCalculator.getBitmapPoolSize(); if (size > 0) { bitmapPool = new LruBitmapPool(size); } else { bitmapPool = new BitmapPoolAdapter(); } } if (arrayPool == null) { arrayPool = new LruArrayPool(memorySizeCalculator.getArrayPoolSizeInBytes()); } if (memoryCache == null) { memoryCache = new LruResourceCache(memorySizeCalculator.getMemoryCacheSize()); } if (diskCacheFactory == null) { diskCacheFactory = new InternalCacheDiskCacheFactory(context); } if (engine == null) { engine = new Engine( memoryCache, diskCacheFactory, diskCacheExecutor, sourceExecutor, GlideExecutor.newUnlimitedSourceExecutor(), animationExecutor, isActiveResourceRetentionAllowed); } if (defaultRequestListeners == null) { defaultRequestListeners = Collections.emptyList(); } else { defaultRequestListeners = Collections.unmodifiableList(defaultRequestListeners); } GlideExperiments experiments = glideExperimentsBuilder.build(); RequestManagerRetriever requestManagerRetriever = new 
RequestManagerRetriever(requestManagerFactory); return new Glide( context, engine, memoryCache, bitmapPool, arrayPool, requestManagerRetriever, connectivityMonitorFactory, logLevel, defaultRequestOptionsFactory, defaultTransitionOptions, defaultRequestListeners, manifestModules, annotationGeneratedGlideModule, experiments); } static final class ManualOverrideHardwareBitmapMaxFdCount implements Experiment { final int fdCount; ManualOverrideHardwareBitmapMaxFdCount(int fdCount) { this.fdCount = fdCount; } } static final class EnableImageDecoderForBitmaps implements Experiment {} /** See {@link #setLogRequestOrigins(boolean)}. */ public static final class LogRequestOrigins implements Experiment {} }
bumptech/glide
library/src/main/java/com/bumptech/glide/GlideBuilder.java
935
/* * Copyright 2013 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.util.concurrent; import io.netty.util.internal.InternalThreadLocalMap; import io.netty.util.internal.PlatformDependent; import io.netty.util.internal.StringUtil; import io.netty.util.internal.SystemPropertyUtil; import io.netty.util.internal.ThrowableUtil; import io.netty.util.internal.logging.InternalLogger; import io.netty.util.internal.logging.InternalLoggerFactory; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; import static io.netty.util.internal.ObjectUtil.checkNotNull; import static java.util.concurrent.TimeUnit.MILLISECONDS; public class DefaultPromise<V> extends AbstractFuture<V> implements Promise<V> { private static final InternalLogger logger = InternalLoggerFactory.getInstance(DefaultPromise.class); private static final InternalLogger rejectedExecutionLogger = InternalLoggerFactory.getInstance(DefaultPromise.class.getName() + ".rejectedExecution"); private static final int MAX_LISTENER_STACK_DEPTH = Math.min(8, SystemPropertyUtil.getInt("io.netty.defaultPromise.maxListenerStackDepth", 8)); @SuppressWarnings("rawtypes") private static final AtomicReferenceFieldUpdater<DefaultPromise, Object> RESULT_UPDATER = 
AtomicReferenceFieldUpdater.newUpdater(DefaultPromise.class, Object.class, "result"); private static final Object SUCCESS = new Object(); private static final Object UNCANCELLABLE = new Object(); private static final CauseHolder CANCELLATION_CAUSE_HOLDER = new CauseHolder( StacklessCancellationException.newInstance(DefaultPromise.class, "cancel(...)")); private static final StackTraceElement[] CANCELLATION_STACK = CANCELLATION_CAUSE_HOLDER.cause.getStackTrace(); private volatile Object result; private final EventExecutor executor; /** * One or more listeners. Can be a {@link GenericFutureListener} or a {@link DefaultFutureListeners}. * If {@code null}, it means either 1) no listeners were added yet or 2) all listeners were notified. * * Threading - synchronized(this). We must support adding listeners when there is no EventExecutor. */ private GenericFutureListener<? extends Future<?>> listener; private DefaultFutureListeners listeners; /** * Threading - synchronized(this). We are required to hold the monitor to use Java's underlying wait()/notifyAll(). */ private short waiters; /** * Threading - synchronized(this). We must prevent concurrent notification and FIFO listener notification if the * executor changes. */ private boolean notifyingListeners; /** * Creates a new instance. * * It is preferable to use {@link EventExecutor#newPromise()} to create a new promise * * @param executor * the {@link EventExecutor} which is used to notify the promise once it is complete. * It is assumed this executor will protect against {@link StackOverflowError} exceptions. * The executor may be used to avoid {@link StackOverflowError} by executing a {@link Runnable} if the stack * depth exceeds a threshold. * */ public DefaultPromise(EventExecutor executor) { this.executor = checkNotNull(executor, "executor"); } /** * See {@link #executor()} for expectations of the executor. 
*/ protected DefaultPromise() { // only for subclasses executor = null; } @Override public Promise<V> setSuccess(V result) { if (setSuccess0(result)) { return this; } throw new IllegalStateException("complete already: " + this); } @Override public boolean trySuccess(V result) { return setSuccess0(result); } @Override public Promise<V> setFailure(Throwable cause) { if (setFailure0(cause)) { return this; } throw new IllegalStateException("complete already: " + this, cause); } @Override public boolean tryFailure(Throwable cause) { return setFailure0(cause); } @Override public boolean setUncancellable() { if (RESULT_UPDATER.compareAndSet(this, null, UNCANCELLABLE)) { return true; } Object result = this.result; return !isDone0(result) || !isCancelled0(result); } @Override public boolean isSuccess() { Object result = this.result; return result != null && result != UNCANCELLABLE && !(result instanceof CauseHolder); } @Override public boolean isCancellable() { return result == null; } private static final class LeanCancellationException extends CancellationException { private static final long serialVersionUID = 2794674970981187807L; // Suppress a warning since the method doesn't need synchronization @Override public Throwable fillInStackTrace() { setStackTrace(CANCELLATION_STACK); return this; } @Override public String toString() { return CancellationException.class.getName(); } } @Override public Throwable cause() { return cause0(result); } private Throwable cause0(Object result) { if (!(result instanceof CauseHolder)) { return null; } if (result == CANCELLATION_CAUSE_HOLDER) { CancellationException ce = new LeanCancellationException(); if (RESULT_UPDATER.compareAndSet(this, CANCELLATION_CAUSE_HOLDER, new CauseHolder(ce))) { return ce; } result = this.result; } return ((CauseHolder) result).cause; } @Override public Promise<V> addListener(GenericFutureListener<? extends Future<? 
super V>> listener) { checkNotNull(listener, "listener"); synchronized (this) { addListener0(listener); } if (isDone()) { notifyListeners(); } return this; } @Override public Promise<V> addListeners(GenericFutureListener<? extends Future<? super V>>... listeners) { checkNotNull(listeners, "listeners"); synchronized (this) { for (GenericFutureListener<? extends Future<? super V>> listener : listeners) { if (listener == null) { break; } addListener0(listener); } } if (isDone()) { notifyListeners(); } return this; } @Override public Promise<V> removeListener(final GenericFutureListener<? extends Future<? super V>> listener) { checkNotNull(listener, "listener"); synchronized (this) { removeListener0(listener); } return this; } @Override public Promise<V> removeListeners(final GenericFutureListener<? extends Future<? super V>>... listeners) { checkNotNull(listeners, "listeners"); synchronized (this) { for (GenericFutureListener<? extends Future<? super V>> listener : listeners) { if (listener == null) { break; } removeListener0(listener); } } return this; } @Override public Promise<V> await() throws InterruptedException { if (isDone()) { return this; } if (Thread.interrupted()) { throw new InterruptedException(toString()); } checkDeadLock(); synchronized (this) { while (!isDone()) { incWaiters(); try { wait(); } finally { decWaiters(); } } } return this; } @Override public Promise<V> awaitUninterruptibly() { if (isDone()) { return this; } checkDeadLock(); boolean interrupted = false; synchronized (this) { while (!isDone()) { incWaiters(); try { wait(); } catch (InterruptedException e) { // Interrupted while waiting. 
interrupted = true; } finally { decWaiters(); } } } if (interrupted) { Thread.currentThread().interrupt(); } return this; } @Override public boolean await(long timeout, TimeUnit unit) throws InterruptedException { return await0(unit.toNanos(timeout), true); } @Override public boolean await(long timeoutMillis) throws InterruptedException { return await0(MILLISECONDS.toNanos(timeoutMillis), true); } @Override public boolean awaitUninterruptibly(long timeout, TimeUnit unit) { try { return await0(unit.toNanos(timeout), false); } catch (InterruptedException e) { // Should not be raised at all. throw new InternalError(); } } @Override public boolean awaitUninterruptibly(long timeoutMillis) { try { return await0(MILLISECONDS.toNanos(timeoutMillis), false); } catch (InterruptedException e) { // Should not be raised at all. throw new InternalError(); } } @SuppressWarnings("unchecked") @Override public V getNow() { Object result = this.result; if (result instanceof CauseHolder || result == SUCCESS || result == UNCANCELLABLE) { return null; } return (V) result; } @SuppressWarnings("unchecked") @Override public V get() throws InterruptedException, ExecutionException { Object result = this.result; if (!isDone0(result)) { await(); result = this.result; } if (result == SUCCESS || result == UNCANCELLABLE) { return null; } Throwable cause = cause0(result); if (cause == null) { return (V) result; } if (cause instanceof CancellationException) { throw (CancellationException) cause; } throw new ExecutionException(cause); } @SuppressWarnings("unchecked") @Override public V get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { Object result = this.result; if (!isDone0(result)) { if (!await(timeout, unit)) { throw new TimeoutException(); } result = this.result; } if (result == SUCCESS || result == UNCANCELLABLE) { return null; } Throwable cause = cause0(result); if (cause == null) { return (V) result; } if (cause instanceof 
CancellationException) { throw (CancellationException) cause; } throw new ExecutionException(cause); } /** * {@inheritDoc} * * @param mayInterruptIfRunning this value has no effect in this implementation. */ @Override public boolean cancel(boolean mayInterruptIfRunning) { if (RESULT_UPDATER.compareAndSet(this, null, CANCELLATION_CAUSE_HOLDER)) { if (checkNotifyWaiters()) { notifyListeners(); } return true; } return false; } @Override public boolean isCancelled() { return isCancelled0(result); } @Override public boolean isDone() { return isDone0(result); } @Override public Promise<V> sync() throws InterruptedException { await(); rethrowIfFailed(); return this; } @Override public Promise<V> syncUninterruptibly() { awaitUninterruptibly(); rethrowIfFailed(); return this; } @Override public String toString() { return toStringBuilder().toString(); } protected StringBuilder toStringBuilder() { StringBuilder buf = new StringBuilder(64) .append(StringUtil.simpleClassName(this)) .append('@') .append(Integer.toHexString(hashCode())); Object result = this.result; if (result == SUCCESS) { buf.append("(success)"); } else if (result == UNCANCELLABLE) { buf.append("(uncancellable)"); } else if (result instanceof CauseHolder) { buf.append("(failure: ") .append(((CauseHolder) result).cause) .append(')'); } else if (result != null) { buf.append("(success: ") .append(result) .append(')'); } else { buf.append("(incomplete)"); } return buf; } /** * Get the executor used to notify listeners when this promise is complete. * <p> * It is assumed this executor will protect against {@link StackOverflowError} exceptions. * The executor may be used to avoid {@link StackOverflowError} by executing a {@link Runnable} if the stack * depth exceeds a threshold. * @return The executor used to notify listeners when this promise is complete. 
*/ protected EventExecutor executor() { return executor; } protected void checkDeadLock() { EventExecutor e = executor(); if (e != null && e.inEventLoop()) { throw new BlockingOperationException(toString()); } } /** * Notify a listener that a future has completed. * <p> * This method has a fixed depth of {@link #MAX_LISTENER_STACK_DEPTH} that will limit recursion to prevent * {@link StackOverflowError} and will stop notifying listeners added after this threshold is exceeded. * @param eventExecutor the executor to use to notify the listener {@code listener}. * @param future the future that is complete. * @param listener the listener to notify. */ protected static void notifyListener( EventExecutor eventExecutor, final Future<?> future, final GenericFutureListener<?> listener) { notifyListenerWithStackOverFlowProtection( checkNotNull(eventExecutor, "eventExecutor"), checkNotNull(future, "future"), checkNotNull(listener, "listener")); } private void notifyListeners() { EventExecutor executor = executor(); if (executor.inEventLoop()) { final InternalThreadLocalMap threadLocals = InternalThreadLocalMap.get(); final int stackDepth = threadLocals.futureListenerStackDepth(); if (stackDepth < MAX_LISTENER_STACK_DEPTH) { threadLocals.setFutureListenerStackDepth(stackDepth + 1); try { notifyListenersNow(); } finally { threadLocals.setFutureListenerStackDepth(stackDepth); } return; } } safeExecute(executor, new Runnable() { @Override public void run() { notifyListenersNow(); } }); } /** * The logic in this method should be identical to {@link #notifyListeners()} but * cannot share code because the listener(s) cannot be cached for an instance of {@link DefaultPromise} since the * listener(s) may be changed and is protected by a synchronized operation. 
*/ private static void notifyListenerWithStackOverFlowProtection(final EventExecutor executor, final Future<?> future, final GenericFutureListener<?> listener) { if (executor.inEventLoop()) { final InternalThreadLocalMap threadLocals = InternalThreadLocalMap.get(); final int stackDepth = threadLocals.futureListenerStackDepth(); if (stackDepth < MAX_LISTENER_STACK_DEPTH) { threadLocals.setFutureListenerStackDepth(stackDepth + 1); try { notifyListener0(future, listener); } finally { threadLocals.setFutureListenerStackDepth(stackDepth); } return; } } safeExecute(executor, new Runnable() { @Override public void run() { notifyListener0(future, listener); } }); } private void notifyListenersNow() { GenericFutureListener listener; DefaultFutureListeners listeners; synchronized (this) { listener = this.listener; listeners = this.listeners; // Only proceed if there are listeners to notify and we are not already notifying listeners. if (notifyingListeners || (listener == null && listeners == null)) { return; } notifyingListeners = true; if (listener != null) { this.listener = null; } else { this.listeners = null; } } for (;;) { if (listener != null) { notifyListener0(this, listener); } else { notifyListeners0(listeners); } synchronized (this) { if (this.listener == null && this.listeners == null) { // Nothing can throw from within this method, so setting notifyingListeners back to false does not // need to be in a finally block. 
notifyingListeners = false; return; } listener = this.listener; listeners = this.listeners; if (listener != null) { this.listener = null; } else { this.listeners = null; } } } } private void notifyListeners0(DefaultFutureListeners listeners) { GenericFutureListener<?>[] a = listeners.listeners(); int size = listeners.size(); for (int i = 0; i < size; i ++) { notifyListener0(this, a[i]); } } @SuppressWarnings({ "unchecked", "rawtypes" }) private static void notifyListener0(Future future, GenericFutureListener l) { try { l.operationComplete(future); } catch (Throwable t) { if (logger.isWarnEnabled()) { logger.warn("An exception was thrown by " + l.getClass().getName() + ".operationComplete()", t); } } } private void addListener0(GenericFutureListener<? extends Future<? super V>> listener) { if (this.listener == null) { if (listeners == null) { this.listener = listener; } else { listeners.add(listener); } } else { assert listeners == null; listeners = new DefaultFutureListeners(this.listener, listener); this.listener = null; } } private void removeListener0(GenericFutureListener<? extends Future<? super V>> toRemove) { if (listener == toRemove) { listener = null; } else if (listeners != null) { listeners.remove(toRemove); // Removal is rare, no need for compaction if (listeners.size() == 0) { listeners = null; } } } private boolean setSuccess0(V result) { return setValue0(result == null ? SUCCESS : result); } private boolean setFailure0(Throwable cause) { return setValue0(new CauseHolder(checkNotNull(cause, "cause"))); } private boolean setValue0(Object objResult) { if (RESULT_UPDATER.compareAndSet(this, null, objResult) || RESULT_UPDATER.compareAndSet(this, UNCANCELLABLE, objResult)) { if (checkNotifyWaiters()) { notifyListeners(); } return true; } return false; } /** * Check if there are any waiters and if so notify these. * @return {@code true} if there are any listeners attached to the promise, {@code false} otherwise. 
*/ private synchronized boolean checkNotifyWaiters() { if (waiters > 0) { notifyAll(); } return listener != null || listeners != null; } private void incWaiters() { if (waiters == Short.MAX_VALUE) { throw new IllegalStateException("too many waiters: " + this); } ++waiters; } private void decWaiters() { --waiters; } private void rethrowIfFailed() { Throwable cause = cause(); if (cause == null) { return; } PlatformDependent.throwException(cause); } private boolean await0(long timeoutNanos, boolean interruptable) throws InterruptedException { if (isDone()) { return true; } if (timeoutNanos <= 0) { return isDone(); } if (interruptable && Thread.interrupted()) { throw new InterruptedException(toString()); } checkDeadLock(); // Start counting time from here instead of the first line of this method, // to avoid/postpone performance cost of System.nanoTime(). final long startTime = System.nanoTime(); synchronized (this) { boolean interrupted = false; try { long waitTime = timeoutNanos; while (!isDone() && waitTime > 0) { incWaiters(); try { wait(waitTime / 1000000, (int) (waitTime % 1000000)); } catch (InterruptedException e) { if (interruptable) { throw e; } else { interrupted = true; } } finally { decWaiters(); } // Check isDone() in advance, try to avoid calculating the elapsed time later. if (isDone()) { return true; } // Calculate the elapsed time here instead of in the while condition, // try to avoid performance cost of System.nanoTime() in the first loop of while. waitTime = timeoutNanos - (System.nanoTime() - startTime); } return isDone(); } finally { if (interrupted) { Thread.currentThread().interrupt(); } } } } /** * Notify all progressive listeners. * <p> * No attempt is made to ensure notification order if multiple calls are made to this method before * the original invocation completes. * <p> * This will do an iteration over all listeners to get all of type {@link GenericProgressiveFutureListener}s. * @param progress the new progress. 
* @param total the total progress. */ @SuppressWarnings("unchecked") void notifyProgressiveListeners(final long progress, final long total) { final Object listeners = progressiveListeners(); if (listeners == null) { return; } final ProgressiveFuture<V> self = (ProgressiveFuture<V>) this; EventExecutor executor = executor(); if (executor.inEventLoop()) { if (listeners instanceof GenericProgressiveFutureListener[]) { notifyProgressiveListeners0( self, (GenericProgressiveFutureListener<?>[]) listeners, progress, total); } else { notifyProgressiveListener0( self, (GenericProgressiveFutureListener<ProgressiveFuture<V>>) listeners, progress, total); } } else { if (listeners instanceof GenericProgressiveFutureListener[]) { final GenericProgressiveFutureListener<?>[] array = (GenericProgressiveFutureListener<?>[]) listeners; safeExecute(executor, new Runnable() { @Override public void run() { notifyProgressiveListeners0(self, array, progress, total); } }); } else { final GenericProgressiveFutureListener<ProgressiveFuture<V>> l = (GenericProgressiveFutureListener<ProgressiveFuture<V>>) listeners; safeExecute(executor, new Runnable() { @Override public void run() { notifyProgressiveListener0(self, l, progress, total); } }); } } } /** * Returns a {@link GenericProgressiveFutureListener}, an array of {@link GenericProgressiveFutureListener}, or * {@code null}. */ private synchronized Object progressiveListeners() { final GenericFutureListener listener = this.listener; final DefaultFutureListeners listeners = this.listeners; if (listener == null && listeners == null) { // No listeners added return null; } if (listeners != null) { // Copy DefaultFutureListeners into an array of listeners. 
DefaultFutureListeners dfl = listeners; int progressiveSize = dfl.progressiveSize(); switch (progressiveSize) { case 0: return null; case 1: for (GenericFutureListener<?> l: dfl.listeners()) { if (l instanceof GenericProgressiveFutureListener) { return l; } } return null; } GenericFutureListener<?>[] array = dfl.listeners(); GenericProgressiveFutureListener<?>[] copy = new GenericProgressiveFutureListener[progressiveSize]; for (int i = 0, j = 0; j < progressiveSize; i ++) { GenericFutureListener<?> l = array[i]; if (l instanceof GenericProgressiveFutureListener) { copy[j ++] = (GenericProgressiveFutureListener<?>) l; } } return copy; } else if (listener instanceof GenericProgressiveFutureListener) { return listener; } else { // Only one listener was added and it's not a progressive listener. return null; } } private static void notifyProgressiveListeners0( ProgressiveFuture<?> future, GenericProgressiveFutureListener<?>[] listeners, long progress, long total) { for (GenericProgressiveFutureListener<?> l: listeners) { if (l == null) { break; } notifyProgressiveListener0(future, l, progress, total); } } @SuppressWarnings({ "unchecked", "rawtypes" }) private static void notifyProgressiveListener0( ProgressiveFuture future, GenericProgressiveFutureListener l, long progress, long total) { try { l.operationProgressed(future, progress, total); } catch (Throwable t) { if (logger.isWarnEnabled()) { logger.warn("An exception was thrown by " + l.getClass().getName() + ".operationProgressed()", t); } } } private static boolean isCancelled0(Object result) { return result instanceof CauseHolder && ((CauseHolder) result).cause instanceof CancellationException; } private static boolean isDone0(Object result) { return result != null && result != UNCANCELLABLE; } private static final class CauseHolder { final Throwable cause; CauseHolder(Throwable cause) { this.cause = cause; } } private static void safeExecute(EventExecutor executor, Runnable task) { try { executor.execute(task); } 
catch (Throwable t) { rejectedExecutionLogger.error("Failed to submit a listener notification task. Event loop shut down?", t); } } private static final class StacklessCancellationException extends CancellationException { private static final long serialVersionUID = -2974906711413716191L; private StacklessCancellationException() { } // Override fillInStackTrace() so we not populate the backtrace via a native call and so leak the // Classloader. @Override public Throwable fillInStackTrace() { return this; } static StacklessCancellationException newInstance(Class<?> clazz, String method) { return ThrowableUtil.unknownStackTrace(new StacklessCancellationException(), clazz, method); } } }
netty/netty
common/src/main/java/io/netty/util/concurrent/DefaultPromise.java
936
/* * Copyright 2002-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.jdbc.core; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.sql.BatchUpdateException; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLWarning; import java.sql.Statement; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Spliterator; import java.util.function.Consumer; import java.util.stream.Stream; import java.util.stream.StreamSupport; import javax.sql.DataSource; import org.springframework.dao.DataAccessException; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.dao.support.DataAccessUtils; import org.springframework.jdbc.InvalidResultSetAccessException; import org.springframework.jdbc.SQLWarningException; import org.springframework.jdbc.UncategorizedSQLException; import org.springframework.jdbc.datasource.ConnectionProxy; import org.springframework.jdbc.datasource.DataSourceUtils; import org.springframework.jdbc.support.JdbcAccessor; import org.springframework.jdbc.support.JdbcUtils; import 
org.springframework.jdbc.support.KeyHolder; import org.springframework.jdbc.support.rowset.SqlRowSet; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.LinkedCaseInsensitiveMap; import org.springframework.util.StringUtils; /** * <b>This is the central delegate in the JDBC core package.</b> * It can be used directly for many data access purposes, supporting any kind * of JDBC operation. For a more focused and convenient facade on top of this, * consider {@link org.springframework.jdbc.core.simple.JdbcClient} as of 6.1. * * <p>This class simplifies the use of JDBC and helps to avoid common errors. * It executes core JDBC workflow, leaving application code to provide SQL * and extract results. This class executes SQL queries or updates, initiating * iteration over ResultSets and catching JDBC exceptions and translating * them to the common {@code org.springframework.dao} exception hierarchy. * * <p>Code using this class need only implement callback interfaces, giving * them a clearly defined contract. The {@link PreparedStatementCreator} callback * interface creates a prepared statement given a Connection, providing SQL and * any necessary parameters. The {@link ResultSetExtractor} interface extracts * values from a ResultSet. See also {@link PreparedStatementSetter} and * {@link RowMapper} for two popular alternative callback interfaces. * * <p>An instance of this template class is thread-safe once configured. * Can be used within a service implementation via direct instantiation * with a DataSource reference, or get prepared in an application context * and given to services as bean reference. Note: The DataSource should * always be configured as a bean in the application context, in the first case * given to the service directly, in the second case to the prepared template. 
* * <p>Because this class is parameterizable by the callback interfaces and * the {@link org.springframework.jdbc.support.SQLExceptionTranslator} * interface, there should be no need to subclass it. * * <p>All SQL operations performed by this class are logged at debug level, * using "org.springframework.jdbc.core.JdbcTemplate" as log category. * * <p><b>NOTE: As of 6.1, there is a unified JDBC access facade available in * the form of {@link org.springframework.jdbc.core.simple.JdbcClient}.</b> * {@code JdbcClient} provides a fluent API style for common JDBC queries/updates * with flexible use of indexed or named parameters. It delegates to a * {@code JdbcTemplate}/{@code NamedParameterJdbcTemplate} for actual execution. * * @author Rod Johnson * @author Juergen Hoeller * @author Thomas Risberg * @since May 3, 2001 * @see JdbcOperations * @see PreparedStatementCreator * @see PreparedStatementSetter * @see CallableStatementCreator * @see PreparedStatementCallback * @see CallableStatementCallback * @see ResultSetExtractor * @see RowCallbackHandler * @see RowMapper * @see org.springframework.jdbc.support.SQLExceptionTranslator * @see org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate */ public class JdbcTemplate extends JdbcAccessor implements JdbcOperations { private static final String RETURN_RESULT_SET_PREFIX = "#result-set-"; private static final String RETURN_UPDATE_COUNT_PREFIX = "#update-count-"; /** If this variable is {@code false}, we will throw exceptions on SQL warnings. */ private boolean ignoreWarnings = true; /** * If this variable is set to a non-negative value, it will be used for setting the * fetchSize property on statements used for query processing. */ private int fetchSize = -1; /** * If this variable is set to a non-negative value, it will be used for setting the * maxRows property on statements used for query processing. 
*/ private int maxRows = -1; /** * If this variable is set to a non-negative value, it will be used for setting the * queryTimeout property on statements used for query processing. */ private int queryTimeout = -1; /** * If this variable is set to true, then all results checking will be bypassed for any * callable statement processing. This can be used to avoid a bug in some older Oracle * JDBC drivers like 10.1.0.2. */ private boolean skipResultsProcessing = false; /** * If this variable is set to true then all results from a stored procedure call * that don't have a corresponding SqlOutParameter declaration will be bypassed. * All other results processing will be take place unless the variable * {@code skipResultsProcessing} is set to {@code true}. */ private boolean skipUndeclaredResults = false; /** * If this variable is set to true then execution of a CallableStatement will return * the results in a Map that uses case-insensitive names for the parameters. */ private boolean resultsMapCaseInsensitive = false; /** * Construct a new JdbcTemplate for bean usage. * <p>Note: The DataSource has to be set before using the instance. * @see #setDataSource */ public JdbcTemplate() { } /** * Construct a new JdbcTemplate, given a DataSource to obtain connections from. * <p>Note: This will not trigger initialization of the exception translator. * @param dataSource the JDBC DataSource to obtain connections from */ public JdbcTemplate(DataSource dataSource) { setDataSource(dataSource); afterPropertiesSet(); } /** * Construct a new JdbcTemplate, given a DataSource to obtain connections from. * <p>Note: Depending on the "lazyInit" flag, initialization of the exception translator * will be triggered. 
* @param dataSource the JDBC DataSource to obtain connections from * @param lazyInit whether to lazily initialize the SQLExceptionTranslator */ public JdbcTemplate(DataSource dataSource, boolean lazyInit) { setDataSource(dataSource); setLazyInit(lazyInit); afterPropertiesSet(); } /** * Set whether we want to ignore JDBC statement warnings ({@link SQLWarning}). * <p>Default is {@code true}, swallowing and logging all warnings. Switch this flag to * {@code false} to make this JdbcTemplate throw a {@link SQLWarningException} instead * (or chain the {@link SQLWarning} into the primary {@link SQLException}, if any). * @see Statement#getWarnings() * @see java.sql.SQLWarning * @see org.springframework.jdbc.SQLWarningException * @see #handleWarnings(Statement) */ public void setIgnoreWarnings(boolean ignoreWarnings) { this.ignoreWarnings = ignoreWarnings; } /** * Return whether we ignore SQLWarnings. */ public boolean isIgnoreWarnings() { return this.ignoreWarnings; } /** * Set the fetch size for this JdbcTemplate. This is important for processing large * result sets: Setting this higher than the default value will increase processing * speed at the cost of memory consumption; setting this lower can avoid transferring * row data that will never be read by the application. * <p>Default is -1, indicating to use the JDBC driver's default configuration * (i.e. to not pass a specific fetch size setting on to the driver). * <p>Note: As of 4.3, negative values other than -1 will get passed on to the * driver, since e.g. MySQL supports special behavior for {@code Integer.MIN_VALUE}. * @see java.sql.Statement#setFetchSize */ public void setFetchSize(int fetchSize) { this.fetchSize = fetchSize; } /** * Return the fetch size specified for this JdbcTemplate. */ public int getFetchSize() { return this.fetchSize; } /** * Set the maximum number of rows for this JdbcTemplate. 
This is important for * processing subsets of large result sets, avoiding to read and hold the entire * result set in the database or in the JDBC driver if we're never interested in * the entire result in the first place (for example, when performing searches * that might return a large number of matches). * <p>Default is -1, indicating to use the JDBC driver's default configuration * (i.e. to not pass a specific max rows setting on to the driver). * <p>Note: As of 4.3, negative values other than -1 will get passed on to the * driver, in sync with {@link #setFetchSize}'s support for special MySQL values. * @see java.sql.Statement#setMaxRows */ public void setMaxRows(int maxRows) { this.maxRows = maxRows; } /** * Return the maximum number of rows specified for this JdbcTemplate. */ public int getMaxRows() { return this.maxRows; } /** * Set the query timeout for statements that this JdbcTemplate executes. * <p>Default is -1, indicating to use the JDBC driver's default * (i.e. to not pass a specific query timeout setting on the driver). * <p>Note: Any timeout specified here will be overridden by the remaining * transaction timeout when executing within a transaction that has a * timeout specified at the transaction level. * @see java.sql.Statement#setQueryTimeout */ public void setQueryTimeout(int queryTimeout) { this.queryTimeout = queryTimeout; } /** * Return the query timeout for statements that this JdbcTemplate executes. */ public int getQueryTimeout() { return this.queryTimeout; } /** * Set whether results processing should be skipped. Can be used to optimize callable * statement processing when we know that no results are being passed back - the processing * of out parameter will still take place. This can be used to avoid a bug in some older * Oracle JDBC drivers like 10.1.0.2. */ public void setSkipResultsProcessing(boolean skipResultsProcessing) { this.skipResultsProcessing = skipResultsProcessing; } /** * Return whether results processing should be skipped. 
*/ public boolean isSkipResultsProcessing() { return this.skipResultsProcessing; } /** * Set whether undeclared results should be skipped. */ public void setSkipUndeclaredResults(boolean skipUndeclaredResults) { this.skipUndeclaredResults = skipUndeclaredResults; } /** * Return whether undeclared results should be skipped. */ public boolean isSkipUndeclaredResults() { return this.skipUndeclaredResults; } /** * Set whether execution of a CallableStatement will return the results in a Map * that uses case-insensitive names for the parameters. */ public void setResultsMapCaseInsensitive(boolean resultsMapCaseInsensitive) { this.resultsMapCaseInsensitive = resultsMapCaseInsensitive; } /** * Return whether execution of a CallableStatement will return the results in a Map * that uses case-insensitive names for the parameters. */ public boolean isResultsMapCaseInsensitive() { return this.resultsMapCaseInsensitive; } //------------------------------------------------------------------------- // Methods dealing with a plain java.sql.Connection //------------------------------------------------------------------------- @Override @Nullable public <T> T execute(ConnectionCallback<T> action) throws DataAccessException { Assert.notNull(action, "Callback object must not be null"); Connection con = DataSourceUtils.getConnection(obtainDataSource()); try { // Create close-suppressing Connection proxy, also preparing returned Statements. Connection conToUse = createConnectionProxy(con); return action.doInConnection(conToUse); } catch (SQLException ex) { // Release Connection early, to avoid potential connection pool deadlock // in the case when the exception translator hasn't been initialized yet. 
String sql = getSql(action); DataSourceUtils.releaseConnection(con, getDataSource()); con = null; throw translateException("ConnectionCallback", sql, ex); } finally { DataSourceUtils.releaseConnection(con, getDataSource()); } } /** * Create a close-suppressing proxy for the given JDBC Connection. * Called by the {@code execute} method. * <p>The proxy also prepares returned JDBC Statements, applying * statement settings such as fetch size, max rows, and query timeout. * @param con the JDBC Connection to create a proxy for * @return the Connection proxy * @see java.sql.Connection#close() * @see #execute(ConnectionCallback) * @see #applyStatementSettings */ protected Connection createConnectionProxy(Connection con) { return (Connection) Proxy.newProxyInstance( ConnectionProxy.class.getClassLoader(), new Class<?>[] {ConnectionProxy.class}, new CloseSuppressingInvocationHandler(con)); } //------------------------------------------------------------------------- // Methods dealing with static SQL (java.sql.Statement) //------------------------------------------------------------------------- @Nullable private <T> T execute(StatementCallback<T> action, boolean closeResources) throws DataAccessException { Assert.notNull(action, "Callback object must not be null"); Connection con = DataSourceUtils.getConnection(obtainDataSource()); Statement stmt = null; try { stmt = con.createStatement(); applyStatementSettings(stmt); T result = action.doInStatement(stmt); handleWarnings(stmt); return result; } catch (SQLException ex) { // Release Connection early, to avoid potential connection pool deadlock // in the case when the exception translator hasn't been initialized yet. 
if (stmt != null) { handleWarnings(stmt, ex); } String sql = getSql(action); JdbcUtils.closeStatement(stmt); stmt = null; DataSourceUtils.releaseConnection(con, getDataSource()); con = null; throw translateException("StatementCallback", sql, ex); } finally { if (closeResources) { JdbcUtils.closeStatement(stmt); DataSourceUtils.releaseConnection(con, getDataSource()); } } } @Override @Nullable public <T> T execute(StatementCallback<T> action) throws DataAccessException { return execute(action, true); } @Override public void execute(final String sql) throws DataAccessException { if (logger.isDebugEnabled()) { logger.debug("Executing SQL statement [" + sql + "]"); } // Callback to execute the statement. class ExecuteStatementCallback implements StatementCallback<Object>, SqlProvider { @Override @Nullable public Object doInStatement(Statement stmt) throws SQLException { stmt.execute(sql); return null; } @Override public String getSql() { return sql; } } execute(new ExecuteStatementCallback(), true); } @Override @Nullable public <T> T query(final String sql, final ResultSetExtractor<T> rse) throws DataAccessException { Assert.notNull(sql, "SQL must not be null"); Assert.notNull(rse, "ResultSetExtractor must not be null"); if (logger.isDebugEnabled()) { logger.debug("Executing SQL query [" + sql + "]"); } // Callback to execute the query. 
class QueryStatementCallback implements StatementCallback<T>, SqlProvider { @Override @Nullable public T doInStatement(Statement stmt) throws SQLException { ResultSet rs = null; try { rs = stmt.executeQuery(sql); return rse.extractData(rs); } finally { JdbcUtils.closeResultSet(rs); } } @Override public String getSql() { return sql; } } return execute(new QueryStatementCallback(), true); } @Override public void query(String sql, RowCallbackHandler rch) throws DataAccessException { query(sql, new RowCallbackHandlerResultSetExtractor(rch)); } @Override public <T> List<T> query(String sql, RowMapper<T> rowMapper) throws DataAccessException { return result(query(sql, new RowMapperResultSetExtractor<>(rowMapper))); } @Override public <T> Stream<T> queryForStream(String sql, RowMapper<T> rowMapper) throws DataAccessException { class StreamStatementCallback implements StatementCallback<Stream<T>>, SqlProvider { @Override public Stream<T> doInStatement(Statement stmt) throws SQLException { ResultSet rs = stmt.executeQuery(sql); Connection con = stmt.getConnection(); return new ResultSetSpliterator<>(rs, rowMapper).stream().onClose(() -> { JdbcUtils.closeResultSet(rs); JdbcUtils.closeStatement(stmt); DataSourceUtils.releaseConnection(con, getDataSource()); }); } @Override public String getSql() { return sql; } } return result(execute(new StreamStatementCallback(), false)); } @Override public Map<String, Object> queryForMap(String sql) throws DataAccessException { return result(queryForObject(sql, getColumnMapRowMapper())); } @Override @Nullable public <T> T queryForObject(String sql, RowMapper<T> rowMapper) throws DataAccessException { List<T> results = query(sql, rowMapper); return DataAccessUtils.nullableSingleResult(results); } @Override @Nullable public <T> T queryForObject(String sql, Class<T> requiredType) throws DataAccessException { return queryForObject(sql, getSingleColumnRowMapper(requiredType)); } @Override public <T> List<T> queryForList(String sql, Class<T> 
elementType) throws DataAccessException { return query(sql, getSingleColumnRowMapper(elementType)); } @Override public List<Map<String, Object>> queryForList(String sql) throws DataAccessException { return query(sql, getColumnMapRowMapper()); } @Override public SqlRowSet queryForRowSet(String sql) throws DataAccessException { return result(query(sql, new SqlRowSetResultSetExtractor())); } @Override public int update(final String sql) throws DataAccessException { Assert.notNull(sql, "SQL must not be null"); if (logger.isDebugEnabled()) { logger.debug("Executing SQL update [" + sql + "]"); } // Callback to execute the update statement. class UpdateStatementCallback implements StatementCallback<Integer>, SqlProvider { @Override public Integer doInStatement(Statement stmt) throws SQLException { int rows = stmt.executeUpdate(sql); if (logger.isTraceEnabled()) { logger.trace("SQL update affected " + rows + " rows"); } return rows; } @Override public String getSql() { return sql; } } return updateCount(execute(new UpdateStatementCallback(), true)); } @Override public int[] batchUpdate(final String... sql) throws DataAccessException { Assert.notEmpty(sql, "SQL array must not be empty"); if (logger.isDebugEnabled()) { logger.debug("Executing SQL batch update of " + sql.length + " statements"); } // Callback to execute the batch update. 
class BatchUpdateStatementCallback implements StatementCallback<int[]>, SqlProvider { @Nullable private String currSql; @Override public int[] doInStatement(Statement stmt) throws SQLException, DataAccessException { int[] rowsAffected = new int[sql.length]; if (JdbcUtils.supportsBatchUpdates(stmt.getConnection())) { for (String sqlStmt : sql) { this.currSql = appendSql(this.currSql, sqlStmt); stmt.addBatch(sqlStmt); } try { rowsAffected = stmt.executeBatch(); } catch (BatchUpdateException ex) { String batchExceptionSql = null; for (int i = 0; i < ex.getUpdateCounts().length; i++) { if (ex.getUpdateCounts()[i] == Statement.EXECUTE_FAILED) { batchExceptionSql = appendSql(batchExceptionSql, sql[i]); } } if (StringUtils.hasLength(batchExceptionSql)) { this.currSql = batchExceptionSql; } throw ex; } } else { for (int i = 0; i < sql.length; i++) { this.currSql = sql[i]; if (!stmt.execute(sql[i])) { rowsAffected[i] = stmt.getUpdateCount(); } else { throw new InvalidDataAccessApiUsageException("Invalid batch SQL statement: " + sql[i]); } } } return rowsAffected; } private String appendSql(@Nullable String sql, String statement) { return (StringUtils.hasLength(sql) ? 
sql + "; " + statement : statement); } @Override @Nullable public String getSql() { return this.currSql; } } int[] result = execute(new BatchUpdateStatementCallback(), true); Assert.state(result != null, "No update counts"); return result; } //------------------------------------------------------------------------- // Methods dealing with prepared statements //------------------------------------------------------------------------- @Nullable private <T> T execute(PreparedStatementCreator psc, PreparedStatementCallback<T> action, boolean closeResources) throws DataAccessException { Assert.notNull(psc, "PreparedStatementCreator must not be null"); Assert.notNull(action, "Callback object must not be null"); if (logger.isDebugEnabled()) { String sql = getSql(psc); logger.debug("Executing prepared SQL statement" + (sql != null ? " [" + sql + "]" : "")); } Connection con = DataSourceUtils.getConnection(obtainDataSource()); PreparedStatement ps = null; try { ps = psc.createPreparedStatement(con); applyStatementSettings(ps); T result = action.doInPreparedStatement(ps); handleWarnings(ps); return result; } catch (SQLException ex) { // Release Connection early, to avoid potential connection pool deadlock // in the case when the exception translator hasn't been initialized yet. 
if (psc instanceof ParameterDisposer parameterDisposer) { parameterDisposer.cleanupParameters(); } if (ps != null) { handleWarnings(ps, ex); } String sql = getSql(psc); psc = null; JdbcUtils.closeStatement(ps); ps = null; DataSourceUtils.releaseConnection(con, getDataSource()); con = null; throw translateException("PreparedStatementCallback", sql, ex); } finally { if (closeResources) { if (psc instanceof ParameterDisposer parameterDisposer) { parameterDisposer.cleanupParameters(); } JdbcUtils.closeStatement(ps); DataSourceUtils.releaseConnection(con, getDataSource()); } } } @Override @Nullable public <T> T execute(PreparedStatementCreator psc, PreparedStatementCallback<T> action) throws DataAccessException { return execute(psc, action, true); } @Override @Nullable public <T> T execute(String sql, PreparedStatementCallback<T> action) throws DataAccessException { return execute(new SimplePreparedStatementCreator(sql), action, true); } /** * Query using a prepared statement, allowing for a PreparedStatementCreator * and a PreparedStatementSetter. Most other query methods use this method, * but application code will always work with either a creator or a setter. * @param psc a callback that creates a PreparedStatement given a Connection * @param pss a callback that knows how to set values on the prepared statement. * If this is {@code null}, the SQL will be assumed to contain no bind parameters. 
* @param rse a callback that will extract results * @return an arbitrary result object, as returned by the ResultSetExtractor * @throws DataAccessException if there is any problem */ @Nullable public <T> T query( PreparedStatementCreator psc, @Nullable final PreparedStatementSetter pss, final ResultSetExtractor<T> rse) throws DataAccessException { Assert.notNull(rse, "ResultSetExtractor must not be null"); logger.debug("Executing prepared SQL query"); return execute(psc, new PreparedStatementCallback<>() { @Override @Nullable public T doInPreparedStatement(PreparedStatement ps) throws SQLException { ResultSet rs = null; try { if (pss != null) { pss.setValues(ps); } rs = ps.executeQuery(); return rse.extractData(rs); } finally { JdbcUtils.closeResultSet(rs); if (pss instanceof ParameterDisposer parameterDisposer) { parameterDisposer.cleanupParameters(); } } } }, true); } @Override @Nullable public <T> T query(PreparedStatementCreator psc, ResultSetExtractor<T> rse) throws DataAccessException { return query(psc, null, rse); } @Override @Nullable public <T> T query(String sql, @Nullable PreparedStatementSetter pss, ResultSetExtractor<T> rse) throws DataAccessException { return query(new SimplePreparedStatementCreator(sql), pss, rse); } @Override @Nullable public <T> T query(String sql, Object[] args, int[] argTypes, ResultSetExtractor<T> rse) throws DataAccessException { return query(sql, newArgTypePreparedStatementSetter(args, argTypes), rse); } @Deprecated @Override @Nullable public <T> T query(String sql, @Nullable Object[] args, ResultSetExtractor<T> rse) throws DataAccessException { return query(sql, newArgPreparedStatementSetter(args), rse); } @Override @Nullable public <T> T query(String sql, ResultSetExtractor<T> rse, @Nullable Object... 
args) throws DataAccessException { return query(sql, newArgPreparedStatementSetter(args), rse); } @Override public void query(PreparedStatementCreator psc, RowCallbackHandler rch) throws DataAccessException { query(psc, new RowCallbackHandlerResultSetExtractor(rch)); } @Override public void query(String sql, @Nullable PreparedStatementSetter pss, RowCallbackHandler rch) throws DataAccessException { query(sql, pss, new RowCallbackHandlerResultSetExtractor(rch)); } @Override public void query(String sql, Object[] args, int[] argTypes, RowCallbackHandler rch) throws DataAccessException { query(sql, newArgTypePreparedStatementSetter(args, argTypes), rch); } @Deprecated @Override public void query(String sql, @Nullable Object[] args, RowCallbackHandler rch) throws DataAccessException { query(sql, newArgPreparedStatementSetter(args), rch); } @Override public void query(String sql, RowCallbackHandler rch, @Nullable Object... args) throws DataAccessException { query(sql, newArgPreparedStatementSetter(args), rch); } @Override public <T> List<T> query(PreparedStatementCreator psc, RowMapper<T> rowMapper) throws DataAccessException { return result(query(psc, new RowMapperResultSetExtractor<>(rowMapper))); } @Override public <T> List<T> query(String sql, @Nullable PreparedStatementSetter pss, RowMapper<T> rowMapper) throws DataAccessException { return result(query(sql, pss, new RowMapperResultSetExtractor<>(rowMapper))); } @Override public <T> List<T> query(String sql, Object[] args, int[] argTypes, RowMapper<T> rowMapper) throws DataAccessException { return result(query(sql, args, argTypes, new RowMapperResultSetExtractor<>(rowMapper))); } @Deprecated @Override public <T> List<T> query(String sql, @Nullable Object[] args, RowMapper<T> rowMapper) throws DataAccessException { return result(query(sql, args, new RowMapperResultSetExtractor<>(rowMapper))); } @Override public <T> List<T> query(String sql, RowMapper<T> rowMapper, @Nullable Object... 
args) throws DataAccessException { return result(query(sql, args, new RowMapperResultSetExtractor<>(rowMapper))); } /** * Query using a prepared statement, allowing for a PreparedStatementCreator * and a PreparedStatementSetter. Most other query methods use this method, * but application code will always work with either a creator or a setter. * @param psc a callback that creates a PreparedStatement given a Connection * @param pss a callback that knows how to set values on the prepared statement. * If this is {@code null}, the SQL will be assumed to contain no bind parameters. * @param rowMapper a callback that will map one object per row * @return the result Stream, containing mapped objects, needing to be * closed once fully processed (e.g. through a try-with-resources clause) * @throws DataAccessException if the query fails * @since 5.3 */ public <T> Stream<T> queryForStream(PreparedStatementCreator psc, @Nullable PreparedStatementSetter pss, RowMapper<T> rowMapper) throws DataAccessException { return result(execute(psc, ps -> { if (pss != null) { pss.setValues(ps); } ResultSet rs = ps.executeQuery(); Connection con = ps.getConnection(); return new ResultSetSpliterator<>(rs, rowMapper).stream().onClose(() -> { JdbcUtils.closeResultSet(rs); if (pss instanceof ParameterDisposer parameterDisposer) { parameterDisposer.cleanupParameters(); } JdbcUtils.closeStatement(ps); DataSourceUtils.releaseConnection(con, getDataSource()); }); }, false)); } @Override public <T> Stream<T> queryForStream(PreparedStatementCreator psc, RowMapper<T> rowMapper) throws DataAccessException { return queryForStream(psc, null, rowMapper); } @Override public <T> Stream<T> queryForStream(String sql, @Nullable PreparedStatementSetter pss, RowMapper<T> rowMapper) throws DataAccessException { return queryForStream(new SimplePreparedStatementCreator(sql), pss, rowMapper); } @Override public <T> Stream<T> queryForStream(String sql, RowMapper<T> rowMapper, @Nullable Object... 
args) throws DataAccessException { return queryForStream(new SimplePreparedStatementCreator(sql), newArgPreparedStatementSetter(args), rowMapper); } @Override @Nullable public <T> T queryForObject(String sql, Object[] args, int[] argTypes, RowMapper<T> rowMapper) throws DataAccessException { List<T> results = query(sql, args, argTypes, new RowMapperResultSetExtractor<>(rowMapper, 1)); return DataAccessUtils.nullableSingleResult(results); } @Deprecated @Override @Nullable public <T> T queryForObject(String sql, @Nullable Object[] args, RowMapper<T> rowMapper) throws DataAccessException { List<T> results = query(sql, args, new RowMapperResultSetExtractor<>(rowMapper, 1)); return DataAccessUtils.nullableSingleResult(results); } @Override @Nullable public <T> T queryForObject(String sql, RowMapper<T> rowMapper, @Nullable Object... args) throws DataAccessException { List<T> results = query(sql, args, new RowMapperResultSetExtractor<>(rowMapper, 1)); return DataAccessUtils.nullableSingleResult(results); } @Override @Nullable public <T> T queryForObject(String sql, Object[] args, int[] argTypes, Class<T> requiredType) throws DataAccessException { return queryForObject(sql, args, argTypes, getSingleColumnRowMapper(requiredType)); } @Deprecated @Override @Nullable public <T> T queryForObject(String sql, @Nullable Object[] args, Class<T> requiredType) throws DataAccessException { return queryForObject(sql, args, getSingleColumnRowMapper(requiredType)); } @Override @Nullable public <T> T queryForObject(String sql, Class<T> requiredType, @Nullable Object... args) throws DataAccessException { return queryForObject(sql, args, getSingleColumnRowMapper(requiredType)); } @Override public Map<String, Object> queryForMap(String sql, Object[] args, int[] argTypes) throws DataAccessException { return result(queryForObject(sql, args, argTypes, getColumnMapRowMapper())); } @Override public Map<String, Object> queryForMap(String sql, @Nullable Object... 
args) throws DataAccessException { return result(queryForObject(sql, args, getColumnMapRowMapper())); } @Override public <T> List<T> queryForList(String sql, Object[] args, int[] argTypes, Class<T> elementType) throws DataAccessException { return query(sql, args, argTypes, getSingleColumnRowMapper(elementType)); } @Deprecated @Override public <T> List<T> queryForList(String sql, @Nullable Object[] args, Class<T> elementType) throws DataAccessException { return query(sql, args, getSingleColumnRowMapper(elementType)); } @Override public <T> List<T> queryForList(String sql, Class<T> elementType, @Nullable Object... args) throws DataAccessException { return query(sql, args, getSingleColumnRowMapper(elementType)); } @Override public List<Map<String, Object>> queryForList(String sql, Object[] args, int[] argTypes) throws DataAccessException { return query(sql, args, argTypes, getColumnMapRowMapper()); } @Override public List<Map<String, Object>> queryForList(String sql, @Nullable Object... args) throws DataAccessException { return query(sql, args, getColumnMapRowMapper()); } @Override public SqlRowSet queryForRowSet(String sql, Object[] args, int[] argTypes) throws DataAccessException { return result(query(sql, args, argTypes, new SqlRowSetResultSetExtractor())); } @Override public SqlRowSet queryForRowSet(String sql, @Nullable Object... 
args) throws DataAccessException { return result(query(sql, args, new SqlRowSetResultSetExtractor())); } protected int update(final PreparedStatementCreator psc, @Nullable final PreparedStatementSetter pss) throws DataAccessException { logger.debug("Executing prepared SQL update"); return updateCount(execute(psc, ps -> { try { if (pss != null) { pss.setValues(ps); } int rows = ps.executeUpdate(); if (logger.isTraceEnabled()) { logger.trace("SQL update affected " + rows + " rows"); } return rows; } finally { if (pss instanceof ParameterDisposer parameterDisposer) { parameterDisposer.cleanupParameters(); } } }, true)); } @Override public int update(PreparedStatementCreator psc) throws DataAccessException { return update(psc, (PreparedStatementSetter) null); } @Override public int update(final PreparedStatementCreator psc, final KeyHolder generatedKeyHolder) throws DataAccessException { Assert.notNull(generatedKeyHolder, "KeyHolder must not be null"); logger.debug("Executing SQL update and returning generated keys"); return updateCount(execute(psc, ps -> { int rows = ps.executeUpdate(); generatedKeyHolder.getKeyList().clear(); storeGeneratedKeys(generatedKeyHolder, ps, 1); if (logger.isTraceEnabled()) { logger.trace("SQL update affected " + rows + " rows and returned " + generatedKeyHolder.getKeyList().size() + " keys"); } return rows; }, true)); } @Override public int update(String sql, @Nullable PreparedStatementSetter pss) throws DataAccessException { return update(new SimplePreparedStatementCreator(sql), pss); } @Override public int update(String sql, Object[] args, int[] argTypes) throws DataAccessException { return update(sql, newArgTypePreparedStatementSetter(args, argTypes)); } @Override public int update(String sql, @Nullable Object... 
args) throws DataAccessException { return update(sql, newArgPreparedStatementSetter(args)); } @Override public int[] batchUpdate(final PreparedStatementCreator psc, final BatchPreparedStatementSetter pss, final KeyHolder generatedKeyHolder) throws DataAccessException { int[] result = execute(psc, getPreparedStatementCallback(pss, generatedKeyHolder)); Assert.state(result != null, "No result array"); return result; } @Override public int[] batchUpdate(String sql, final BatchPreparedStatementSetter pss) throws DataAccessException { if (logger.isDebugEnabled()) { logger.debug("Executing SQL batch update [" + sql + "]"); } int batchSize = pss.getBatchSize(); if (batchSize == 0) { return new int[0]; } int[] result = execute(sql, getPreparedStatementCallback(pss, null)); Assert.state(result != null, "No result array"); return result; } @Override public int[] batchUpdate(String sql, List<Object[]> batchArgs) throws DataAccessException { return batchUpdate(sql, batchArgs, new int[0]); } @Override public int[] batchUpdate(String sql, List<Object[]> batchArgs, final int[] argTypes) throws DataAccessException { if (batchArgs.isEmpty()) { return new int[0]; } return batchUpdate( sql, new BatchPreparedStatementSetter() { @Override public void setValues(PreparedStatement ps, int i) throws SQLException { Object[] values = batchArgs.get(i); int colIndex = 0; for (Object value : values) { colIndex++; if (value instanceof SqlParameterValue paramValue) { StatementCreatorUtils.setParameterValue(ps, colIndex, paramValue, paramValue.getValue()); } else { int colType; if (argTypes.length < colIndex) { colType = SqlTypeValue.TYPE_UNKNOWN; } else { colType = argTypes[colIndex - 1]; } StatementCreatorUtils.setParameterValue(ps, colIndex, colType, value); } } } @Override public int getBatchSize() { return batchArgs.size(); } }); } @Override public <T> int[][] batchUpdate(String sql, final Collection<T> batchArgs, final int batchSize, final ParameterizedPreparedStatementSetter<T> pss) throws 
DataAccessException { if (logger.isDebugEnabled()) { logger.debug("Executing SQL batch update [" + sql + "] with a batch size of " + batchSize); } int[][] result = execute(sql, (PreparedStatementCallback<int[][]>) ps -> { List<int[]> rowsAffected = new ArrayList<>(); try { boolean batchSupported = JdbcUtils.supportsBatchUpdates(ps.getConnection()); int n = 0; for (T obj : batchArgs) { pss.setValues(ps, obj); n++; if (batchSupported) { ps.addBatch(); if (n % batchSize == 0 || n == batchArgs.size()) { if (logger.isTraceEnabled()) { int batchIdx = (n % batchSize == 0) ? n / batchSize : (n / batchSize) + 1; int items = n - ((n % batchSize == 0) ? n / batchSize - 1 : (n / batchSize)) * batchSize; logger.trace("Sending SQL batch update #" + batchIdx + " with " + items + " items"); } rowsAffected.add(ps.executeBatch()); } } else { int i = ps.executeUpdate(); rowsAffected.add(new int[] {i}); } } int[][] result1 = new int[rowsAffected.size()][]; for (int i = 0; i < result1.length; i++) { result1[i] = rowsAffected.get(i); } return result1; } finally { if (pss instanceof ParameterDisposer parameterDisposer) { parameterDisposer.cleanupParameters(); } } }); Assert.state(result != null, "No result array"); return result; } //------------------------------------------------------------------------- // Methods dealing with callable statements //------------------------------------------------------------------------- @Override @Nullable public <T> T execute(CallableStatementCreator csc, CallableStatementCallback<T> action) throws DataAccessException { Assert.notNull(csc, "CallableStatementCreator must not be null"); Assert.notNull(action, "Callback object must not be null"); if (logger.isDebugEnabled()) { String sql = getSql(csc); logger.debug("Calling stored procedure" + (sql != null ? 
" [" + sql + "]" : "")); } Connection con = DataSourceUtils.getConnection(obtainDataSource()); CallableStatement cs = null; try { cs = csc.createCallableStatement(con); applyStatementSettings(cs); T result = action.doInCallableStatement(cs); handleWarnings(cs); return result; } catch (SQLException ex) { // Release Connection early, to avoid potential connection pool deadlock // in the case when the exception translator hasn't been initialized yet. if (csc instanceof ParameterDisposer parameterDisposer) { parameterDisposer.cleanupParameters(); } if (cs != null) { handleWarnings(cs, ex); } String sql = getSql(csc); csc = null; JdbcUtils.closeStatement(cs); cs = null; DataSourceUtils.releaseConnection(con, getDataSource()); con = null; throw translateException("CallableStatementCallback", sql, ex); } finally { if (csc instanceof ParameterDisposer parameterDisposer) { parameterDisposer.cleanupParameters(); } JdbcUtils.closeStatement(cs); DataSourceUtils.releaseConnection(con, getDataSource()); } } @Override @Nullable public <T> T execute(String callString, CallableStatementCallback<T> action) throws DataAccessException { return execute(new SimpleCallableStatementCreator(callString), action); } @Override public Map<String, Object> call(CallableStatementCreator csc, List<SqlParameter> declaredParameters) throws DataAccessException { final List<SqlParameter> updateCountParameters = new ArrayList<>(); final List<SqlParameter> resultSetParameters = new ArrayList<>(); final List<SqlParameter> callParameters = new ArrayList<>(); for (SqlParameter parameter : declaredParameters) { if (parameter.isResultsParameter()) { if (parameter instanceof SqlReturnResultSet) { resultSetParameters.add(parameter); } else { updateCountParameters.add(parameter); } } else { callParameters.add(parameter); } } Map<String, Object> result = execute(csc, cs -> { boolean retVal = cs.execute(); int updateCount = cs.getUpdateCount(); if (logger.isTraceEnabled()) { 
logger.trace("CallableStatement.execute() returned '" + retVal + "'"); logger.trace("CallableStatement.getUpdateCount() returned " + updateCount); } Map<String, Object> resultsMap = createResultsMap(); if (retVal || updateCount != -1) { resultsMap.putAll(extractReturnedResults(cs, updateCountParameters, resultSetParameters, updateCount)); } resultsMap.putAll(extractOutputParameters(cs, callParameters)); return resultsMap; }); Assert.state(result != null, "No result map"); return result; } /** * Extract returned ResultSets from the completed stored procedure. * @param cs a JDBC wrapper for the stored procedure * @param updateCountParameters the parameter list of declared update count parameters for the stored procedure * @param resultSetParameters the parameter list of declared resultSet parameters for the stored procedure * @return a Map that contains returned results */ protected Map<String, Object> extractReturnedResults(CallableStatement cs, @Nullable List<SqlParameter> updateCountParameters, @Nullable List<SqlParameter> resultSetParameters, int updateCount) throws SQLException { Map<String, Object> results = new LinkedHashMap<>(4); int rsIndex = 0; int updateIndex = 0; boolean moreResults; if (!this.skipResultsProcessing) { do { if (updateCount == -1) { if (resultSetParameters != null && resultSetParameters.size() > rsIndex) { SqlReturnResultSet declaredRsParam = (SqlReturnResultSet) resultSetParameters.get(rsIndex); results.putAll(processResultSet(cs.getResultSet(), declaredRsParam)); rsIndex++; } else { if (!this.skipUndeclaredResults) { String rsName = RETURN_RESULT_SET_PREFIX + (rsIndex + 1); SqlReturnResultSet undeclaredRsParam = new SqlReturnResultSet(rsName, getColumnMapRowMapper()); if (logger.isTraceEnabled()) { logger.trace("Added default SqlReturnResultSet parameter named '" + rsName + "'"); } results.putAll(processResultSet(cs.getResultSet(), undeclaredRsParam)); rsIndex++; } } } else { if (updateCountParameters != null && 
updateCountParameters.size() > updateIndex) { SqlReturnUpdateCount ucParam = (SqlReturnUpdateCount) updateCountParameters.get(updateIndex); String declaredUcName = ucParam.getName(); results.put(declaredUcName, updateCount); updateIndex++; } else { if (!this.skipUndeclaredResults) { String undeclaredName = RETURN_UPDATE_COUNT_PREFIX + (updateIndex + 1); if (logger.isTraceEnabled()) { logger.trace("Added default SqlReturnUpdateCount parameter named '" + undeclaredName + "'"); } results.put(undeclaredName, updateCount); updateIndex++; } } } moreResults = cs.getMoreResults(); updateCount = cs.getUpdateCount(); if (logger.isTraceEnabled()) { logger.trace("CallableStatement.getUpdateCount() returned " + updateCount); } } while (moreResults || updateCount != -1); } return results; } /** * Extract output parameters from the completed stored procedure. * @param cs the JDBC wrapper for the stored procedure * @param parameters parameter list for the stored procedure * @return a Map that contains returned results */ protected Map<String, Object> extractOutputParameters(CallableStatement cs, List<SqlParameter> parameters) throws SQLException { Map<String, Object> results = CollectionUtils.newLinkedHashMap(parameters.size()); int sqlColIndex = 1; for (SqlParameter param : parameters) { if (param instanceof SqlOutParameter outParam) { Assert.state(outParam.getName() != null, "Anonymous parameters not allowed"); SqlReturnType returnType = outParam.getSqlReturnType(); if (returnType != null) { Object out = returnType.getTypeValue(cs, sqlColIndex, outParam.getSqlType(), outParam.getTypeName()); results.put(outParam.getName(), out); } else { Object out = cs.getObject(sqlColIndex); if (out instanceof ResultSet resultSet) { if (outParam.isResultSetSupported()) { results.putAll(processResultSet(resultSet, outParam)); } else { String rsName = outParam.getName(); SqlReturnResultSet rsParam = new SqlReturnResultSet(rsName, getColumnMapRowMapper()); 
results.putAll(processResultSet(resultSet, rsParam)); if (logger.isTraceEnabled()) { logger.trace("Added default SqlReturnResultSet parameter named '" + rsName + "'"); } } } else { results.put(outParam.getName(), out); } } } if (!param.isResultsParameter()) { sqlColIndex++; } } return results; } /** * Process the given ResultSet from a stored procedure. * @param rs the ResultSet to process * @param param the corresponding stored procedure parameter * @return a Map that contains returned results */ protected Map<String, Object> processResultSet( @Nullable ResultSet rs, ResultSetSupportingSqlParameter param) throws SQLException { if (rs != null) { try { if (param.getRowMapper() != null) { RowMapper<?> rowMapper = param.getRowMapper(); Object data = (new RowMapperResultSetExtractor<>(rowMapper)).extractData(rs); return Collections.singletonMap(param.getName(), data); } else if (param.getRowCallbackHandler() != null) { RowCallbackHandler rch = param.getRowCallbackHandler(); (new RowCallbackHandlerResultSetExtractor(rch)).extractData(rs); return Collections.singletonMap(param.getName(), "ResultSet returned from stored procedure was processed"); } else if (param.getResultSetExtractor() != null) { Object data = param.getResultSetExtractor().extractData(rs); return Collections.singletonMap(param.getName(), data); } } finally { JdbcUtils.closeResultSet(rs); } } return Collections.emptyMap(); } //------------------------------------------------------------------------- // Implementation hooks and helper methods //------------------------------------------------------------------------- /** * Create a new RowMapper for reading columns as key-value pairs. * @return the RowMapper to use * @see ColumnMapRowMapper */ protected RowMapper<Map<String, Object>> getColumnMapRowMapper() { return new ColumnMapRowMapper(); } /** * Create a new RowMapper for reading result objects from a single column. 
* @param requiredType the type that each result object is expected to match * @return the RowMapper to use * @see SingleColumnRowMapper */ protected <T> RowMapper<T> getSingleColumnRowMapper(Class<T> requiredType) { return new SingleColumnRowMapper<>(requiredType); } /** * Create a Map instance to be used as the results map. * <p>If {@link #resultsMapCaseInsensitive} has been set to true, * a {@link LinkedCaseInsensitiveMap} will be created; otherwise, a * {@link LinkedHashMap} will be created. * @return the results Map instance * @see #setResultsMapCaseInsensitive * @see #isResultsMapCaseInsensitive */ protected Map<String, Object> createResultsMap() { if (isResultsMapCaseInsensitive()) { return new LinkedCaseInsensitiveMap<>(); } else { return new LinkedHashMap<>(); } } /** * Prepare the given JDBC Statement (or PreparedStatement or CallableStatement), * applying statement settings such as fetch size, max rows, and query timeout. * @param stmt the JDBC Statement to prepare * @throws SQLException if thrown by JDBC API * @see #setFetchSize * @see #setMaxRows * @see #setQueryTimeout * @see org.springframework.jdbc.datasource.DataSourceUtils#applyTransactionTimeout */ protected void applyStatementSettings(Statement stmt) throws SQLException { int fetchSize = getFetchSize(); if (fetchSize != -1) { stmt.setFetchSize(fetchSize); } int maxRows = getMaxRows(); if (maxRows != -1) { stmt.setMaxRows(maxRows); } DataSourceUtils.applyTimeout(stmt, getDataSource(), getQueryTimeout()); } /** * Create a new arg-based PreparedStatementSetter using the args passed in. * <p>By default, we'll create an {@link ArgumentPreparedStatementSetter}. * This method allows for the creation to be overridden by subclasses. 
* @param args object array with arguments * @return the new PreparedStatementSetter to use */ protected PreparedStatementSetter newArgPreparedStatementSetter(@Nullable Object[] args) { return new ArgumentPreparedStatementSetter(args); } /** * Create a new arg-type-based PreparedStatementSetter using the args and types passed in. * <p>By default, we'll create an {@link ArgumentTypePreparedStatementSetter}. * This method allows for the creation to be overridden by subclasses. * @param args object array with arguments * @param argTypes int array of SQLTypes for the associated arguments * @return the new PreparedStatementSetter to use */ protected PreparedStatementSetter newArgTypePreparedStatementSetter(Object[] args, int[] argTypes) { return new ArgumentTypePreparedStatementSetter(args, argTypes); } /** * Handle warnings before propagating a primary {@code SQLException} * from executing the given statement. * <p>Calls regular {@link #handleWarnings(Statement)} but catches * {@link SQLWarningException} in order to chain the {@link SQLWarning} * into the primary exception instead. * @param stmt the current JDBC statement * @param ex the primary exception after failed statement execution * @since 5.3.29 * @see #handleWarnings(Statement) * @see SQLException#setNextException */ protected void handleWarnings(Statement stmt, SQLException ex) { try { handleWarnings(stmt); } catch (SQLWarningException nonIgnoredWarning) { ex.setNextException(nonIgnoredWarning.getSQLWarning()); } catch (SQLException warningsEx) { logger.debug("Failed to retrieve warnings", warningsEx); } catch (Throwable warningsEx) { logger.debug("Failed to process warnings", warningsEx); } } /** * Handle the warnings for the given JDBC statement, if any. * <p>Throws a {@link SQLWarningException} if we're not ignoring warnings, * otherwise logs the warnings at debug level. 
* @param stmt the current JDBC statement * @throws SQLException in case of warnings retrieval failure * @throws SQLWarningException for a concrete warning to raise * (when not ignoring warnings) * @see #setIgnoreWarnings * @see #handleWarnings(SQLWarning) */ protected void handleWarnings(Statement stmt) throws SQLException, SQLWarningException { if (isIgnoreWarnings()) { if (logger.isDebugEnabled()) { SQLWarning warningToLog = stmt.getWarnings(); while (warningToLog != null) { logger.debug("SQLWarning ignored: SQL state '" + warningToLog.getSQLState() + "', error code '" + warningToLog.getErrorCode() + "', message [" + warningToLog.getMessage() + "]"); warningToLog = warningToLog.getNextWarning(); } } } else { handleWarnings(stmt.getWarnings()); } } /** * Throw a {@link SQLWarningException} if encountering an actual warning. * @param warning the warnings object from the current statement. * May be {@code null}, in which case this method does nothing. * @throws SQLWarningException in case of an actual warning to be raised */ protected void handleWarnings(@Nullable SQLWarning warning) throws SQLWarningException { if (warning != null) { throw new SQLWarningException("Warning not ignored", warning); } } /** * Translate the given {@link SQLException} into a generic {@link DataAccessException}. * @param task readable text describing the task being attempted * @param sql the SQL query or update that caused the problem (may be {@code null}) * @param ex the offending {@code SQLException} * @return a DataAccessException wrapping the {@code SQLException} (never {@code null}) * @since 5.0 * @see #getExceptionTranslator() */ protected DataAccessException translateException(String task, @Nullable String sql, SQLException ex) { DataAccessException dae = getExceptionTranslator().translate(task, sql, ex); return (dae != null ? dae : new UncategorizedSQLException(task, sql, ex)); } /** * Determine SQL from potential provider object. 
* @param obj object which is potentially an SqlProvider * @return the SQL string, or {@code null} if not known * @see SqlProvider */ @Nullable private static String getSql(Object obj) { return (obj instanceof SqlProvider sqlProvider ? sqlProvider.getSql() : null); } private static <T> T result(@Nullable T result) { Assert.state(result != null, "No result"); return result; } private static int updateCount(@Nullable Integer result) { Assert.state(result != null, "No update count"); return result; } private void storeGeneratedKeys(KeyHolder generatedKeyHolder, PreparedStatement ps, int rowsExpected) throws SQLException { List<Map<String, Object>> generatedKeys = generatedKeyHolder.getKeyList(); ResultSet keys = ps.getGeneratedKeys(); if (keys != null) { try { RowMapperResultSetExtractor<Map<String, Object>> rse = new RowMapperResultSetExtractor<>(getColumnMapRowMapper(), rowsExpected); generatedKeys.addAll(result(rse.extractData(keys))); } finally { JdbcUtils.closeResultSet(keys); } } } private PreparedStatementCallback<int[]> getPreparedStatementCallback(BatchPreparedStatementSetter pss, @Nullable KeyHolder generatedKeyHolder) { return ps -> { try { int batchSize = pss.getBatchSize(); InterruptibleBatchPreparedStatementSetter ipss = (pss instanceof InterruptibleBatchPreparedStatementSetter ibpss ? 
ibpss : null); if (generatedKeyHolder != null) { generatedKeyHolder.getKeyList().clear(); } if (JdbcUtils.supportsBatchUpdates(ps.getConnection())) { for (int i = 0; i < batchSize; i++) { pss.setValues(ps, i); if (ipss != null && ipss.isBatchExhausted(i)) { break; } ps.addBatch(); } int[] results = ps.executeBatch(); if (generatedKeyHolder != null) { storeGeneratedKeys(generatedKeyHolder, ps, batchSize); } return results; } else { List<Integer> rowsAffected = new ArrayList<>(); for (int i = 0; i < batchSize; i++) { pss.setValues(ps, i); if (ipss != null && ipss.isBatchExhausted(i)) { break; } rowsAffected.add(ps.executeUpdate()); if (generatedKeyHolder != null) { storeGeneratedKeys(generatedKeyHolder, ps, 1); } } int[] rowsAffectedArray = new int[rowsAffected.size()]; for (int i = 0; i < rowsAffectedArray.length; i++) { rowsAffectedArray[i] = rowsAffected.get(i); } return rowsAffectedArray; } } finally { if (pss instanceof ParameterDisposer parameterDisposer) { parameterDisposer.cleanupParameters(); } } }; } /** * Invocation handler that suppresses close calls on JDBC Connections. * Also prepares returned Statement (Prepared/CallbackStatement) objects. * @see java.sql.Connection#close() */ private class CloseSuppressingInvocationHandler implements InvocationHandler { private final Connection target; public CloseSuppressingInvocationHandler(Connection target) { this.target = target; } @Override @Nullable public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { // Invocation on ConnectionProxy interface coming in... return switch (method.getName()) { // Only consider equal when proxies are identical. case "equals" -> (proxy == args[0]); // Use hashCode of Connection proxy. case "hashCode" -> System.identityHashCode(proxy); // Handle close method: suppress, not valid. case "close" -> null; case "isClosed" -> false; // Handle getTargetConnection method: return underlying Connection. 
case "getTargetConnection" -> this.target; case "unwrap" -> (((Class<?>) args[0]).isInstance(proxy) ? proxy : this.target.unwrap((Class<?>) args[0])); case "isWrapperFor" -> (((Class<?>) args[0]).isInstance(proxy) || this.target.isWrapperFor((Class<?>) args[0])); default -> { try { // Invoke method on target Connection. Object retVal = method.invoke(this.target, args); // If return value is a JDBC Statement, apply statement settings // (fetch size, max rows, transaction timeout). if (retVal instanceof Statement statement) { applyStatementSettings(statement); } yield retVal; } catch (InvocationTargetException ex) { throw ex.getTargetException(); } } }; } } /** * Simple adapter for PreparedStatementCreator, allowing to use a plain SQL statement. */ private static class SimplePreparedStatementCreator implements PreparedStatementCreator, SqlProvider { private final String sql; public SimplePreparedStatementCreator(String sql) { Assert.notNull(sql, "SQL must not be null"); this.sql = sql; } @Override public PreparedStatement createPreparedStatement(Connection con) throws SQLException { return con.prepareStatement(this.sql); } @Override public String getSql() { return this.sql; } } /** * Simple adapter for CallableStatementCreator, allowing to use a plain SQL statement. */ private static class SimpleCallableStatementCreator implements CallableStatementCreator, SqlProvider { private final String callString; public SimpleCallableStatementCreator(String callString) { Assert.notNull(callString, "Call string must not be null"); this.callString = callString; } @Override public CallableStatement createCallableStatement(Connection con) throws SQLException { return con.prepareCall(this.callString); } @Override public String getSql() { return this.callString; } } /** * Adapter to enable use of a RowCallbackHandler inside a ResultSetExtractor. 
* <p>Uses a regular ResultSet, so we have to be careful when using it: * We don't use it for navigating since this could lead to unpredictable consequences. */ private static class RowCallbackHandlerResultSetExtractor implements ResultSetExtractor<Object> { private final RowCallbackHandler rch; public RowCallbackHandlerResultSetExtractor(RowCallbackHandler rch) { this.rch = rch; } @Override @Nullable public Object extractData(ResultSet rs) throws SQLException { while (rs.next()) { this.rch.processRow(rs); } return null; } } /** * Spliterator for queryForStream adaptation of a ResultSet to a Stream. * @since 5.3 */ private static class ResultSetSpliterator<T> implements Spliterator<T> { private final ResultSet rs; private final RowMapper<T> rowMapper; private int rowNum = 0; public ResultSetSpliterator(ResultSet rs, RowMapper<T> rowMapper) { this.rs = rs; this.rowMapper = rowMapper; } @Override public boolean tryAdvance(Consumer<? super T> action) { try { if (this.rs.next()) { action.accept(this.rowMapper.mapRow(this.rs, this.rowNum++)); return true; } return false; } catch (SQLException ex) { throw new InvalidResultSetAccessException(ex); } } @Override @Nullable public Spliterator<T> trySplit() { return null; } @Override public long estimateSize() { return Long.MAX_VALUE; } @Override public int characteristics() { return Spliterator.ORDERED; } public Stream<T> stream() { return StreamSupport.stream(this, false); } } }
spring-projects/spring-framework
spring-jdbc/src/main/java/org/springframework/jdbc/core/JdbcTemplate.java
937
package jadx.gui;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import jadx.api.ICodeInfo;
import jadx.api.JadxArgs;
import jadx.api.JadxDecompiler;
import jadx.api.JavaClass;
import jadx.api.JavaNode;
import jadx.api.JavaPackage;
import jadx.api.ResourceFile;
import jadx.api.impl.InMemoryCodeCache;
import jadx.api.metadata.ICodeNodeRef;
import jadx.api.usage.impl.EmptyUsageInfoCache;
import jadx.api.usage.impl.InMemoryUsageInfoCache;
import jadx.core.dex.nodes.ClassNode;
import jadx.core.dex.nodes.ProcessState;
import jadx.core.dex.nodes.RootNode;
import jadx.core.utils.exceptions.JadxRuntimeException;
import jadx.gui.cache.code.CodeStringCache;
import jadx.gui.cache.code.disk.BufferCodeCache;
import jadx.gui.cache.code.disk.DiskCodeCache;
import jadx.gui.cache.usage.UsageInfoCache;
import jadx.gui.plugins.context.CommonGuiPluginsContext;
import jadx.gui.plugins.context.GuiPluginContext;
import jadx.gui.settings.JadxProject;
import jadx.gui.settings.JadxSettings;
import jadx.gui.ui.MainWindow;
import jadx.gui.utils.CacheObject;
import jadx.plugins.tools.JadxExternalPluginsLoader;

import static jadx.core.dex.nodes.ProcessState.GENERATED_AND_UNLOADED;
import static jadx.core.dex.nodes.ProcessState.NOT_LOADED;
import static jadx.core.dex.nodes.ProcessState.PROCESS_COMPLETE;

/**
 * GUI-side wrapper around {@link JadxDecompiler}.
 * <p>
 * Owns the decompiler lifecycle ({@link #open()} / {@link #close()}), wires up the
 * code/usage caches and the GUI plugin context. All mutations of the {@code decompiler}
 * field happen under {@code DECOMPILER_UPDATE_SYNC}.
 */
@SuppressWarnings("ConstantConditions")
public class JadxWrapper {
	private static final Logger LOG = LoggerFactory.getLogger(JadxWrapper.class);

	// Guards creation/teardown of the decompiler and the plugins context
	private static final Object DECOMPILER_UPDATE_SYNC = new Object();

	private final MainWindow mainWindow;
	// volatile: read without the lock in getDecompiler()/getCurrentDecompiler()
	private volatile @Nullable JadxDecompiler decompiler;
	private CommonGuiPluginsContext guiPluginsContext;

	public JadxWrapper(MainWindow mainWindow) {
		this.mainWindow = mainWindow;
	}

	/**
	 * (Re)create the decompiler from the current project and settings and load inputs.
	 * Any previous instance is closed first; on failure the error is logged and the
	 * half-initialized state is torn down via {@link #close()}.
	 */
	public void open() {
		close();
		try {
			synchronized (DECOMPILER_UPDATE_SYNC) {
				JadxProject project = getProject();
				JadxArgs jadxArgs = getSettings().toJadxArgs();
				jadxArgs.setPluginLoader(new JadxExternalPluginsLoader());
				project.fillJadxArgs(jadxArgs);

				decompiler = new JadxDecompiler(jadxArgs);
				// plugins context must exist before load() so the add-plugin listener fires
				initGuiPluginsContext();
				initUsageCache(jadxArgs);
				decompiler.load();
				// code cache needs the loaded root node (see buildBufferedDiskCache)
				initCodeCache();
			}
		} catch (Exception e) {
			LOG.error("Jadx decompiler wrapper init error", e);
			close();
		}
	}

	// TODO: check and move into core package
	/**
	 * Unload generated code for all classes, keeping track of whether each class
	 * had already completed processing (GENERATED_AND_UNLOADED) or not (NOT_LOADED).
	 */
	public void unloadClasses() {
		for (ClassNode cls : getDecompiler().getRoot().getClasses()) {
			ProcessState clsState = cls.getState();
			cls.unload();
			cls.setState(clsState == PROCESS_COMPLETE ? GENERATED_AND_UNLOADED : NOT_LOADED);
		}
	}

	/**
	 * Close the decompiler and reset the plugins context; always clears the GUI cache
	 * object, even if closing failed.
	 */
	public void close() {
		try {
			synchronized (DECOMPILER_UPDATE_SYNC) {
				if (decompiler != null) {
					decompiler.close();
					decompiler = null;
				}
				if (guiPluginsContext != null) {
					resetGuiPluginsContext();
					guiPluginsContext = null;
				}
			}
		} catch (Exception e) {
			LOG.error("Jadx decompiler close error", e);
		} finally {
			mainWindow.getCacheObject().reset();
		}
	}

	// Select the code cache implementation based on the user's settings
	private void initCodeCache() {
		switch (getSettings().getCodeCacheMode()) {
			case MEMORY:
				getArgs().setCodeCache(new InMemoryCodeCache());
				break;
			case DISK_WITH_CACHE:
				// disk cache fronted by an in-memory string cache
				getArgs().setCodeCache(new CodeStringCache(buildBufferedDiskCache()));
				break;
			case DISK:
				getArgs().setCodeCache(buildBufferedDiskCache());
				break;
		}
	}

	private BufferCodeCache buildBufferedDiskCache() {
		DiskCodeCache diskCache = new DiskCodeCache(getDecompiler().getRoot(), getProject().getCacheDir());
		return new BufferCodeCache(diskCache);
	}

	// Select the usage-info cache implementation based on the user's settings
	private void initUsageCache(JadxArgs jadxArgs) {
		switch (getSettings().getUsageCacheMode()) {
			case NONE:
				jadxArgs.setUsageInfoCache(new EmptyUsageInfoCache());
				break;
			case MEMORY:
				jadxArgs.setUsageInfoCache(new InMemoryUsageInfoCache());
				break;
			case DISK:
				jadxArgs.setUsageInfoCache(new UsageInfoCache(getProject().getCacheDir(), jadxArgs.getInputFiles()));
				break;
		}
	}

	// Build the shared plugins context and attach a per-plugin GUI context as plugins register
	private void initGuiPluginsContext() {
		guiPluginsContext = new CommonGuiPluginsContext(mainWindow);
		decompiler.getPluginManager().registerAddPluginListener(pluginContext -> {
			GuiPluginContext guiContext = guiPluginsContext.buildForPlugin(pluginContext);
			pluginContext.setGuiContext(guiContext);
		});
	}

	public CommonGuiPluginsContext getGuiPluginsContext() {
		return guiPluginsContext;
	}

	public void resetGuiPluginsContext() {
		guiPluginsContext.reset();
	}

	/**
	 * Get the complete list of classes
	 */
	public List<JavaClass> getClasses() {
		return getDecompiler().getClasses();
	}

	/**
	 * Get all classes that are not excluded by the excluded packages settings
	 */
	public List<JavaClass> getIncludedClasses() {
		List<JavaClass> classList = getDecompiler().getClasses();
		List<String> excludedPackages = getExcludedPackages();
		if (excludedPackages.isEmpty()) {
			return classList;
		}
		return classList.stream()
				.filter(cls -> isClassIncluded(excludedPackages, cls))
				.collect(Collectors.toList());
	}

	/**
	 * Get all classes that are not excluded by the excluded packages settings including inner classes
	 */
	public List<JavaClass> getIncludedClassesWithInners() {
		List<JavaClass> classes = getDecompiler().getClassesWithInners();
		List<String> excludedPackages = getExcludedPackages();
		if (excludedPackages.isEmpty()) {
			return classes;
		}
		return classes.stream()
				.filter(cls -> isClassIncluded(excludedPackages, cls))
				.collect(Collectors.toList());
	}

	// A class is excluded when its full name equals an exclusion or lies under it as a package prefix
	private static boolean isClassIncluded(List<String> excludedPackages, JavaClass cls) {
		for (String exclude : excludedPackages) {
			String clsFullName = cls.getFullName();
			if (clsFullName.equals(exclude) || clsFullName.startsWith(exclude + '.')) {
				return false;
			}
		}
		return true;
	}

	public List<List<JavaClass>> buildDecompileBatches(List<JavaClass> classes) {
		return getDecompiler().getDecompileScheduler().buildBatches(classes);
	}

	// TODO: move to CLI and filter classes in JadxDecompiler
	/**
	 * Excluded packages as stored in settings: a single space-separated string.
	 */
	public List<String> getExcludedPackages() {
		String excludedPackages = getSettings().getExcludedPackages().trim();
		if (excludedPackages.isEmpty()) {
			return Collections.emptyList();
		}
		return Arrays.asList(excludedPackages.split(" +"));
	}

	public void setExcludedPackages(List<String> packagesToExclude) {
		getSettings().setExcludedPackages(String.join(" ", packagesToExclude).trim());
		getSettings().sync();
	}

	public void addExcludedPackage(String packageToExclude) {
		String newExclusion = getSettings().getExcludedPackages() + ' ' + packageToExclude;
		getSettings().setExcludedPackages(newExclusion.trim());
		getSettings().sync();
	}

	public void removeExcludedPackage(String packageToRemoveFromExclusion) {
		List<String> list = new ArrayList<>(getExcludedPackages());
		list.remove(packageToRemoveFromExclusion);
		getSettings().setExcludedPackages(String.join(" ", list));
		getSettings().sync();
	}

	/**
	 * Snapshot of the current decompiler instance, empty if not open.
	 */
	public Optional<JadxDecompiler> getCurrentDecompiler() {
		synchronized (DECOMPILER_UPDATE_SYNC) {
			return Optional.ofNullable(decompiler);
		}
	}

	/**
	 * TODO: make method private
	 * Do not store JadxDecompiler in fields to not leak old instances
	 */
	public @NotNull JadxDecompiler getDecompiler() {
		if (decompiler == null || decompiler.getRoot() == null) {
			throw new JadxRuntimeException("Decompiler not yet loaded");
		}
		return decompiler;
	}

	// TODO: forbid usage of this method
	public RootNode getRootNode() {
		return getDecompiler().getRoot();
	}

	public void reloadCodeData() {
		getDecompiler().reloadCodeData();
	}

	public JavaNode getJavaNodeByRef(ICodeNodeRef nodeRef) {
		return getDecompiler().getJavaNodeByRef(nodeRef);
	}

	public @Nullable JavaNode getEnclosingNode(ICodeInfo codeInfo, int pos) {
		return getDecompiler().getEnclosingNode(codeInfo, pos);
	}

	public List<JavaPackage> getPackages() {
		return getDecompiler().getPackages();
	}

	public List<ResourceFile> getResources() {
		return getDecompiler().getResources();
	}

	public JadxArgs getArgs() {
		return getDecompiler().getArgs();
	}

	public JadxProject getProject() {
		return mainWindow.getProject();
	}

	public JadxSettings getSettings() {
		return mainWindow.getSettings();
	}

	public CacheObject getCache() {
		return mainWindow.getCacheObject();
	}

	/**
	 * @param fullName Full name of an outer class. Inner classes are not supported.
	 */
	public @Nullable JavaClass searchJavaClassByFullAlias(String fullName) {
		return getDecompiler().getClasses().stream()
				.filter(cls -> cls.getFullName().equals(fullName))
				.findFirst()
				.orElse(null);
	}

	public @Nullable JavaClass searchJavaClassByOrigClassName(String fullName) {
		return getDecompiler().searchJavaClassByOrigFullName(fullName);
	}

	/**
	 * @param rawName Full raw name of an outer class. Inner classes are not supported.
	 */
	public @Nullable JavaClass searchJavaClassByRawName(String rawName) {
		return getDecompiler().getClasses().stream()
				.filter(cls -> cls.getRawName().equals(rawName))
				.findFirst()
				.orElse(null);
	}
}
skylot/jadx
jadx-gui/src/main/java/jadx/gui/JadxWrapper.java
938
package jadx.gui.treemodel;

import java.util.Comparator;
import java.util.List;
import java.util.Set;

import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JPopupMenu;

import org.fife.ui.rsyntaxtextarea.SyntaxConstants;
import org.jetbrains.annotations.NotNull;

import jadx.api.JavaField;
import jadx.api.JavaNode;
import jadx.api.data.ICodeRename;
import jadx.api.data.impl.JadxCodeRename;
import jadx.api.data.impl.JadxNodeRef;
import jadx.api.metadata.ICodeNodeRef;
import jadx.core.deobf.NameMapper;
import jadx.core.dex.attributes.AFlag;
import jadx.core.dex.info.AccessInfo;
import jadx.gui.ui.MainWindow;
import jadx.gui.ui.dialog.RenameDialog;
import jadx.gui.utils.Icons;
import jadx.gui.utils.UiUtils;

/**
 * Tree node representing a single class field; supports renaming via {@link JRenameNode}.
 */
public class JField extends JNode implements JRenameNode {
	private static final long serialVersionUID = 1712572192106793359L;

	private static final ImageIcon ICON_FLD_PRI = UiUtils.openSvgIcon("nodes/privateField");
	private static final ImageIcon ICON_FLD_PRO = UiUtils.openSvgIcon("nodes/protectedField");
	private static final ImageIcon ICON_FLD_PUB = UiUtils.openSvgIcon("nodes/publicField");

	private final transient JavaField field;
	private final transient JClass jParent;

	public JField(JavaField javaField, JClass jClass) {
		this.field = javaField;
		this.jParent = jClass;
	}

	public JavaField getJavaField() {
		return (JavaField) getJavaNode();
	}

	@Override
	public JavaNode getJavaNode() {
		return field;
	}

	@Override
	public ICodeNodeRef getCodeNodeRef() {
		return field.getFieldNode();
	}

	@Override
	public JClass getJParent() {
		return jParent;
	}

	@Override
	public JClass getRootClass() {
		return jParent.getRootClass();
	}

	@Override
	public boolean canRename() {
		// fields explicitly flagged DONT_RENAME are locked
		return field.getFieldNode().contains(AFlag.DONT_RENAME) == false;
	}

	@Override
	public JPopupMenu onTreePopupMenu(MainWindow mainWindow) {
		return RenameDialog.buildRenamePopup(mainWindow, this);
	}

	@Override
	public String getTitle() {
		return makeLongStringHtml();
	}

	@Override
	public ICodeRename buildCodeRename(String newName, Set<ICodeRename> renames) {
		return new JadxCodeRename(JadxNodeRef.forFld(field), newName);
	}

	@Override
	public boolean isValidName(String newName) {
		return NameMapper.isValidIdentifier(newName);
	}

	@Override
	public void removeAlias() {
		field.removeAlias();
	}

	@Override
	public void addUpdateNodes(List<JavaNode> toUpdate) {
		// refresh the field itself plus every usage site
		toUpdate.add(field);
		toUpdate.addAll(field.getUseIn());
	}

	@Override
	public void reload(MainWindow mainWindow) {
		mainWindow.reloadTree();
	}

	@Override
	public Icon getIcon() {
		AccessInfo accessInfo = field.getAccessFlags();
		return UiUtils.makeIcon(accessInfo, ICON_FLD_PUB, ICON_FLD_PRI, ICON_FLD_PRO, Icons.FIELD);
	}

	@Override
	public String getSyntaxName() {
		return SyntaxConstants.SYNTAX_STYLE_JAVA;
	}

	@Override
	public String makeString() {
		return UiUtils.typeFormat(field.getName(), field.getType());
	}

	@Override
	public String makeStringHtml() {
		return UiUtils.typeFormatHtml(field.getName(), field.getType());
	}

	@Override
	public String makeLongString() {
		return UiUtils.typeFormat(field.getFullName(), field.getType());
	}

	@Override
	public String makeLongStringHtml() {
		return UiUtils.typeFormatHtml(field.getFullName(), field.getType());
	}

	@Override
	public String getTooltip() {
		String escapedType = UiUtils.escapeHtml(field.getType().toString());
		String escapedName = UiUtils.escapeHtml(field.getName());
		return UiUtils.wrapHtml(escapedType + ' ' + escapedName);
	}

	@Override
	public String makeDescString() {
		String typeStr = UiUtils.typeStr(field.getType());
		return typeStr + " " + field.getName();
	}

	@Override
	public boolean disableHtml() {
		return false;
	}

	@Override
	public boolean hasDescString() {
		return false;
	}

	@Override
	public int hashCode() {
		return field.hashCode();
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o instanceof JField) {
			return field.equals(((JField) o).field);
		}
		return false;
	}

	// order: by parent class, then by node name, then by position in code
	private static final Comparator<JField> COMPARATOR = Comparator
			.comparing(JField::getJParent)
			.thenComparing(JNode::getName)
			.thenComparingInt(JField::getPos);

	public int compareToFld(@NotNull JField other) {
		return COMPARATOR.compare(this, other);
	}

	@Override
	public int compareTo(@NotNull JNode other) {
		if (other instanceof JField) {
			return compareToFld(((JField) other));
		}
		return super.compareTo(other);
	}
}
skylot/jadx
jadx-gui/src/main/java/jadx/gui/treemodel/JField.java
939
package jadx.gui.treemodel;

import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JPopupMenu;

import org.fife.ui.rsyntaxtextarea.SyntaxConstants;
import org.jetbrains.annotations.NotNull;

import jadx.api.JavaMethod;
import jadx.api.JavaNode;
import jadx.api.data.ICodeRename;
import jadx.api.data.impl.JadxCodeRename;
import jadx.api.data.impl.JadxNodeRef;
import jadx.api.metadata.ICodeNodeRef;
import jadx.core.deobf.NameMapper;
import jadx.core.dex.attributes.AFlag;
import jadx.core.dex.info.AccessInfo;
import jadx.core.dex.instructions.args.ArgType;
import jadx.gui.ui.MainWindow;
import jadx.gui.ui.dialog.RenameDialog;
import jadx.gui.utils.Icons;
import jadx.gui.utils.OverlayIcon;
import jadx.gui.utils.UiUtils;

/**
 * Tree node representing a method; supports renaming via {@link JRenameNode},
 * including propagation to override-related methods.
 */
public class JMethod extends JNode implements JRenameNode {
	private static final long serialVersionUID = 3834526867464663751L;

	private static final ImageIcon ICON_METHOD_ABSTRACT = UiUtils.openSvgIcon("nodes/abstractMethod");
	private static final ImageIcon ICON_METHOD_PRIVATE = UiUtils.openSvgIcon("nodes/privateMethod");
	private static final ImageIcon ICON_METHOD_PROTECTED = UiUtils.openSvgIcon("nodes/protectedMethod");
	private static final ImageIcon ICON_METHOD_PUBLIC = UiUtils.openSvgIcon("nodes/publicMethod");
	private static final ImageIcon ICON_METHOD_CONSTRUCTOR = UiUtils.openSvgIcon("nodes/constructorMethod");
	private static final ImageIcon ICON_METHOD_SYNC = UiUtils.openSvgIcon("nodes/methodReference");

	private final transient JavaMethod mth;
	private final transient JClass jParent;

	public JMethod(JavaMethod javaMethod, JClass jClass) {
		this.mth = javaMethod;
		this.jParent = jClass;
	}

	@Override
	public JavaNode getJavaNode() {
		return mth;
	}

	public JavaMethod getJavaMethod() {
		return mth;
	}

	@Override
	public ICodeNodeRef getCodeNodeRef() {
		return mth.getMethodNode();
	}

	@Override
	public JClass getJParent() {
		return jParent;
	}

	public ArgType getReturnType() {
		return mth.getReturnType();
	}

	@Override
	public JClass getRootClass() {
		return jParent.getRootClass();
	}

	/**
	 * Pick the base icon for this method, then overlay 'final'/'static' markers.
	 * NOTE: the if-chain is deliberately not else-if — each later match overwrites
	 * the previous one, so precedence is: synchronized > protected > private >
	 * public > constructor > abstract > plain method.
	 */
	@Override
	public Icon getIcon() {
		AccessInfo accessFlags = mth.getAccessFlags();
		Icon icon = Icons.METHOD;
		if (accessFlags.isAbstract()) {
			icon = ICON_METHOD_ABSTRACT;
		}
		if (accessFlags.isConstructor()) {
			icon = ICON_METHOD_CONSTRUCTOR;
		}
		if (accessFlags.isPublic()) {
			icon = ICON_METHOD_PUBLIC;
		}
		if (accessFlags.isPrivate()) {
			icon = ICON_METHOD_PRIVATE;
		}
		if (accessFlags.isProtected()) {
			icon = ICON_METHOD_PROTECTED;
		}
		if (accessFlags.isSynchronized()) {
			icon = ICON_METHOD_SYNC;
		}
		OverlayIcon overIcon = new OverlayIcon(icon);
		if (accessFlags.isFinal()) {
			overIcon.add(Icons.FINAL);
		}
		if (accessFlags.isStatic()) {
			overIcon.add(Icons.STATIC);
		}
		return overIcon;
	}

	@Override
	public String getSyntaxName() {
		return SyntaxConstants.SYNTAX_STYLE_JAVA;
	}

	@Override
	public JPopupMenu onTreePopupMenu(MainWindow mainWindow) {
		return RenameDialog.buildRenamePopup(mainWindow, this);
	}

	/**
	 * Display name without return type: "{...}" for a class initializer,
	 * the declaring class name for a constructor, otherwise "name(argTypes...)".
	 */
	String makeBaseString() {
		if (mth.isClassInit()) {
			return "{...}";
		}
		StringBuilder base = new StringBuilder();
		if (mth.isConstructor()) {
			base.append(mth.getDeclaringClass().getName());
		} else {
			base.append(mth.getName());
		}
		base.append('(');
		for (Iterator<ArgType> it = mth.getArguments().iterator(); it.hasNext();) {
			base.append(UiUtils.typeStr(it.next()));
			if (it.hasNext()) {
				base.append(", ");
			}
		}
		base.append(')');
		return base.toString();
	}

	@Override
	public String getName() {
		return mth.getName();
	}

	@Override
	public String getTitle() {
		return makeLongStringHtml();
	}

	@Override
	public boolean canRename() {
		// class initializers and methods flagged DONT_RENAME cannot be renamed
		if (mth.isClassInit()) {
			return false;
		}
		return !mth.getMethodNode().contains(AFlag.DONT_RENAME);
	}

	@Override
	public JRenameNode replace() {
		if (mth.isConstructor()) {
			// rename class instead constructor
			return jParent;
		}
		return this;
	}

	/**
	 * Build the rename record; pending renames of override-related methods are
	 * dropped first so this rename stays the single source of truth for the group.
	 */
	@Override
	public ICodeRename buildCodeRename(String newName, Set<ICodeRename> renames) {
		List<JavaMethod> relatedMethods = mth.getOverrideRelatedMethods();
		if (!relatedMethods.isEmpty()) {
			for (JavaMethod relatedMethod : relatedMethods) {
				renames.remove(new JadxCodeRename(JadxNodeRef.forMth(relatedMethod), ""));
			}
		}
		return new JadxCodeRename(JadxNodeRef.forMth(mth), newName);
	}

	@Override
	public boolean isValidName(String newName) {
		return NameMapper.isValidIdentifier(newName);
	}

	@Override
	public void removeAlias() {
		mth.removeAlias();
	}

	/**
	 * Collect nodes to refresh after a rename: this method, its usages, all
	 * override-related methods and their usages.
	 */
	@Override
	public void addUpdateNodes(List<JavaNode> toUpdate) {
		toUpdate.add(mth);
		toUpdate.addAll(mth.getUseIn());
		List<JavaMethod> overrideRelatedMethods = mth.getOverrideRelatedMethods();
		toUpdate.addAll(overrideRelatedMethods);
		for (JavaMethod ovrdMth : overrideRelatedMethods) {
			toUpdate.addAll(ovrdMth.getUseIn());
		}
	}

	@Override
	public void reload(MainWindow mainWindow) {
		mainWindow.reloadTree();
	}

	@Override
	public String makeString() {
		return UiUtils.typeFormat(makeBaseString(), getReturnType());
	}

	@Override
	public String makeStringHtml() {
		return UiUtils.typeFormatHtml(makeBaseString(), getReturnType());
	}

	@Override
	public String makeLongString() {
		String name = mth.getDeclaringClass().getFullName() + '.' + makeBaseString();
		return UiUtils.typeFormat(name, getReturnType());
	}

	@Override
	public String makeLongStringHtml() {
		String name = mth.getDeclaringClass().getFullName() + '.' + makeBaseString();
		return UiUtils.typeFormatHtml(name, getReturnType());
	}

	@Override
	public boolean disableHtml() {
		return false;
	}

	@Override
	public String makeDescString() {
		return UiUtils.typeStr(getReturnType()) + " " + makeBaseString();
	}

	@Override
	public boolean hasDescString() {
		return false;
	}

	@Override
	public int getPos() {
		return mth.getDefPos();
	}

	@Override
	public int hashCode() {
		return mth.hashCode();
	}

	@Override
	public boolean equals(Object o) {
		return this == o || o instanceof JMethod && mth.equals(((JMethod) o).mth);
	}

	// order: by parent class, then by method short id, then by position in code
	private static final Comparator<JMethod> COMPARATOR = Comparator
			.comparing(JMethod::getJParent)
			.thenComparing(jMethod -> jMethod.mth.getMethodNode().getMethodInfo().getShortId())
			.thenComparingInt(JMethod::getPos);

	public int compareToMth(@NotNull JMethod other) {
		return COMPARATOR.compare(this, other);
	}

	@Override
	public int compareTo(@NotNull JNode other) {
		if (other instanceof JMethod) {
			return compareToMth(((JMethod) other));
		}
		if (other instanceof JClass) {
			JClass cls = (JClass) other;
			int cmp = jParent.compareToCls(cls);
			if (cmp != 0) {
				return cmp;
			}
			// same class: a method sorts after the class node itself
			return 1;
		}
		return super.compareTo(other);
	}
}
skylot/jadx
jadx-gui/src/main/java/jadx/gui/treemodel/JMethod.java
940
package jadx.gui.treemodel;

import java.util.List;

import javax.swing.Icon;
import javax.swing.JPopupMenu;

import jadx.api.JavaNode;
import jadx.api.JavaPackage;
import jadx.gui.ui.MainWindow;
import jadx.gui.ui.popupmenu.JPackagePopupMenu;
import jadx.gui.utils.Icons;

import static jadx.gui.utils.UiUtils.escapeHtml;
import static jadx.gui.utils.UiUtils.fadeHtml;
import static jadx.gui.utils.UiUtils.wrapHtml;

/**
 * Tree node representing a package: holds child classes and sub-packages and
 * rebuilds its subtree on {@link #update()}.
 */
public class JPackage extends JNode {
	private static final long serialVersionUID = -4120718634156839804L;

	// Rendered for the default (empty-name) package
	public static final String PACKAGE_DEFAULT_HTML_STR = wrapHtml(fadeHtml(escapeHtml("<empty>")));

	private final JavaPackage pkg;
	private final boolean enabled;
	private final List<JClass> classes;
	private final List<JPackage> subPackages;
	/**
	 * Package created by full package alias, don't have a raw package reference.
	 * `pkg` field point to the closest raw package leaf.
	 */
	private final boolean synthetic;

	// display name, mutable to follow package renames
	private String name;

	public JPackage(JavaPackage pkg, boolean enabled, List<JClass> classes, List<JPackage> subPackages, boolean synthetic) {
		this.pkg = pkg;
		this.enabled = enabled;
		this.classes = classes;
		this.subPackages = subPackages;
		this.synthetic = synthetic;
	}

	/**
	 * Rebuild this node's children: recursively update sub-packages first, then
	 * classes. A disabled package keeps no children.
	 */
	public void update() {
		removeAllChildren();
		if (isEnabled()) {
			// fix: loop variable renamed from 'pkg' to stop shadowing the 'pkg' field
			for (JPackage subPkg : subPackages) {
				subPkg.update();
				add(subPkg);
			}
			for (JClass cls : classes) {
				cls.update();
				add(cls);
			}
		}
	}

	@Override
	public JPopupMenu onTreePopupMenu(MainWindow mainWindow) {
		return new JPackagePopupMenu(mainWindow, this);
	}

	public JavaPackage getPkg() {
		return pkg;
	}

	// consistency: sibling nodes (JField, JVariable) annotate this override
	@Override
	public JavaNode getJavaNode() {
		return pkg;
	}

	@Override
	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public List<JPackage> getSubPackages() {
		return subPackages;
	}

	public List<JClass> getClasses() {
		return classes;
	}

	public boolean isEnabled() {
		return enabled;
	}

	public boolean isSynthetic() {
		return synthetic;
	}

	@Override
	public Icon getIcon() {
		return Icons.PACKAGE;
	}

	@Override
	public JClass getJParent() {
		return null;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		return pkg.equals(((JPackage) o).pkg);
	}

	@Override
	public int hashCode() {
		return pkg.hashCode();
	}

	@Override
	public String makeString() {
		return name;
	}

	@Override
	public String makeStringHtml() {
		if (name.isEmpty()) {
			return PACKAGE_DEFAULT_HTML_STR;
		}
		return name;
	}

	@Override
	public boolean disableHtml() {
		if (name.isEmpty()) {
			// show PACKAGE_DEFAULT_HTML_STR for empty package
			return false;
		}
		return true;
	}

	@Override
	public String makeLongString() {
		return pkg.getFullName();
	}

	@Override
	public String toString() {
		return name;
	}
}
skylot/jadx
jadx-gui/src/main/java/jadx/gui/treemodel/JPackage.java
942
package jadx.gui.treemodel;

import java.util.List;
import java.util.Set;

import javax.swing.Icon;

import jadx.api.JavaNode;
import jadx.api.JavaVariable;
import jadx.api.data.ICodeRename;
import jadx.api.data.impl.JadxCodeRef;
import jadx.api.data.impl.JadxCodeRename;
import jadx.api.data.impl.JadxNodeRef;
import jadx.api.metadata.ICodeNodeRef;
import jadx.core.deobf.NameMapper;
import jadx.gui.ui.MainWindow;
import jadx.gui.utils.UiUtils;

/**
 * Tree node representing a local variable inside a method; rename support is
 * provided through {@link JRenameNode}.
 */
public class JVariable extends JNode implements JRenameNode {
	private static final long serialVersionUID = -3002100457834453783L;

	private final JMethod jMth;
	private final JavaVariable var;

	public JVariable(JMethod jMth, JavaVariable var) {
		this.jMth = jMth;
		this.var = var;
	}

	public JavaVariable getJavaVarNode() {
		return var;
	}

	@Override
	public JavaNode getJavaNode() {
		return var;
	}

	@Override
	public JClass getRootClass() {
		return jMth.getRootClass();
	}

	@Override
	public ICodeNodeRef getCodeNodeRef() {
		return var.getVarNode();
	}

	@Override
	public JClass getJParent() {
		return jMth.getJParent();
	}

	@Override
	public int getPos() {
		return var.getDefPos();
	}

	@Override
	public Icon getIcon() {
		// variables have no dedicated icon
		return null;
	}

	@Override
	public String makeString() {
		return var.getName();
	}

	@Override
	public String makeLongString() {
		return var.getFullName();
	}

	@Override
	public String makeLongStringHtml() {
		return UiUtils.typeFormatHtml(var.getName(), var.getType());
	}

	@Override
	public boolean disableHtml() {
		return false;
	}

	@Override
	public String getTooltip() {
		// include register and SSA version for disambiguation, e.g. "x (r2v1)"
		StringBuilder label = new StringBuilder(var.getName());
		label.append(" (r").append(var.getReg()).append('v').append(var.getSsa()).append(')');
		String escapedType = UiUtils.escapeHtml(var.getType().toString());
		return UiUtils.wrapHtml(escapedType + ' ' + UiUtils.escapeHtml(label.toString()));
	}

	@Override
	public boolean canRename() {
		return var.getName() != null;
	}

	@Override
	public String getTitle() {
		return makeLongStringHtml();
	}

	@Override
	public boolean isValidName(String newName) {
		return NameMapper.isValidIdentifier(newName);
	}

	@Override
	public ICodeRename buildCodeRename(String newName, Set<ICodeRename> renames) {
		JadxNodeRef mthRef = JadxNodeRef.forMth(var.getMth());
		return new JadxCodeRename(mthRef, JadxCodeRef.forVar(var), newName);
	}

	@Override
	public void removeAlias() {
		var.removeAlias();
	}

	@Override
	public void addUpdateNodes(List<JavaNode> toUpdate) {
		// only the enclosing method needs a refresh
		toUpdate.add(var.getMth());
	}

	@Override
	public void reload(MainWindow mainWindow) {
		// no tree reload needed for a variable rename
	}
}
skylot/jadx
jadx-gui/src/main/java/jadx/gui/treemodel/JVariable.java
943
package jadx.gui.treemodel; import java.util.List; import java.util.Set; import javax.swing.Icon; import jadx.api.JavaNode; import jadx.api.data.ICodeRename; import jadx.gui.ui.MainWindow; public interface JRenameNode { JavaNode getJavaNode(); String getTitle(); String getName(); Icon getIcon(); boolean canRename(); default JRenameNode replace() { return this; } ICodeRename buildCodeRename(String newName, Set<ICodeRename> renames); boolean isValidName(String newName); void removeAlias(); void addUpdateNodes(List<JavaNode> toUpdate); void reload(MainWindow mainWindow); }
skylot/jadx
jadx-gui/src/main/java/jadx/gui/treemodel/JRenameNode.java
944
package jadx.gui.ui.codearea;

import java.awt.Point;
import java.awt.event.InputEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.Objects;

import javax.swing.event.PopupMenuEvent;

import org.fife.ui.rsyntaxtextarea.RSyntaxDocument;
import org.fife.ui.rsyntaxtextarea.Token;
import org.fife.ui.rsyntaxtextarea.TokenTypes;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import jadx.api.ICodeInfo;
import jadx.api.JavaClass;
import jadx.api.JavaNode;
import jadx.api.metadata.ICodeAnnotation;
import jadx.gui.JadxWrapper;
import jadx.gui.settings.JadxProject;
import jadx.gui.treemodel.JClass;
import jadx.gui.treemodel.JNode;
import jadx.gui.treemodel.JResource;
import jadx.gui.ui.MainWindow;
import jadx.gui.ui.panel.ContentPanel;
import jadx.gui.utils.CaretPositionFix;
import jadx.gui.utils.DefaultPopupMenuListener;
import jadx.gui.utils.JNodeCache;
import jadx.gui.utils.JumpPosition;
import jadx.gui.utils.UiUtils;
import jadx.gui.utils.shortcut.ShortcutsController;

/**
 * The {@link AbstractCodeArea} implementation used for displaying Java code and text based
 * resources (e.g. AndroidManifest.xml)
 */
public final class CodeArea extends AbstractCodeArea {
	private static final Logger LOG = LoggerFactory.getLogger(CodeArea.class);
	private static final long serialVersionUID = 6312736869579635796L;

	// Cached decompiled code; invalidated on refresh() and dispose().
	private @Nullable ICodeInfo cachedCodeInfo;
	private final ShortcutsController shortcutsController;

	CodeArea(ContentPanel contentPanel, JNode node) {
		super(contentPanel, node);
		this.shortcutsController = getMainWindow().getShortcutsController();
		setSyntaxEditingStyle(node.getSyntaxName());
		boolean isJavaCode = node instanceof JClass;
		if (isJavaCode) {
			// Custom token maker enables jadx-specific token handling for Java code.
			((RSyntaxDocument) getDocument()).setSyntaxStyle(new JadxTokenMaker(this));
			addMenuItems();
		}
		if (node instanceof JResource && node.makeString().endsWith(".json")) {
			addMenuForJsonFile();
		}
		setHyperlinksEnabled(true);
		setCodeFoldingEnabled(true);
		// Ctrl+click follows references.
		setLinkScanningMask(InputEvent.CTRL_DOWN_MASK);
		CodeLinkGenerator codeLinkGenerator = new CodeLinkGenerator(this);
		setLinkGenerator(codeLinkGenerator);
		addMouseListener(new MouseAdapter() {
			@Override
			public void mouseClicked(MouseEvent e) {
				if (e.isControlDown() || jumpOnDoubleClick(e)) {
					navToDecl(e.getPoint());
				}
			}
		});
		if (isJavaCode) {
			// Highlight the identifier under the mouse for Java code only.
			addMouseMotionListener(new MouseHoverHighlighter(this, codeLinkGenerator));
		}
	}

	private boolean jumpOnDoubleClick(MouseEvent e) {
		return e.getClickCount() == 2 && getMainWindow().getSettings().isJumpOnDoubleClick();
	}

	// Jump to the declaration of the node at the clicked point, if any.
	private void navToDecl(Point point) {
		int offs = viewToModel2D(point);
		JNode node = getJNodeAtOffset(adjustOffsetForWordToken(offs));
		if (node != null) {
			contentPanel.getTabbedPane().codeJump(node);
		}
	}

	@Override
	public ICodeInfo getCodeInfo() {
		if (cachedCodeInfo == null) {
			if (isDisposed()) {
				LOG.debug("CodeArea used after dispose!");
				return ICodeInfo.EMPTY;
			}
			cachedCodeInfo = Objects.requireNonNull(node.getCodeInfo());
		}
		return cachedCodeInfo;
	}

	@Override
	public void load() {
		// Load lazily: only populate the text area once.
		if (getText().isEmpty()) {
			setText(getCodeInfo().getCodeStr());
			setCaretPosition(0);
			setLoaded();
		}
	}

	@Override
	public void refresh() {
		// Drop the cache so getCodeInfo() re-fetches fresh code.
		cachedCodeInfo = null;
		setText(getCodeInfo().getCodeStr());
	}

	// Build the right-click context menu for Java code.
	private void addMenuItems() {
		ShortcutsController shortcutsController = getMainWindow().getShortcutsController();
		JNodePopupBuilder popup = new JNodePopupBuilder(this, getPopupMenu(), shortcutsController);
		popup.addSeparator();
		popup.add(new FindUsageAction(this));
		popup.add(new GoToDeclarationAction(this));
		popup.add(new CommentAction(this));
		popup.add(new CommentSearchAction(this));
		popup.add(new RenameAction(this));
		popup.addSeparator();
		popup.add(new FridaAction(this));
		popup.add(new XposedAction(this));

		getMainWindow().getWrapper().getGuiPluginsContext().appendPopupMenus(this, popup);

		// move caret on mouse right button click
		popup.getMenu().addPopupMenuListener(new DefaultPopupMenuListener() {
			@Override
			public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
				CodeArea codeArea = CodeArea.this;
				if (codeArea.getSelectedText() == null) {
					int offset = UiUtils.getOffsetAtMousePosition(codeArea);
					if (offset >= 0) {
						codeArea.setCaretPosition(offset);
					}
				}
			}
		});
	}

	// Minimal context menu for JSON resources (prettify only).
	private void addMenuForJsonFile() {
		ShortcutsController shortcutsController = getMainWindow().getShortcutsController();
		JNodePopupBuilder popup = new JNodePopupBuilder(this, getPopupMenu(), shortcutsController);
		popup.addSeparator();
		popup.add(new JsonPrettifyAction(this));
	}

	/**
	 * Search start of word token at specified offset
	 *
	 * @return -1 if no word token found
	 */
	public int adjustOffsetForWordToken(int offset) {
		Token token = getWordTokenAtOffset(offset);
		if (token == null) {
			return -1;
		}
		int type = token.getType();
		if (node instanceof JClass) {
			if (type == TokenTypes.IDENTIFIER || type == TokenTypes.FUNCTION) {
				return token.getOffset();
			}
			if (type == TokenTypes.ANNOTATION && token.length() > 1) {
				// skip leading '@' of the annotation token
				return token.getOffset() + 1;
			}
		} else if (type == TokenTypes.MARKUP_TAG_ATTRIBUTE_VALUE) {
			return token.getOffset() + 1; // skip quote at start (")
		}
		return -1;
	}

	/**
	 * Search node by offset in {@code jCls} code and return its definition position
	 * (useful for jumps from usage)
	 */
	@Nullable
	public JumpPosition getDefPosForNodeAtOffset(int offset) {
		if (offset == -1) {
			return null;
		}
		JavaNode foundNode = getJavaNodeAtOffset(offset);
		if (foundNode == null) {
			return null;
		}
		if (foundNode == node.getJavaNode()) {
			// current node
			return new JumpPosition(node);
		}
		JNode jNode = convertJavaNode(foundNode);
		return new JumpPosition(jNode);
	}

	// Wrap a decompiler JavaNode into a (cached) GUI JNode.
	private JNode convertJavaNode(JavaNode javaNode) {
		JNodeCache nodeCache = getMainWindow().getCacheObject().getNodeCache();
		return nodeCache.makeFrom(javaNode);
	}

	@Nullable
	public JNode getNodeUnderCaret() {
		int caretPos = getCaretPosition();
		return getJNodeAtOffset(adjustOffsetForWordToken(caretPos));
	}

	@Nullable
	public JNode getEnclosingNodeUnderCaret() {
		int caretPos = getCaretPosition();
		int start = adjustOffsetForWordToken(caretPos);
		if (start == -1) {
			// fall back to the raw caret position when no word token was found
			start = caretPos;
		}
		return getEnclosingJNodeAtOffset(start);
	}

	@Nullable
	public JNode getNodeUnderMouse() {
		Point pos = UiUtils.getMousePosition(this);
		return getJNodeAtOffset(adjustOffsetForWordToken(viewToModel2D(pos)));
	}

	@Nullable
	public JNode getEnclosingNodeUnderMouse() {
		Point pos = UiUtils.getMousePosition(this);
		return getEnclosingJNodeAtOffset(adjustOffsetForWordToken(viewToModel2D(pos)));
	}

	@Nullable
	public JNode getEnclosingJNodeAtOffset(int offset) {
		JavaNode javaNode = getEnclosingJavaNode(offset);
		if (javaNode != null) {
			return convertJavaNode(javaNode);
		}
		return null;
	}

	@Nullable
	public JNode getJNodeAtOffset(int offset) {
		JavaNode javaNode = getJavaNodeAtOffset(offset);
		if (javaNode != null) {
			return convertJavaNode(javaNode);
		}
		return null;
	}

	/**
	 * Search referenced java node by offset in {@code jCls} code
	 */
	public JavaNode getJavaNodeAtOffset(int offset) {
		if (offset == -1) {
			return null;
		}
		try {
			return getJadxWrapper().getDecompiler().getJavaNodeAtPosition(getCodeInfo(), offset);
		} catch (Exception e) {
			LOG.error("Can't get java node by offset: {}", offset, e);
		}
		return null;
	}

	public JavaNode getClosestJavaNode(int offset) {
		if (offset == -1) {
			return null;
		}
		try {
			return getJadxWrapper().getDecompiler().getClosestJavaNode(getCodeInfo(), offset);
		} catch (Exception e) {
			LOG.error("Can't get java node by offset: {}", offset, e);
			return null;
		}
	}

	public JavaNode getEnclosingJavaNode(int offset) {
		if (offset == -1) {
			return null;
		}
		try {
			return getJadxWrapper().getDecompiler().getEnclosingNode(getCodeInfo(), offset);
		} catch (Exception e) {
			LOG.error("Can't get java node by offset: {}", offset, e);
			return null;
		}
	}

	// Returns the class at the given position, or null when the position
	// is not annotated as a class reference (or metadata is missing).
	public JavaClass getJavaClassIfAtPos(int pos) {
		try {
			ICodeInfo codeInfo = getCodeInfo();
			if (codeInfo.hasMetadata()) {
				ICodeAnnotation ann = codeInfo.getCodeMetadata().getAt(pos);
				if (ann != null && ann.getAnnType() == ICodeAnnotation.AnnType.CLASS) {
					return (JavaClass) getJadxWrapper().getDecompiler().getJavaNodeByCodeAnnotation(codeInfo, ann);
				}
			}
		} catch (Exception e) {
			LOG.error("Can't get java node by offset: {}", pos, e);
		}
		return null;
	}

	// Reload the decompiled class, keeping the caret in place across the reload.
	public void refreshClass() {
		if (node instanceof JClass) {
			JClass cls = node.getRootClass();
			try {
				CaretPositionFix caretFix = new CaretPositionFix(this);
				caretFix.save();
				cachedCodeInfo = cls.reload(getMainWindow().getCacheObject());
				ClassCodeContentPanel codeContentPanel = (ClassCodeContentPanel) this.contentPanel;
				codeContentPanel.getTabbedPane().refresh(cls);
				codeContentPanel.getJavaCodePanel().refresh(caretFix);
			} catch (Exception e) {
				LOG.error("Failed to reload class: {}", cls.getFullName(), e);
			}
		}
	}

	public MainWindow getMainWindow() {
		return contentPanel.getTabbedPane().getMainWindow();
	}

	public JadxWrapper getJadxWrapper() {
		return getMainWindow().getWrapper();
	}

	public JadxProject getProject() {
		return getMainWindow().getProject();
	}

	@Override
	public void dispose() {
		shortcutsController.unbindActionsForComponent(this);
		super.dispose();
		// Release cached code to free memory.
		cachedCodeInfo = null;
	}
}
skylot/jadx
jadx-gui/src/main/java/jadx/gui/ui/codearea/CodeArea.java
945
/*
 * Copyright 2012-2024 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.build.bom;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.function.Function;

import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.artifact.versioning.VersionRange;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.dsl.DependencyHandler;
import org.gradle.api.artifacts.result.DependencyResult;

import org.springframework.boot.build.bom.bomr.version.DependencyVersion;

/**
 * A collection of modules, Maven plugins, and Maven boms that are versioned and released
 * together.
 *
 * @author Andy Wilkinson
 */
public class Library {

	private final String name;

	private final String calendarName;

	private final LibraryVersion version;

	private final List<Group> groups;

	// Gradle property name holding this library's version; null for Spring Boot itself.
	private final String versionProperty;

	private final List<ProhibitedVersion> prohibitedVersions;

	private final boolean considerSnapshots;

	private final VersionAlignment versionAlignment;

	private final String alignsWithBom;

	private final String linkRootName;

	// Link name -> factory producing the URL for the current library version.
	private final Map<String, Function<LibraryVersion, String>> links;

	/**
	 * Create a new {@code Library} with the given {@code name}, {@code version}, and
	 * {@code groups}.
	 * @param name name of the library
	 * @param calendarName name of the library as it appears in the Spring Calendar. May
	 * be {@code null} in which case the {@code name} is used.
	 * @param version version of the library
	 * @param groups groups in the library
	 * @param prohibitedVersions version of the library that are prohibited
	 * @param considerSnapshots whether to consider snapshots
	 * @param versionAlignment version alignment, if any, for the library
	 * @param alignsWithBom the coordinates of the bom, if any, that this library should
	 * align with
	 * @param linkRootName the root name to use when generating link variable or
	 * {@code null} to generate one based on the library {@code name}
	 * @param links a list of HTTP links relevant to the library
	 */
	public Library(String name, String calendarName, LibraryVersion version, List<Group> groups,
			List<ProhibitedVersion> prohibitedVersions, boolean considerSnapshots, VersionAlignment versionAlignment,
			String alignsWithBom, String linkRootName, Map<String, Function<LibraryVersion, String>> links) {
		this.name = name;
		this.calendarName = (calendarName != null) ? calendarName : name;
		this.version = version;
		this.groups = groups;
		// "Spring Boot" manages its own version; all other libraries get a
		// derived property name such as "spring-framework.version".
		this.versionProperty = "Spring Boot".equals(name) ? null
				: name.toLowerCase(Locale.ENGLISH).replace(' ', '-') + ".version";
		this.prohibitedVersions = prohibitedVersions;
		this.considerSnapshots = considerSnapshots;
		this.versionAlignment = versionAlignment;
		this.alignsWithBom = alignsWithBom;
		this.linkRootName = (linkRootName != null) ? linkRootName : generateLinkRootName(name);
		this.links = Collections.unmodifiableMap(links);
	}

	// NOTE(review): toLowerCase() is locale-sensitive (default locale); elsewhere this
	// class uses Locale.ENGLISH — consider an explicit Locale here too. Verify intent.
	private static String generateLinkRootName(String name) {
		return name.replace("-", "").replace(" ", "-").toLowerCase();
	}

	public String getName() {
		return this.name;
	}

	public String getCalendarName() {
		return this.calendarName;
	}

	public LibraryVersion getVersion() {
		return this.version;
	}

	public List<Group> getGroups() {
		return this.groups;
	}

	public String getVersionProperty() {
		return this.versionProperty;
	}

	public List<ProhibitedVersion> getProhibitedVersions() {
		return this.prohibitedVersions;
	}

	public boolean isConsiderSnapshots() {
		return this.considerSnapshots;
	}

	public VersionAlignment getVersionAlignment() {
		return this.versionAlignment;
	}

	public String getLinkRootName() {
		return this.linkRootName;
	}

	public String getAlignsWithBom() {
		return this.alignsWithBom;
	}

	// Resolve each link factory against the current version; sorted by link name.
	public Map<String, String> getLinks() {
		Map<String, String> links = new TreeMap<>();
		this.links.forEach((name, linkFactory) -> links.put(name, linkFactory.apply(this.version)));
		return Collections.unmodifiableMap(links);
	}

	/**
	 * A version or range of versions that are prohibited from being used in a bom.
	 */
	public static class ProhibitedVersion {

		private final VersionRange range;

		private final List<String> startsWith;

		private final List<String> endsWith;

		private final List<String> contains;

		private final String reason;

		public ProhibitedVersion(VersionRange range, List<String> startsWith, List<String> endsWith,
				List<String> contains, String reason) {
			this.range = range;
			this.startsWith = startsWith;
			this.endsWith = endsWith;
			this.contains = contains;
			this.reason = reason;
		}

		public VersionRange getRange() {
			return this.range;
		}

		public List<String> getStartsWith() {
			return this.startsWith;
		}

		public List<String> getEndsWith() {
			return this.endsWith;
		}

		public List<String> getContains() {
			return this.contains;
		}

		public String getReason() {
			return this.reason;
		}

		// A candidate is prohibited if it matches ANY of the configured criteria.
		public boolean isProhibited(String candidate) {
			boolean result = false;
			result = result || (this.range != null && this.range.containsVersion(new DefaultArtifactVersion(candidate)));
			result = result || this.startsWith.stream().anyMatch(candidate::startsWith);
			result = result || this.endsWith.stream().anyMatch(candidate::endsWith);
			result = result || this.contains.stream().anyMatch(candidate::contains);
			return result;
		}

	}

	/**
	 * The version of a library, with helpers for formatting and decomposing it.
	 */
	public static class LibraryVersion {

		private final DependencyVersion version;

		public LibraryVersion(DependencyVersion version) {
			this.version = version;
		}

		public DependencyVersion getVersion() {
			return this.version;
		}

		public int[] componentInts() {
			return Arrays.stream(parts()).mapToInt(Integer::parseInt).toArray();
		}

		public String major() {
			return parts()[0];
		}

		public String minor() {
			return parts()[1];
		}

		public String patch() {
			return parts()[2];
		}

		@Override
		public String toString() {
			return this.version.toString();
		}

		public String toString(String separator) {
			return this.version.toString().replace(".", separator);
		}

		// Antora versions use only major.minor, with a -SNAPSHOT suffix preserved.
		public String forAntora() {
			String[] parts = parts();
			String result = parts[0] + "." + parts[1];
			if (toString().endsWith("SNAPSHOT")) {
				result += "-SNAPSHOT";
			}
			return result;
		}

		// Split on '.' and '-' (e.g. "3.2.0-SNAPSHOT" -> [3, 2, 0, SNAPSHOT]).
		private String[] parts() {
			return toString().split("[.-]");
		}

	}

	/**
	 * A collection of modules, Maven plugins, and Maven boms with the same group ID.
	 */
	public static class Group {

		private final String id;

		private final List<Module> modules;

		private final List<String> plugins;

		private final List<String> boms;

		public Group(String id, List<Module> modules, List<String> plugins, List<String> boms) {
			this.id = id;
			this.modules = modules;
			this.plugins = plugins;
			this.boms = boms;
		}

		public String getId() {
			return this.id;
		}

		public List<Module> getModules() {
			return this.modules;
		}

		public List<String> getPlugins() {
			return this.plugins;
		}

		public List<String> getBoms() {
			return this.boms;
		}

	}

	/**
	 * A module in a group.
	 */
	public static class Module {

		private final String name;

		private final String type;

		// Never null; empty string when no classifier was given.
		private final String classifier;

		private final List<Exclusion> exclusions;

		public Module(String name) {
			this(name, Collections.emptyList());
		}

		public Module(String name, String type) {
			this(name, type, null, Collections.emptyList());
		}

		public Module(String name, List<Exclusion> exclusions) {
			this(name, null, null, exclusions);
		}

		public Module(String name, String type, String classifier, List<Exclusion> exclusions) {
			this.name = name;
			this.type = type;
			this.classifier = (classifier != null) ? classifier : "";
			this.exclusions = exclusions;
		}

		public String getName() {
			return this.name;
		}

		public String getClassifier() {
			return this.classifier;
		}

		public String getType() {
			return this.type;
		}

		public List<Exclusion> getExclusions() {
			return this.exclusions;
		}

	}

	/**
	 * An exclusion of a dependency identified by its group ID and artifact ID.
	 */
	public static class Exclusion {

		private final String groupId;

		private final String artifactId;

		public Exclusion(String groupId, String artifactId) {
			this.groupId = groupId;
			this.artifactId = artifactId;
		}

		public String getGroupId() {
			return this.groupId;
		}

		public String getArtifactId() {
			return this.artifactId;
		}

	}

	/**
	 * Version alignment for a library.
	 */
	public static class VersionAlignment {

		private final String from;

		private final String managedBy;

		private final Project project;

		private final List<Library> libraries;

		private final List<Group> groups;

		// Lazily resolved and cached by resolve().
		private Set<String> alignedVersions;

		VersionAlignment(String from, String managedBy, Project project, List<Library> libraries, List<Group> groups) {
			this.from = from;
			this.managedBy = managedBy;
			this.project = project;
			this.libraries = libraries;
			this.groups = groups;
		}

		// Resolve the set of versions that this library's modules/plugins are
		// managed to by the managing library's boms. Result is cached.
		public Set<String> resolve() {
			if (this.managedBy == null) {
				throw new IllegalStateException("Version alignment without managedBy is not supported");
			}
			if (this.alignedVersions != null) {
				return this.alignedVersions;
			}
			Library managingLibrary = this.libraries.stream()
				.filter((candidate) -> this.managedBy.equals(candidate.getName()))
				.findFirst()
				.orElseThrow(() -> new IllegalStateException("Managing library '" + this.managedBy + "' not found."));
			Map<String, String> versions = resolveAligningDependencies(managingLibrary);
			Set<String> versionsInLibrary = new HashSet<>();
			for (Group group : this.groups) {
				for (Module module : group.getModules()) {
					String version = versions.get(group.getId() + ":" + module.getName());
					if (version != null) {
						versionsInLibrary.add(version);
					}
				}
				for (String plugin : group.getPlugins()) {
					String version = versions.get(group.getId() + ":" + plugin);
					if (version != null) {
						versionsInLibrary.add(version);
					}
				}
			}
			this.alignedVersions = versionsInLibrary;
			return this.alignedVersions;
		}

		// Build a detached configuration of the manager's bom platforms plus the
		// "from" dependency, resolve it, and map module coordinates to versions.
		private Map<String, String> resolveAligningDependencies(Library manager) {
			DependencyHandler dependencyHandler = this.project.getDependencies();
			List<Dependency> boms = manager.getGroups()
				.stream()
				.flatMap((group) -> group.getBoms()
					.stream()
					.map((bom) -> dependencyHandler
						.platform(group.getId() + ":" + bom + ":" + manager.getVersion().getVersion())))
				.toList();
			List<Dependency> dependencies = new ArrayList<>();
			dependencies.addAll(boms);
			dependencies.add(dependencyHandler.create(this.from));
			Configuration alignmentConfiguration = this.project.getConfigurations()
				.detachedConfiguration(dependencies.toArray(new Dependency[0]));
			Map<String, String> versions = new HashMap<>();
			for (DependencyResult dependency : alignmentConfiguration.getIncoming()
				.getResolutionResult()
				.getAllDependencies()) {
				versions.put(dependency.getFrom().getModuleVersion().getModule().toString(),
						dependency.getFrom().getModuleVersion().getVersion());
			}
			return versions;
		}

		String getFrom() {
			return this.from;
		}

		String getManagedBy() {
			return this.managedBy;
		}

		@Override
		public String toString() {
			return "version from dependencies of " + this.from + " that is managed by " + this.managedBy;
		}

	}

}
spring-projects/spring-boot
buildSrc/src/main/java/org/springframework/boot/build/bom/Library.java
946
/*
 * This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
 *
 * The MIT License
 * Copyright © 2014-2022 Ilkka Seppälä
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.iluwatar.combinator;

import java.util.ArrayList;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Factory methods that combine simple {@link Finder}s into more complex ones.
 */
public class Finders {

  private Finders() {
  }

  /**
   * Finder to find a complex query.
   *
   * @param query to find
   * @param orQuery alternative to find
   * @param notQuery exclude from search
   * @return new finder
   */
  public static Finder advancedFinder(String query, String orQuery, String notQuery) {
    // Build the combinator step by step: (query OR orQuery) AND NOT notQuery.
    var base = Finder.contains(query);
    var withAlternative = base.or(Finder.contains(orQuery));
    return withAlternative.not(Finder.contains(notQuery));
  }

  /**
   * Filtered finder looking a query with excluded queries as well.
   *
   * @param query to find
   * @param excludeQueries to exclude
   * @return new finder
   */
  public static Finder filteredFinder(String query, String... excludeQueries) {
    var result = Finder.contains(query);
    for (var excluded : excludeQueries) {
      result = result.not(Finder.contains(excluded));
    }
    return result;
  }

  /**
   * Specialized query. Every next query is looked in previous result.
   *
   * @param queries array with queries
   * @return new finder
   */
  public static Finder specializedFinder(String... queries) {
    // Start from the multiplicative identity (all lines match) and AND-chain.
    var combined = identMult();
    for (var term : queries) {
      combined = combined.and(Finder.contains(term));
    }
    return combined;
  }

  /**
   * Expanded query. Looking for alternatives.
   *
   * @param queries array with queries.
   * @return new finder
   */
  public static Finder expandedFinder(String... queries) {
    // Start from the additive identity (nothing matches) and OR-chain.
    var combined = identSum();
    for (var term : queries) {
      combined = combined.or(Finder.contains(term));
    }
    return combined;
  }

  // Identity for AND-chains: returns every line of the input text.
  private static Finder identMult() {
    return txt -> {
      String[] rows = txt.split("\n");
      return Stream.of(rows).collect(Collectors.toList());
    };
  }

  // Identity for OR-chains: matches nothing.
  private static Finder identSum() {
    return txt -> new ArrayList<>();
  }
}
smedals/java-design-patterns
combinator/src/main/java/com/iluwatar/combinator/Finders.java
947
/* * This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt). * * The MIT License * Copyright © 2014-2022 Ilkka Seppälä * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.iluwatar.cqrs.constants; /** * Class to define the constants. */ public class AppConstants { public static final String E_EVANS = "eEvans"; public static final String J_BLOCH = "jBloch"; public static final String M_FOWLER = "mFowler"; public static final String USER_NAME = "username"; }
smedals/java-design-patterns
cqrs/src/main/java/com/iluwatar/cqrs/constants/AppConstants.java
948
/*
 * This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
 *
 * The MIT License
 * Copyright © 2014-2022 Ilkka Seppälä
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.iluwatar.dao;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Optional;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.Consumer;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import javax.sql.DataSource;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

/**
 * An implementation of {@link CustomerDao} that persists customers in RDBMS.
 */
@Slf4j
@RequiredArgsConstructor
public class DbCustomerDao implements CustomerDao {

  private final DataSource dataSource;

  /**
   * Get all customers as Java Stream.
   *
   * @return a lazily populated stream of customers. Note the stream returned must be closed to free
   *     all the acquired resources. The stream keeps an open connection to the database till it is
   *     complete or is closed manually.
   */
  @Override
  public Stream<Customer> getAll() throws Exception {
    try {
      // Connection/statement/resultSet are intentionally NOT try-with-resources:
      // they must outlive this method and are released via the stream's onClose hook.
      var connection = getConnection();
      var statement = connection.prepareStatement("SELECT * FROM CUSTOMERS"); // NOSONAR
      var resultSet = statement.executeQuery(); // NOSONAR
      return StreamSupport.stream(new Spliterators.AbstractSpliterator<Customer>(Long.MAX_VALUE,
          Spliterator.ORDERED) {

        @Override
        public boolean tryAdvance(Consumer<? super Customer> action) {
          try {
            if (!resultSet.next()) {
              // No more rows: end of stream.
              return false;
            }
            action.accept(createCustomer(resultSet));
            return true;
          } catch (SQLException e) {
            // tryAdvance cannot declare checked exceptions; wrap.
            throw new RuntimeException(e); // NOSONAR
          }
        }
      }, false).onClose(() -> mutedClose(connection, statement, resultSet));
    } catch (SQLException e) {
      throw new CustomException(e.getMessage(), e);
    }
  }

  private Connection getConnection() throws SQLException {
    return dataSource.getConnection();
  }

  // Best-effort close of the three stream resources; failures are only logged
  // because this runs from Stream.onClose where throwing would mask the caller's path.
  private void mutedClose(Connection connection, PreparedStatement statement, ResultSet resultSet) {
    try {
      resultSet.close();
      statement.close();
      connection.close();
    } catch (SQLException e) {
      LOGGER.info("Exception thrown " + e.getMessage());
    }
  }

  // Map the current row of the result set to a Customer.
  private Customer createCustomer(ResultSet resultSet) throws SQLException {
    return new Customer(resultSet.getInt("ID"),
        resultSet.getString("FNAME"),
        resultSet.getString("LNAME"));
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public Optional<Customer> getById(int id) throws Exception {
    ResultSet resultSet = null;
    try (var connection = getConnection();
         var statement = connection.prepareStatement("SELECT * FROM CUSTOMERS WHERE ID = ?")) {
      statement.setInt(1, id);
      resultSet = statement.executeQuery();
      if (resultSet.next()) {
        return Optional.of(createCustomer(resultSet));
      } else {
        return Optional.empty();
      }
    } catch (SQLException ex) {
      throw new CustomException(ex.getMessage(), ex);
    } finally {
      // ResultSet is closed manually since it is created after the try-with-resources header.
      if (resultSet != null) {
        resultSet.close();
      }
    }
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public boolean add(Customer customer) throws Exception {
    // Insert is skipped (returns false) when the id already exists.
    if (getById(customer.getId()).isPresent()) {
      return false;
    }

    try (var connection = getConnection();
         var statement = connection.prepareStatement("INSERT INTO CUSTOMERS VALUES (?,?,?)")) {
      statement.setInt(1, customer.getId());
      statement.setString(2, customer.getFirstName());
      statement.setString(3, customer.getLastName());
      statement.execute();
      return true;
    } catch (SQLException ex) {
      throw new CustomException(ex.getMessage(), ex);
    }
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public boolean update(Customer customer) throws Exception {
    try (var connection = getConnection();
         var statement = connection
             .prepareStatement("UPDATE CUSTOMERS SET FNAME = ?, LNAME = ? WHERE ID = ?")) {
      statement.setString(1, customer.getFirstName());
      statement.setString(2, customer.getLastName());
      statement.setInt(3, customer.getId());
      // True when at least one row was changed.
      return statement.executeUpdate() > 0;
    } catch (SQLException ex) {
      throw new CustomException(ex.getMessage(), ex);
    }
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public boolean delete(Customer customer) throws Exception {
    try (var connection = getConnection();
         var statement = connection.prepareStatement("DELETE FROM CUSTOMERS WHERE ID = ?")) {
      statement.setInt(1, customer.getId());
      return statement.executeUpdate() > 0;
    } catch (SQLException ex) {
      throw new CustomException(ex.getMessage(), ex);
    }
  }
}
smedals/java-design-patterns
dao/src/main/java/com/iluwatar/dao/DbCustomerDao.java
949
// Copyright (c) 2011-present, Facebook, Inc. All rights reserved. // This source code is licensed under both the GPLv2 (found in the // COPYING file in the root directory) and Apache 2.0 License // (found in the LICENSE.Apache file in the root directory). package org.rocksdb; /** * The logical mapping of tickers defined in rocksdb::Tickers. * * Java byte value mappings don't align 1:1 to the c++ values. c++ rocksdb::Tickers enumeration type * is uint32_t and java org.rocksdb.TickerType is byte, this causes mapping issues when * rocksdb::Tickers value is greater then 127 (0x7F) for jbyte jni interface as range greater is not * available. Without breaking interface in minor versions, value mappings for * org.rocksdb.TickerType leverage full byte range [-128 (-0x80), (0x7F)]. Newer tickers added * should descend into negative values until TICKER_ENUM_MAX reaches -128 (-0x80). */ public enum TickerType { /** * total block cache misses * * REQUIRES: BLOCK_CACHE_MISS == BLOCK_CACHE_INDEX_MISS + * BLOCK_CACHE_FILTER_MISS + * BLOCK_CACHE_DATA_MISS; */ BLOCK_CACHE_MISS((byte) 0x0), /** * total block cache hit * * REQUIRES: BLOCK_CACHE_HIT == BLOCK_CACHE_INDEX_HIT + * BLOCK_CACHE_FILTER_HIT + * BLOCK_CACHE_DATA_HIT; */ BLOCK_CACHE_HIT((byte) 0x1), BLOCK_CACHE_ADD((byte) 0x2), /** * # of failures when adding blocks to block cache. */ BLOCK_CACHE_ADD_FAILURES((byte) 0x3), /** * # of times cache miss when accessing index block from block cache. */ BLOCK_CACHE_INDEX_MISS((byte) 0x4), /** * # of times cache hit when accessing index block from block cache. */ BLOCK_CACHE_INDEX_HIT((byte) 0x5), /** * # of index blocks added to block cache. */ BLOCK_CACHE_INDEX_ADD((byte) 0x6), /** * # of bytes of index blocks inserted into cache */ BLOCK_CACHE_INDEX_BYTES_INSERT((byte) 0x7), /** * # of bytes of index block erased from cache */ BLOCK_CACHE_INDEX_BYTES_EVICT((byte) 0x8), /** * # of times cache miss when accessing filter block from block cache. 
*/ BLOCK_CACHE_FILTER_MISS((byte) 0x9), /** * # of times cache hit when accessing filter block from block cache. */ BLOCK_CACHE_FILTER_HIT((byte) 0xA), /** * # of filter blocks added to block cache. */ BLOCK_CACHE_FILTER_ADD((byte) 0xB), /** * # of bytes of bloom filter blocks inserted into cache */ BLOCK_CACHE_FILTER_BYTES_INSERT((byte) 0xC), /** * # of bytes of bloom filter block erased from cache */ BLOCK_CACHE_FILTER_BYTES_EVICT((byte) 0xD), /** * # of times cache miss when accessing data block from block cache. */ BLOCK_CACHE_DATA_MISS((byte) 0xE), /** * # of times cache hit when accessing data block from block cache. */ BLOCK_CACHE_DATA_HIT((byte) 0xF), /** * # of data blocks added to block cache. */ BLOCK_CACHE_DATA_ADD((byte) 0x10), /** * # of bytes of data blocks inserted into cache */ BLOCK_CACHE_DATA_BYTES_INSERT((byte) 0x11), /** * # of bytes read from cache. */ BLOCK_CACHE_BYTES_READ((byte) 0x12), /** * # of bytes written into cache. */ BLOCK_CACHE_BYTES_WRITE((byte) 0x13), /** * # of times bloom filter has avoided file reads. */ BLOOM_FILTER_USEFUL((byte) 0x14), /** * # persistent cache hit */ PERSISTENT_CACHE_HIT((byte) 0x15), /** * # persistent cache miss */ PERSISTENT_CACHE_MISS((byte) 0x16), /** * # total simulation block cache hits */ SIM_BLOCK_CACHE_HIT((byte) 0x17), /** * # total simulation block cache misses */ SIM_BLOCK_CACHE_MISS((byte) 0x18), /** * # of memtable hits. */ MEMTABLE_HIT((byte) 0x19), /** * # of memtable misses. */ MEMTABLE_MISS((byte) 0x1A), /** * # of Get() queries served by L0 */ GET_HIT_L0((byte) 0x1B), /** * # of Get() queries served by L1 */ GET_HIT_L1((byte) 0x1C), /** * # of Get() queries served by L2 and up */ GET_HIT_L2_AND_UP((byte) 0x1D), /** * COMPACTION_KEY_DROP_* count the reasons for key drop during compaction * There are 4 reasons currently. */ /** * key was written with a newer value. */ COMPACTION_KEY_DROP_NEWER_ENTRY((byte) 0x1E), /** * Also includes keys dropped for range del. * The key is obsolete. 
*/ COMPACTION_KEY_DROP_OBSOLETE((byte) 0x1F), /** * key was covered by a range tombstone. */ COMPACTION_KEY_DROP_RANGE_DEL((byte) 0x20), /** * User compaction function has dropped the key. */ COMPACTION_KEY_DROP_USER((byte) 0x21), /** * all keys in range were deleted. */ COMPACTION_RANGE_DEL_DROP_OBSOLETE((byte) 0x22), /** * Number of keys written to the database via the Put and Write call's. */ NUMBER_KEYS_WRITTEN((byte) 0x23), /** * Number of Keys read. */ NUMBER_KEYS_READ((byte) 0x24), /** * Number keys updated, if inplace update is enabled */ NUMBER_KEYS_UPDATED((byte) 0x25), /** * The number of uncompressed bytes issued by DB::Put(), DB::Delete(),\ * DB::Merge(), and DB::Write(). */ BYTES_WRITTEN((byte) 0x26), /** * The number of uncompressed bytes read from DB::Get(). It could be * either from memtables, cache, or table files. * * For the number of logical bytes read from DB::MultiGet(), * please use {@link #NUMBER_MULTIGET_BYTES_READ}. */ BYTES_READ((byte) 0x27), /** * The number of calls to seek. */ NUMBER_DB_SEEK((byte) 0x28), /** * The number of calls to next. */ NUMBER_DB_NEXT((byte) 0x29), /** * The number of calls to prev. */ NUMBER_DB_PREV((byte) 0x2A), /** * The number of calls to seek that returned data. */ NUMBER_DB_SEEK_FOUND((byte) 0x2B), /** * The number of calls to next that returned data. */ NUMBER_DB_NEXT_FOUND((byte) 0x2C), /** * The number of calls to prev that returned data. */ NUMBER_DB_PREV_FOUND((byte) 0x2D), /** * The number of uncompressed bytes read from an iterator. * Includes size of key and value. */ ITER_BYTES_READ((byte) 0x2E), NO_FILE_CLOSES((byte) 0x2F), NO_FILE_OPENS((byte) 0x30), NO_FILE_ERRORS((byte) 0x31), /** * Time system had to wait to do LO-L1 compactions. * * @deprecated */ @Deprecated STALL_L0_SLOWDOWN_MICROS((byte) 0x32), /** * Time system had to wait to move memtable to L1. * * @deprecated */ @Deprecated STALL_MEMTABLE_COMPACTION_MICROS((byte) 0x33), /** * write throttle because of too many files in L0. 
* * @deprecated */ @Deprecated STALL_L0_NUM_FILES_MICROS((byte) 0x34), /** * Writer has to wait for compaction or flush to finish. */ STALL_MICROS((byte) 0x35), /** * The wait time for db mutex. * * Disabled by default. To enable it set stats level to {@link StatsLevel#ALL} */ DB_MUTEX_WAIT_MICROS((byte) 0x36), RATE_LIMIT_DELAY_MILLIS((byte) 0x37), /** * Number of iterators created. * */ NO_ITERATORS((byte) 0x38), /** * Number of MultiGet calls. */ NUMBER_MULTIGET_CALLS((byte) 0x39), /** * Number of MultiGet keys read. */ NUMBER_MULTIGET_KEYS_READ((byte) 0x3A), /** * Number of MultiGet bytes read. */ NUMBER_MULTIGET_BYTES_READ((byte) 0x3B), /** * Number of deletes records that were not required to be * written to storage because key does not exist. */ NUMBER_FILTERED_DELETES((byte) 0x3C), NUMBER_MERGE_FAILURES((byte) 0x3D), /** * Number of times bloom was checked before creating iterator on a * file, and the number of times the check was useful in avoiding * iterator creation (and thus likely IOPs). */ BLOOM_FILTER_PREFIX_CHECKED((byte) 0x3E), BLOOM_FILTER_PREFIX_USEFUL((byte) 0x3F), /** * Number of times we had to reseek inside an iteration to skip * over large number of keys with same userkey. */ NUMBER_OF_RESEEKS_IN_ITERATION((byte) 0x40), /** * Record the number of calls to {@link RocksDB#getUpdatesSince(long)}. Useful to keep track of * transaction log iterator refreshes. */ GET_UPDATES_SINCE_CALLS((byte) 0x41), /** * Miss in the compressed block cache. */ BLOCK_CACHE_COMPRESSED_MISS((byte) 0x42), /** * Hit in the compressed block cache. */ BLOCK_CACHE_COMPRESSED_HIT((byte) 0x43), /** * Number of blocks added to compressed block cache. */ BLOCK_CACHE_COMPRESSED_ADD((byte) 0x44), /** * Number of failures when adding blocks to compressed block cache. */ BLOCK_CACHE_COMPRESSED_ADD_FAILURES((byte) 0x45), /** * Number of times WAL sync is done. */ WAL_FILE_SYNCED((byte) 0x46), /** * Number of bytes written to WAL. 
*/ WAL_FILE_BYTES((byte) 0x47), /** * Writes can be processed by requesting thread or by the thread at the * head of the writers queue. */ WRITE_DONE_BY_SELF((byte) 0x48), /** * Equivalent to writes done for others. */ WRITE_DONE_BY_OTHER((byte) 0x49), /** * Number of writes ending up with timed-out. */ WRITE_TIMEDOUT((byte) 0x4A), /** * Number of Write calls that request WAL. */ WRITE_WITH_WAL((byte) 0x4B), /** * Bytes read during compaction. */ COMPACT_READ_BYTES((byte) 0x4C), /** * Bytes written during compaction. */ COMPACT_WRITE_BYTES((byte) 0x4D), /** * Bytes written during flush. */ FLUSH_WRITE_BYTES((byte) 0x4E), /** * Number of table's properties loaded directly from file, without creating * table reader object. */ NUMBER_DIRECT_LOAD_TABLE_PROPERTIES((byte) 0x4F), NUMBER_SUPERVERSION_ACQUIRES((byte) 0x50), NUMBER_SUPERVERSION_RELEASES((byte) 0x51), NUMBER_SUPERVERSION_CLEANUPS((byte) 0x52), /** * # of compressions/decompressions executed */ NUMBER_BLOCK_COMPRESSED((byte) 0x53), NUMBER_BLOCK_DECOMPRESSED((byte) 0x54), NUMBER_BLOCK_NOT_COMPRESSED((byte) 0x55), MERGE_OPERATION_TOTAL_TIME((byte) 0x56), FILTER_OPERATION_TOTAL_TIME((byte) 0x57), /** * Row cache. */ ROW_CACHE_HIT((byte) 0x58), ROW_CACHE_MISS((byte) 0x59), /** * Read amplification statistics. * * Read amplification can be calculated using this formula * (READ_AMP_TOTAL_READ_BYTES / READ_AMP_ESTIMATE_USEFUL_BYTES) * * REQUIRES: ReadOptions::read_amp_bytes_per_bit to be enabled */ /** * Estimate of total bytes actually used. */ READ_AMP_ESTIMATE_USEFUL_BYTES((byte) 0x5A), /** * Total size of loaded data blocks. */ READ_AMP_TOTAL_READ_BYTES((byte) 0x5B), /** * Number of refill intervals where rate limiter's bytes are fully consumed. 
*/ NUMBER_RATE_LIMITER_DRAINS((byte) 0x5C), /** * Number of internal skipped during iteration */ NUMBER_ITER_SKIP((byte) 0x5D), /** * Number of MultiGet keys found (vs number requested) */ NUMBER_MULTIGET_KEYS_FOUND((byte) 0x5E), // -0x01 to fixate the new value that incorrectly changed TICKER_ENUM_MAX /** * Number of iterators created. */ NO_ITERATOR_CREATED((byte) -0x01), /** * Number of iterators deleted. */ NO_ITERATOR_DELETED((byte) 0x60), /** * Deletions obsoleted before bottom level due to file gap optimization. */ COMPACTION_OPTIMIZED_DEL_DROP_OBSOLETE((byte) 0x61), /** * If a compaction was cancelled in sfm to prevent ENOSPC */ COMPACTION_CANCELLED((byte) 0x62), /** * # of times bloom FullFilter has not avoided the reads. */ BLOOM_FILTER_FULL_POSITIVE((byte) 0x63), /** * # of times bloom FullFilter has not avoided the reads and data actually * exist. */ BLOOM_FILTER_FULL_TRUE_POSITIVE((byte) 0x64), /** * BlobDB specific stats * # of Put/PutTTL/PutUntil to BlobDB. */ BLOB_DB_NUM_PUT((byte) 0x65), /** * # of Write to BlobDB. */ BLOB_DB_NUM_WRITE((byte) 0x66), /** * # of Get to BlobDB. */ BLOB_DB_NUM_GET((byte) 0x67), /** * # of MultiGet to BlobDB. */ BLOB_DB_NUM_MULTIGET((byte) 0x68), /** * # of Seek/SeekToFirst/SeekToLast/SeekForPrev to BlobDB iterator. */ BLOB_DB_NUM_SEEK((byte) 0x69), /** * # of Next to BlobDB iterator. */ BLOB_DB_NUM_NEXT((byte) 0x6A), /** * # of Prev to BlobDB iterator. */ BLOB_DB_NUM_PREV((byte) 0x6B), /** * # of keys written to BlobDB. */ BLOB_DB_NUM_KEYS_WRITTEN((byte) 0x6C), /** * # of keys read from BlobDB. */ BLOB_DB_NUM_KEYS_READ((byte) 0x6D), /** * # of bytes (key + value) written to BlobDB. */ BLOB_DB_BYTES_WRITTEN((byte) 0x6E), /** * # of bytes (keys + value) read from BlobDB. */ BLOB_DB_BYTES_READ((byte) 0x6F), /** * # of keys written by BlobDB as non-TTL inlined value. */ BLOB_DB_WRITE_INLINED((byte) 0x70), /** * # of keys written by BlobDB as TTL inlined value. 
*/ BLOB_DB_WRITE_INLINED_TTL((byte) 0x71), /** * # of keys written by BlobDB as non-TTL blob value. */ BLOB_DB_WRITE_BLOB((byte) 0x72), /** * # of keys written by BlobDB as TTL blob value. */ BLOB_DB_WRITE_BLOB_TTL((byte) 0x73), /** * # of bytes written to blob file. */ BLOB_DB_BLOB_FILE_BYTES_WRITTEN((byte) 0x74), /** * # of bytes read from blob file. */ BLOB_DB_BLOB_FILE_BYTES_READ((byte) 0x75), /** * # of times a blob files being synced. */ BLOB_DB_BLOB_FILE_SYNCED((byte) 0x76), /** * # of blob index evicted from base DB by BlobDB compaction filter because * of expiration. */ BLOB_DB_BLOB_INDEX_EXPIRED_COUNT((byte) 0x77), /** * Size of blob index evicted from base DB by BlobDB compaction filter * because of expiration. */ BLOB_DB_BLOB_INDEX_EXPIRED_SIZE((byte) 0x78), /** * # of blob index evicted from base DB by BlobDB compaction filter because * of corresponding file deleted. */ BLOB_DB_BLOB_INDEX_EVICTED_COUNT((byte) 0x79), /** * Size of blob index evicted from base DB by BlobDB compaction filter * because of corresponding file deleted. */ BLOB_DB_BLOB_INDEX_EVICTED_SIZE((byte) 0x7A), /** * # of blob files being garbage collected. */ BLOB_DB_GC_NUM_FILES((byte) 0x7B), /** * # of blob files generated by garbage collection. */ BLOB_DB_GC_NUM_NEW_FILES((byte) 0x7C), /** * # of BlobDB garbage collection failures. */ BLOB_DB_GC_FAILURES((byte) 0x7D), /** * # of keys drop by BlobDB garbage collection because they had been * overwritten. */ BLOB_DB_GC_NUM_KEYS_OVERWRITTEN((byte) 0x7E), /** * # of keys drop by BlobDB garbage collection because of expiration. */ BLOB_DB_GC_NUM_KEYS_EXPIRED((byte) 0x7F), /** * # of keys relocated to new blob file by garbage collection. */ BLOB_DB_GC_NUM_KEYS_RELOCATED((byte) -0x02), /** * # of bytes drop by BlobDB garbage collection because they had been * overwritten. */ BLOB_DB_GC_BYTES_OVERWRITTEN((byte) -0x03), /** * # of bytes drop by BlobDB garbage collection because of expiration. 
*/ BLOB_DB_GC_BYTES_EXPIRED((byte) -0x04), /** * # of bytes relocated to new blob file by garbage collection. */ BLOB_DB_GC_BYTES_RELOCATED((byte) -0x05), /** * # of blob files evicted because of BlobDB is full. */ BLOB_DB_FIFO_NUM_FILES_EVICTED((byte) -0x06), /** * # of keys in the blob files evicted because of BlobDB is full. */ BLOB_DB_FIFO_NUM_KEYS_EVICTED((byte) -0x07), /** * # of bytes in the blob files evicted because of BlobDB is full. */ BLOB_DB_FIFO_BYTES_EVICTED((byte) -0x08), /** * These counters indicate a performance issue in WritePrepared transactions. * We should not seem them ticking them much. * # of times prepare_mutex_ is acquired in the fast path. */ TXN_PREPARE_MUTEX_OVERHEAD((byte) -0x09), /** * # of times old_commit_map_mutex_ is acquired in the fast path. */ TXN_OLD_COMMIT_MAP_MUTEX_OVERHEAD((byte) -0x0A), /** * # of times we checked a batch for duplicate keys. */ TXN_DUPLICATE_KEY_OVERHEAD((byte) -0x0B), /** * # of times snapshot_mutex_ is acquired in the fast path. 
*/ TXN_SNAPSHOT_MUTEX_OVERHEAD((byte) -0x0C), /** * # of times ::Get returned TryAgain due to expired snapshot seq */ TXN_GET_TRY_AGAIN((byte) -0x0D), /** * # of files marked as trash by delete scheduler */ FILES_MARKED_TRASH((byte) -0x0E), /** * # of files deleted immediately by delete scheduler */ FILES_DELETED_IMMEDIATELY((byte) -0x0f), /** * Compaction read and write statistics broken down by CompactionReason */ COMPACT_READ_BYTES_MARKED((byte) -0x10), COMPACT_READ_BYTES_PERIODIC((byte) -0x11), COMPACT_READ_BYTES_TTL((byte) -0x12), COMPACT_WRITE_BYTES_MARKED((byte) -0x13), COMPACT_WRITE_BYTES_PERIODIC((byte) -0x14), COMPACT_WRITE_BYTES_TTL((byte) -0x15), /** * DB error handler statistics */ ERROR_HANDLER_BG_ERROR_COUNT((byte) -0x16), ERROR_HANDLER_BG_IO_ERROR_COUNT((byte) -0x17), ERROR_HANDLER_BG_RETRYABLE_IO_ERROR_COUNT((byte) -0x18), ERROR_HANDLER_AUTORESUME_COUNT((byte) -0x19), ERROR_HANDLER_AUTORESUME_RETRY_TOTAL_COUNT((byte) -0x1A), ERROR_HANDLER_AUTORESUME_SUCCESS_COUNT((byte) -0x1B), TICKER_ENUM_MAX((byte) 0x5F); private final byte value; TickerType(final byte value) { this.value = value; } /** * Returns the byte value of the enumerations value * * @return byte representation */ public byte getValue() { return value; } /** * Get Ticker type by byte value. * * @param value byte representation of TickerType. * * @return {@link org.rocksdb.TickerType} instance. * @throws java.lang.IllegalArgumentException if an invalid * value is provided. */ public static TickerType getTickerType(final byte value) { for (final TickerType tickerType : TickerType.values()) { if (tickerType.getValue() == value) { return tickerType; } } throw new IllegalArgumentException( "Illegal value provided for TickerType."); } }
facebook/rocksdb
java/src/main/java/org/rocksdb/TickerType.java
950
// Copyright (c) 2011-present, Facebook, Inc. All rights reserved. // This source code is licensed under both the GPLv2 (found in the // COPYING file in the root directory) and Apache 2.0 License // (found in the LICENSE.Apache file in the root directory). package org.rocksdb; /** * The class that controls the get behavior. * * Note that dispose() must be called before an Options instance * become out-of-scope to release the allocated memory in c++. */ public class ReadOptions extends RocksObject { public ReadOptions() { super(newReadOptions()); } /** * @param verifyChecksums verification will be performed on every read * when set to true * @param fillCache if true, then fill-cache behavior will be performed. */ public ReadOptions(final boolean verifyChecksums, final boolean fillCache) { super(newReadOptions(verifyChecksums, fillCache)); } /** * Copy constructor. * * NOTE: This does a shallow copy, which means snapshot, iterate_upper_bound * and other pointers will be cloned! * * @param other The ReadOptions to copy. */ public ReadOptions(ReadOptions other) { super(copyReadOptions(other.nativeHandle_)); this.iterateLowerBoundSlice_ = other.iterateLowerBoundSlice_; this.iterateUpperBoundSlice_ = other.iterateUpperBoundSlice_; } /** * If true, all data read from underlying storage will be * verified against corresponding checksums. * Default: true * * @return true if checksum verification is on. */ public boolean verifyChecksums() { assert(isOwningHandle()); return verifyChecksums(nativeHandle_); } /** * If true, all data read from underlying storage will be * verified against corresponding checksums. * Default: true * * @param verifyChecksums if true, then checksum verification * will be performed on every read. * @return the reference to the current ReadOptions. 
*/ public ReadOptions setVerifyChecksums( final boolean verifyChecksums) { assert(isOwningHandle()); setVerifyChecksums(nativeHandle_, verifyChecksums); return this; } // TODO(yhchiang): this option seems to be block-based table only. // move this to a better place? /** * Fill the cache when loading the block-based sst formated db. * Callers may wish to set this field to false for bulk scans. * Default: true * * @return true if the fill-cache behavior is on. */ public boolean fillCache() { assert(isOwningHandle()); return fillCache(nativeHandle_); } /** * Fill the cache when loading the block-based sst formatted db. * Callers may wish to set this field to false for bulk scans. * Default: true * * @param fillCache if true, then fill-cache behavior will be * performed. * @return the reference to the current ReadOptions. */ public ReadOptions setFillCache(final boolean fillCache) { assert(isOwningHandle()); setFillCache(nativeHandle_, fillCache); return this; } /** * Returns the currently assigned Snapshot instance. * * @return the Snapshot assigned to this instance. If no Snapshot * is assigned null. */ public Snapshot snapshot() { assert(isOwningHandle()); long snapshotHandle = snapshot(nativeHandle_); if (snapshotHandle != 0) { return new Snapshot(snapshotHandle); } return null; } /** * <p>If "snapshot" is non-nullptr, read as of the supplied snapshot * (which must belong to the DB that is being read and which must * not have been released). If "snapshot" is nullptr, use an implicit * snapshot of the state at the beginning of this read operation.</p> * <p>Default: null</p> * * @param snapshot {@link Snapshot} instance * @return the reference to the current ReadOptions. */ public ReadOptions setSnapshot(final Snapshot snapshot) { assert(isOwningHandle()); if (snapshot != null) { setSnapshot(nativeHandle_, snapshot.nativeHandle_); } else { setSnapshot(nativeHandle_, 0l); } return this; } /** * Returns the current read tier. 
* * @return the read tier in use, by default {@link ReadTier#READ_ALL_TIER} */ public ReadTier readTier() { assert(isOwningHandle()); return ReadTier.getReadTier(readTier(nativeHandle_)); } /** * Specify if this read request should process data that ALREADY * resides on a particular cache. If the required data is not * found at the specified cache, then {@link RocksDBException} is thrown. * * @param readTier {@link ReadTier} instance * @return the reference to the current ReadOptions. */ public ReadOptions setReadTier(final ReadTier readTier) { assert(isOwningHandle()); setReadTier(nativeHandle_, readTier.getValue()); return this; } /** * Specify to create a tailing iterator -- a special iterator that has a * view of the complete database (i.e. it can also be used to read newly * added data) and is optimized for sequential reads. It will return records * that were inserted into the database after the creation of the iterator. * Default: false * * Not supported in {@code ROCKSDB_LITE} mode! * * @return true if tailing iterator is enabled. */ public boolean tailing() { assert(isOwningHandle()); return tailing(nativeHandle_); } /** * Specify to create a tailing iterator -- a special iterator that has a * view of the complete database (i.e. it can also be used to read newly * added data) and is optimized for sequential reads. It will return records * that were inserted into the database after the creation of the iterator. * Default: false * Not supported in ROCKSDB_LITE mode! * * @param tailing if true, then tailing iterator will be enabled. * @return the reference to the current ReadOptions. */ public ReadOptions setTailing(final boolean tailing) { assert(isOwningHandle()); setTailing(nativeHandle_, tailing); return this; } /** * Returns whether managed iterators will be used. * * @return the setting of whether managed iterators will be used, * by default false * * @deprecated This options is not used anymore. 
*/ @Deprecated public boolean managed() { assert(isOwningHandle()); return managed(nativeHandle_); } /** * Specify to create a managed iterator -- a special iterator that * uses less resources by having the ability to free its underlying * resources on request. * * @param managed if true, then managed iterators will be enabled. * @return the reference to the current ReadOptions. * * @deprecated This options is not used anymore. */ @Deprecated public ReadOptions setManaged(final boolean managed) { assert(isOwningHandle()); setManaged(nativeHandle_, managed); return this; } /** * Returns whether a total seek order will be used * * @return the setting of whether a total seek order will be used */ public boolean totalOrderSeek() { assert(isOwningHandle()); return totalOrderSeek(nativeHandle_); } /** * Enable a total order seek regardless of index format (e.g. hash index) * used in the table. Some table format (e.g. plain table) may not support * this option. * * @param totalOrderSeek if true, then total order seek will be enabled. * @return the reference to the current ReadOptions. */ public ReadOptions setTotalOrderSeek(final boolean totalOrderSeek) { assert(isOwningHandle()); setTotalOrderSeek(nativeHandle_, totalOrderSeek); return this; } /** * Returns whether the iterator only iterates over the same prefix as the seek * * @return the setting of whether the iterator only iterates over the same * prefix as the seek, default is false */ public boolean prefixSameAsStart() { assert(isOwningHandle()); return prefixSameAsStart(nativeHandle_); } /** * Enforce that the iterator only iterates over the same prefix as the seek. * This option is effective only for prefix seeks, i.e. prefix_extractor is * non-null for the column family and {@link #totalOrderSeek()} is false. * Unlike iterate_upper_bound, {@link #setPrefixSameAsStart(boolean)} only * works within a prefix but in both directions. 
* * @param prefixSameAsStart if true, then the iterator only iterates over the * same prefix as the seek * @return the reference to the current ReadOptions. */ public ReadOptions setPrefixSameAsStart(final boolean prefixSameAsStart) { assert(isOwningHandle()); setPrefixSameAsStart(nativeHandle_, prefixSameAsStart); return this; } /** * Returns whether the blocks loaded by the iterator will be pinned in memory * * @return the setting of whether the blocks loaded by the iterator will be * pinned in memory */ public boolean pinData() { assert(isOwningHandle()); return pinData(nativeHandle_); } /** * Keep the blocks loaded by the iterator pinned in memory as long as the * iterator is not deleted, If used when reading from tables created with * BlockBasedTableOptions::use_delta_encoding = false, * Iterator's property "rocksdb.iterator.is-key-pinned" is guaranteed to * return 1. * * @param pinData if true, the blocks loaded by the iterator will be pinned * @return the reference to the current ReadOptions. */ public ReadOptions setPinData(final boolean pinData) { assert(isOwningHandle()); setPinData(nativeHandle_, pinData); return this; } /** * If true, when PurgeObsoleteFile is called in CleanupIteratorState, we * schedule a background job in the flush job queue and delete obsolete files * in background. * * Default: false * * @return true when PurgeObsoleteFile is called in CleanupIteratorState */ public boolean backgroundPurgeOnIteratorCleanup() { assert(isOwningHandle()); return backgroundPurgeOnIteratorCleanup(nativeHandle_); } /** * If true, when PurgeObsoleteFile is called in CleanupIteratorState, we * schedule a background job in the flush job queue and delete obsolete files * in background. * * Default: false * * @param backgroundPurgeOnIteratorCleanup true when PurgeObsoleteFile is * called in CleanupIteratorState * @return the reference to the current ReadOptions. 
*/ public ReadOptions setBackgroundPurgeOnIteratorCleanup( final boolean backgroundPurgeOnIteratorCleanup) { assert(isOwningHandle()); setBackgroundPurgeOnIteratorCleanup(nativeHandle_, backgroundPurgeOnIteratorCleanup); return this; } /** * If non-zero, NewIterator will create a new table reader which * performs reads of the given size. Using a large size (&gt; 2MB) can * improve the performance of forward iteration on spinning disks. * * Default: 0 * * @return The readahead size is bytes */ public long readaheadSize() { assert(isOwningHandle()); return readaheadSize(nativeHandle_); } /** * If non-zero, NewIterator will create a new table reader which * performs reads of the given size. Using a large size (&gt; 2MB) can * improve the performance of forward iteration on spinning disks. * * Default: 0 * * @param readaheadSize The readahead size is bytes * @return the reference to the current ReadOptions. */ public ReadOptions setReadaheadSize(final long readaheadSize) { assert(isOwningHandle()); setReadaheadSize(nativeHandle_, readaheadSize); return this; } /** * A threshold for the number of keys that can be skipped before failing an * iterator seek as incomplete. * * @return the number of keys that can be skipped * before failing an iterator seek as incomplete. */ public long maxSkippableInternalKeys() { assert(isOwningHandle()); return maxSkippableInternalKeys(nativeHandle_); } /** * A threshold for the number of keys that can be skipped before failing an * iterator seek as incomplete. The default value of 0 should be used to * never fail a request as incomplete, even on skipping too many keys. * * Default: 0 * * @param maxSkippableInternalKeys the number of keys that can be skipped * before failing an iterator seek as incomplete. * * @return the reference to the current ReadOptions. 
*/ public ReadOptions setMaxSkippableInternalKeys( final long maxSkippableInternalKeys) { assert(isOwningHandle()); setMaxSkippableInternalKeys(nativeHandle_, maxSkippableInternalKeys); return this; } /** * If true, keys deleted using the DeleteRange() API will be visible to * readers until they are naturally deleted during compaction. This improves * read performance in DBs with many range deletions. * * Default: false * * @return true if keys deleted using the DeleteRange() API will be visible */ public boolean ignoreRangeDeletions() { assert(isOwningHandle()); return ignoreRangeDeletions(nativeHandle_); } /** * If true, keys deleted using the DeleteRange() API will be visible to * readers until they are naturally deleted during compaction. This improves * read performance in DBs with many range deletions. * * Default: false * * @param ignoreRangeDeletions true if keys deleted using the DeleteRange() * API should be visible * @return the reference to the current ReadOptions. */ public ReadOptions setIgnoreRangeDeletions(final boolean ignoreRangeDeletions) { assert(isOwningHandle()); setIgnoreRangeDeletions(nativeHandle_, ignoreRangeDeletions); return this; } /** * Defines the smallest key at which the backward * iterator can return an entry. Once the bound is passed, * {@link RocksIterator#isValid()} will be false. * * The lower bound is inclusive i.e. the bound value is a valid * entry. * * If prefix_extractor is not null, the Seek target and `iterate_lower_bound` * need to have the same prefix. This is because ordering is not guaranteed * outside of prefix domain. * * Default: null * * @param iterateLowerBound Slice representing the upper bound * @return the reference to the current ReadOptions. */ public ReadOptions setIterateLowerBound(final AbstractSlice<?> iterateLowerBound) { assert(isOwningHandle()); setIterateLowerBound( nativeHandle_, iterateLowerBound == null ? 
0 : iterateLowerBound.getNativeHandle()); // Hold onto a reference so it doesn't get garbage collected out from under us. iterateLowerBoundSlice_ = iterateLowerBound; return this; } /** * Returns the smallest key at which the backward * iterator can return an entry. * * The lower bound is inclusive i.e. the bound value is a valid entry. * * @return the smallest key, or null if there is no lower bound defined. */ public Slice iterateLowerBound() { assert(isOwningHandle()); final long lowerBoundSliceHandle = iterateLowerBound(nativeHandle_); if (lowerBoundSliceHandle != 0) { // Disown the new slice - it's owned by the C++ side of the JNI boundary // from the perspective of this method. return new Slice(lowerBoundSliceHandle, false); } return null; } /** * Defines the extent up to which the forward iterator * can returns entries. Once the bound is reached, * {@link RocksIterator#isValid()} will be false. * * The upper bound is exclusive i.e. the bound value is not a valid entry. * * If prefix_extractor is not null, the Seek target and iterate_upper_bound * need to have the same prefix. This is because ordering is not guaranteed * outside of prefix domain. * * Default: null * * @param iterateUpperBound Slice representing the upper bound * @return the reference to the current ReadOptions. */ public ReadOptions setIterateUpperBound(final AbstractSlice<?> iterateUpperBound) { assert(isOwningHandle()); setIterateUpperBound( nativeHandle_, iterateUpperBound == null ? 0 : iterateUpperBound.getNativeHandle()); // Hold onto a reference so it doesn't get garbage collected out from under us. iterateUpperBoundSlice_ = iterateUpperBound; return this; } /** * Returns the largest key at which the forward * iterator can return an entry. * * The upper bound is exclusive i.e. the bound value is not a valid entry. * * @return the largest key, or null if there is no upper bound defined. 
*/ public Slice iterateUpperBound() { assert(isOwningHandle()); final long upperBoundSliceHandle = iterateUpperBound(nativeHandle_); if (upperBoundSliceHandle != 0) { // Disown the new slice - it's owned by the C++ side of the JNI boundary // from the perspective of this method. return new Slice(upperBoundSliceHandle, false); } return null; } /** * A callback to determine whether relevant keys for this scan exist in a * given table based on the table's properties. The callback is passed the * properties of each table during iteration. If the callback returns false, * the table will not be scanned. This option only affects Iterators and has * no impact on point lookups. * * Default: null (every table will be scanned) * * @param tableFilter the table filter for the callback. * * @return the reference to the current ReadOptions. */ public ReadOptions setTableFilter(final AbstractTableFilter tableFilter) { assert(isOwningHandle()); setTableFilter(nativeHandle_, tableFilter.nativeHandle_); return this; } /** * Needed to support differential snapshots. Has 2 effects: * 1) Iterator will skip all internal keys with seqnum &lt; iter_start_seqnum * 2) if this param &gt; 0 iterator will return INTERNAL keys instead of user * keys; e.g. return tombstones as well. * * Default: 0 (don't filter by seqnum, return user keys) * * @param startSeqnum the starting sequence number. * * @return the reference to the current ReadOptions. */ public ReadOptions setIterStartSeqnum(final long startSeqnum) { assert(isOwningHandle()); setIterStartSeqnum(nativeHandle_, startSeqnum); return this; } /** * Returns the starting Sequence Number of any iterator. * See {@link #setIterStartSeqnum(long)}. * * @return the starting sequence number of any iterator. */ public long iterStartSeqnum() { assert(isOwningHandle()); return iterStartSeqnum(nativeHandle_); } // instance variables // NOTE: If you add new member variables, please update the copy constructor above! 
// // Hold a reference to any iterate lower or upper bound that was set on this // object until we're destroyed or it's overwritten. That way the caller can // freely leave scope without us losing the Java Slice object, which during // close() would also reap its associated rocksdb::Slice native object since // it's possibly (likely) to be an owning handle. private AbstractSlice<?> iterateLowerBoundSlice_; private AbstractSlice<?> iterateUpperBoundSlice_; private native static long newReadOptions(); private native static long newReadOptions(final boolean verifyChecksums, final boolean fillCache); private native static long copyReadOptions(long handle); @Override protected final native void disposeInternal(final long handle); private native boolean verifyChecksums(long handle); private native void setVerifyChecksums(long handle, boolean verifyChecksums); private native boolean fillCache(long handle); private native void setFillCache(long handle, boolean fillCache); private native long snapshot(long handle); private native void setSnapshot(long handle, long snapshotHandle); private native byte readTier(long handle); private native void setReadTier(long handle, byte readTierValue); private native boolean tailing(long handle); private native void setTailing(long handle, boolean tailing); private native boolean managed(long handle); private native void setManaged(long handle, boolean managed); private native boolean totalOrderSeek(long handle); private native void setTotalOrderSeek(long handle, boolean totalOrderSeek); private native boolean prefixSameAsStart(long handle); private native void setPrefixSameAsStart(long handle, boolean prefixSameAsStart); private native boolean pinData(long handle); private native void setPinData(long handle, boolean pinData); private native boolean backgroundPurgeOnIteratorCleanup(final long handle); private native void setBackgroundPurgeOnIteratorCleanup(final long handle, final boolean backgroundPurgeOnIteratorCleanup); private native 
long readaheadSize(final long handle); private native void setReadaheadSize(final long handle, final long readaheadSize); private native long maxSkippableInternalKeys(final long handle); private native void setMaxSkippableInternalKeys(final long handle, final long maxSkippableInternalKeys); private native boolean ignoreRangeDeletions(final long handle); private native void setIgnoreRangeDeletions(final long handle, final boolean ignoreRangeDeletions); private native void setIterateUpperBound(final long handle, final long upperBoundSliceHandle); private native long iterateUpperBound(final long handle); private native void setIterateLowerBound(final long handle, final long lowerBoundSliceHandle); private native long iterateLowerBound(final long handle); private native void setTableFilter(final long handle, final long tableFilterHandle); private native void setIterStartSeqnum(final long handle, final long seqNum); private native long iterStartSeqnum(final long handle); }
facebook/rocksdb
java/src/main/java/org/rocksdb/ReadOptions.java
951
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ /** * Abstraction of a byte buffer - the fundamental data structure * to represent a low-level binary and text message. * * Netty uses its own buffer API instead of NIO {@link java.nio.ByteBuffer} to * represent a sequence of bytes. This approach has significant advantage over * using {@link java.nio.ByteBuffer}. Netty's new buffer type, * {@link io.netty.buffer.ByteBuf}, has been designed from ground * up to address the problems of {@link java.nio.ByteBuffer} and to meet the * daily needs of network application developers. To list a few cool features: * <ul> * <li>You can define your buffer type if necessary.</li> * <li>Transparent zero copy is achieved by built-in composite buffer type.</li> * <li>A dynamic buffer type is provided out-of-the-box, whose capacity is * expanded on demand, just like {@link java.lang.StringBuffer}.</li> * <li>There's no need to call the {@code flip()} method anymore.</li> * <li>It is often faster than {@link java.nio.ByteBuffer}.</li> * </ul> * * <h3>Extensibility</h3> * * {@link io.netty.buffer.ByteBuf} has rich set of operations * optimized for rapid protocol implementation. For example, * {@link io.netty.buffer.ByteBuf} provides various operations * for accessing unsigned values and strings and searching for certain byte * sequence in a buffer. You can also extend or wrap existing buffer type * to add convenient accessors. 
The custom buffer type still implements * {@link io.netty.buffer.ByteBuf} interface rather than * introducing an incompatible type. * * <h3>Transparent Zero Copy</h3> * * To lift up the performance of a network application to the extreme, you need * to reduce the number of memory copy operation. You might have a set of * buffers that could be sliced and combined to compose a whole message. Netty * provides a composite buffer which allows you to create a new buffer from the * arbitrary number of existing buffers with no memory copy. For example, a * message could be composed of two parts; header and body. In a modularized * application, the two parts could be produced by different modules and * assembled later when the message is sent out. * <pre> * +--------+----------+ * | header | body | * +--------+----------+ * </pre> * If {@link java.nio.ByteBuffer} were used, you would have to create a new big * buffer and copy the two parts into the new buffer. Alternatively, you can * perform a gathering write operation in NIO, but it restricts you to represent * the composite of buffers as an array of {@link java.nio.ByteBuffer}s rather * than a single buffer, breaking the abstraction and introducing complicated * state management. Moreover, it's of no use if you are not going to read or * write from an NIO channel. * <pre> * // The composite type is incompatible with the component type. * ByteBuffer[] message = new ByteBuffer[] { header, body }; * </pre> * By contrast, {@link io.netty.buffer.ByteBuf} does not have such * caveats because it is fully extensible and has a built-in composite buffer * type. * <pre> * // The composite type is compatible with the component type. * {@link io.netty.buffer.ByteBuf} message = {@link io.netty.buffer.Unpooled}.wrappedBuffer(header, body); * * // Therefore, you can even create a composite by mixing a composite and an * // ordinary buffer. 
* {@link io.netty.buffer.ByteBuf} messageWithFooter = {@link io.netty.buffer.Unpooled}.wrappedBuffer(message, footer); * * // Because the composite is still a {@link io.netty.buffer.ByteBuf}, you can access its content * // easily, and the accessor method will behave just like it's a single buffer * // even if the region you want to access spans over multiple components. The * // unsigned integer being read here is located across body and footer. * messageWithFooter.getUnsignedInt( * messageWithFooter.readableBytes() - footer.readableBytes() - 1); * </pre> * * <h3>Automatic Capacity Extension</h3> * * Many protocols define variable length messages, which means there's no way to * determine the length of a message until you construct the message or it is * difficult and inconvenient to calculate the length precisely. It is just * like when you build a {@link java.lang.String}. You often estimate the length * of the resulting string and let {@link java.lang.StringBuffer} expand itself * on demand. * <pre> * // A new dynamic buffer is created. Internally, the actual buffer is created * // lazily to avoid potentially wasted memory space. * {@link io.netty.buffer.ByteBuf} b = {@link io.netty.buffer.Unpooled}.buffer(4); * * // When the first write attempt is made, the internal buffer is created with * // the specified initial capacity (4). * b.writeByte('1'); * * b.writeByte('2'); * b.writeByte('3'); * b.writeByte('4'); * * // When the number of written bytes exceeds the initial capacity (4), the * // internal buffer is reallocated automatically with a larger capacity. * b.writeByte('5'); * </pre> * * <h3>Better Performance</h3> * * Most frequently used buffer implementation of * {@link io.netty.buffer.ByteBuf} is a very thin wrapper of a * byte array (i.e. {@code byte[]}). Unlike {@link java.nio.ByteBuffer}, it has * no complicated boundary check and index compensation, and therefore it is * easier for a JVM to optimize the buffer access. 
More complicated buffer * implementation is used only for sliced or composite buffers, and it performs * as well as {@link java.nio.ByteBuffer}. */ package io.netty.buffer;
netty/netty
buffer/src/main/java/io/netty/buffer/package-info.java
952
// Copyright (c) 2011-present, Facebook, Inc. All rights reserved. // This source code is licensed under both the GPLv2 (found in the // COPYING file in the root directory) and Apache 2.0 License // (found in the LICENSE.Apache file in the root directory). package org.rocksdb; import java.util.Collection; import java.util.List; public interface DBOptionsInterface<T extends DBOptionsInterface<T>> { /** * Use this if your DB is very small (like under 1GB) and you don't want to * spend lots of memory for memtables. * * @return the instance of the current object. */ T optimizeForSmallDb(); /** * Use the specified object to interact with the environment, * e.g. to read/write files, schedule background work, etc. * Default: {@link Env#getDefault()} * * @param env {@link Env} instance. * @return the instance of the current Options. */ T setEnv(final Env env); /** * Returns the set RocksEnv instance. * * @return {@link RocksEnv} instance set in the options. */ Env getEnv(); /** * <p>By default, RocksDB uses only one background thread for flush and * compaction. Calling this function will set it up such that total of * `total_threads` is used.</p> * * <p>You almost definitely want to call this function if your system is * bottlenecked by RocksDB.</p> * * @param totalThreads The total number of threads to be used by RocksDB. * A good value is the number of cores. * * @return the instance of the current Options */ T setIncreaseParallelism(int totalThreads); /** * If this value is set to true, then the database will be created * if it is missing during {@code RocksDB.open()}. * Default: false * * @param flag a flag indicating whether to create a database the * specified database in {@link RocksDB#open(org.rocksdb.Options, String)} operation * is missing. * @return the instance of the current Options * @see RocksDB#open(org.rocksdb.Options, String) */ T setCreateIfMissing(boolean flag); /** * Return true if the create_if_missing flag is set to true. 
* If true, the database will be created if it is missing. * * @return true if the createIfMissing option is set to true. * @see #setCreateIfMissing(boolean) */ boolean createIfMissing(); /** * <p>If true, missing column families will be automatically created</p> * * <p>Default: false</p> * * @param flag a flag indicating if missing column families shall be * created automatically. * @return true if missing column families shall be created automatically * on open. */ T setCreateMissingColumnFamilies(boolean flag); /** * Return true if the create_missing_column_families flag is set * to true. If true column families be created if missing. * * @return true if the createMissingColumnFamilies is set to * true. * @see #setCreateMissingColumnFamilies(boolean) */ boolean createMissingColumnFamilies(); /** * If true, an error will be thrown during RocksDB.open() if the * database already exists. * Default: false * * @param errorIfExists if true, an exception will be thrown * during {@code RocksDB.open()} if the database already exists. * @return the reference to the current option. * @see RocksDB#open(org.rocksdb.Options, String) */ T setErrorIfExists(boolean errorIfExists); /** * If true, an error will be thrown during RocksDB.open() if the * database already exists. * * @return if true, an error is raised when the specified database * already exists before open. */ boolean errorIfExists(); /** * If true, the implementation will do aggressive checking of the * data it is processing and will stop early if it detects any * errors. This may have unforeseen ramifications: for example, a * corruption of one DB entry may cause a large number of entries to * become unreadable or for the entire DB to become unopenable. * If any of the writes to the database fails (Put, Delete, Merge, Write), * the database will switch to read-only mode and fail all other * Write operations. * Default: true * * @param paranoidChecks a flag to indicate whether paranoid-check * is on. 
* @return the reference to the current option. */ T setParanoidChecks(boolean paranoidChecks); /** * If true, the implementation will do aggressive checking of the * data it is processing and will stop early if it detects any * errors. This may have unforeseen ramifications: for example, a * corruption of one DB entry may cause a large number of entries to * become unreadable or for the entire DB to become unopenable. * If any of the writes to the database fails (Put, Delete, Merge, Write), * the database will switch to read-only mode and fail all other * Write operations. * * @return a boolean indicating whether paranoid-check is on. */ boolean paranoidChecks(); /** * Use to control write rate of flush and compaction. Flush has higher * priority than compaction. Rate limiting is disabled if nullptr. * Default: nullptr * * @param rateLimiter {@link org.rocksdb.RateLimiter} instance. * @return the instance of the current object. * * @since 3.10.0 */ T setRateLimiter(RateLimiter rateLimiter); /** * Use to track SST files and control their file deletion rate. * * Features: * - Throttle the deletion rate of the SST files. * - Keep track the total size of all SST files. * - Set a maximum allowed space limit for SST files that when reached * the DB wont do any further flushes or compactions and will set the * background error. * - Can be shared between multiple dbs. * * Limitations: * - Only track and throttle deletes of SST files in * first db_path (db_name if db_paths is empty). * * @param sstFileManager The SST File Manager for the db. * @return the instance of the current object. */ T setSstFileManager(SstFileManager sstFileManager); /** * <p>Any internal progress/error information generated by * the db will be written to the Logger if it is non-nullptr, * or to a file stored in the same directory as the DB * contents if info_log is nullptr.</p> * * <p>Default: nullptr</p> * * @param logger {@link Logger} instance. * @return the instance of the current object. 
*/ T setLogger(Logger logger); /** * <p>Sets the RocksDB log level. Default level is INFO</p> * * @param infoLogLevel log level to set. * @return the instance of the current object. */ T setInfoLogLevel(InfoLogLevel infoLogLevel); /** * <p>Returns currently set log level.</p> * @return {@link org.rocksdb.InfoLogLevel} instance. */ InfoLogLevel infoLogLevel(); /** * If {@link MutableDBOptionsInterface#maxOpenFiles()} is -1, DB will open * all files on DB::Open(). You can use this option to increase the number * of threads used to open the files. * * Default: 16 * * @param maxFileOpeningThreads the maximum number of threads to use to * open files * * @return the reference to the current options. */ T setMaxFileOpeningThreads(int maxFileOpeningThreads); /** * If {@link MutableDBOptionsInterface#maxOpenFiles()} is -1, DB will open all * files on DB::Open(). You can use this option to increase the number of * threads used to open the files. * * Default: 16 * * @return the maximum number of threads to use to open files */ int maxFileOpeningThreads(); /** * <p>Sets the statistics object which collects metrics about database operations. * Statistics objects should not be shared between DB instances as * it does not use any locks to prevent concurrent updates.</p> * * @param statistics The statistics to set * * @return the instance of the current object. * * @see RocksDB#open(org.rocksdb.Options, String) */ T setStatistics(final Statistics statistics); /** * <p>Returns statistics object.</p> * * @return the instance of the statistics object or null if there is no * statistics object. * * @see #setStatistics(Statistics) */ Statistics statistics(); /** * <p>If true, then every store to stable storage will issue a fsync.</p> * <p>If false, then every store to stable storage will issue a fdatasync. 
* This parameter should be set to true while storing data to * filesystem like ext3 that can lose files after a reboot.</p> * <p>Default: false</p> * * @param useFsync a boolean flag to specify whether to use fsync * @return the instance of the current object. */ T setUseFsync(boolean useFsync); /** * <p>If true, then every store to stable storage will issue a fsync.</p> * <p>If false, then every store to stable storage will issue a fdatasync. * This parameter should be set to true while storing data to * filesystem like ext3 that can lose files after a reboot.</p> * * @return boolean value indicating if fsync is used. */ boolean useFsync(); /** * A list of paths where SST files can be put into, with its target size. * Newer data is placed into paths specified earlier in the vector while * older data gradually moves to paths specified later in the vector. * * For example, you have a flash device with 10GB allocated for the DB, * as well as a hard drive of 2TB, you should config it to be: * [{"/flash_path", 10GB}, {"/hard_drive", 2TB}] * * The system will try to guarantee data under each path is close to but * not larger than the target size. But current and future file sizes used * by determining where to place a file are based on best-effort estimation, * which means there is a chance that the actual size under the directory * is slightly more than target size under some workloads. User should give * some buffer room for those cases. * * If none of the paths has sufficient room to place a file, the file will * be placed to the last path anyway, despite to the target size. * * Placing newer data to earlier paths is also best-efforts. User should * expect user files to be placed in higher levels in some extreme cases. * * If left empty, only one path will be used, which is db_name passed when * opening the DB. 
* * Default: empty * * @param dbPaths the paths and target sizes * * @return the reference to the current options */ T setDbPaths(final Collection<DbPath> dbPaths); /** * A list of paths where SST files can be put into, with its target size. * Newer data is placed into paths specified earlier in the vector while * older data gradually moves to paths specified later in the vector. * * For example, you have a flash device with 10GB allocated for the DB, * as well as a hard drive of 2TB, you should config it to be: * [{"/flash_path", 10GB}, {"/hard_drive", 2TB}] * * The system will try to guarantee data under each path is close to but * not larger than the target size. But current and future file sizes used * by determining where to place a file are based on best-effort estimation, * which means there is a chance that the actual size under the directory * is slightly more than target size under some workloads. User should give * some buffer room for those cases. * * If none of the paths has sufficient room to place a file, the file will * be placed to the last path anyway, despite to the target size. * * Placing newer data to earlier paths is also best-efforts. User should * expect user files to be placed in higher levels in some extreme cases. * * If left empty, only one path will be used, which is db_name passed when * opening the DB. * * Default: {@link java.util.Collections#emptyList()} * * @return dbPaths the paths and target sizes */ List<DbPath> dbPaths(); /** * This specifies the info LOG dir. * If it is empty, the log files will be in the same dir as data. * If it is non empty, the log files will be in the specified dir, * and the db data dir's absolute path will be used as the log file * name's prefix. * * @param dbLogDir the path to the info log directory * @return the instance of the current object. */ T setDbLogDir(String dbLogDir); /** * Returns the directory of info log. * * If it is empty, the log files will be in the same dir as data. 
* If it is non empty, the log files will be in the specified dir, * and the db data dir's absolute path will be used as the log file * name's prefix. * * @return the path to the info log directory */ String dbLogDir(); /** * This specifies the absolute dir path for write-ahead logs (WAL). * If it is empty, the log files will be in the same dir as data, * dbname is used as the data dir by default * If it is non empty, the log files will be in kept the specified dir. * When destroying the db, * all log files in wal_dir and the dir itself is deleted * * @param walDir the path to the write-ahead-log directory. * @return the instance of the current object. */ T setWalDir(String walDir); /** * Returns the path to the write-ahead-logs (WAL) directory. * * If it is empty, the log files will be in the same dir as data, * dbname is used as the data dir by default * If it is non empty, the log files will be in kept the specified dir. * When destroying the db, * all log files in wal_dir and the dir itself is deleted * * @return the path to the write-ahead-logs (WAL) directory. */ String walDir(); /** * The periodicity when obsolete files get deleted. The default * value is 6 hours. The files that get out of scope by compaction * process will still get automatically delete on every compaction, * regardless of this setting * * @param micros the time interval in micros * @return the instance of the current object. */ T setDeleteObsoleteFilesPeriodMicros(long micros); /** * The periodicity when obsolete files get deleted. The default * value is 6 hours. The files that get out of scope by compaction * process will still get automatically delete on every compaction, * regardless of this setting * * @return the time interval in micros when obsolete files will be deleted. 
*/ long deleteObsoleteFilesPeriodMicros(); /** * This value represents the maximum number of threads that will * concurrently perform a compaction job by breaking it into multiple, * smaller ones that are run simultaneously. * Default: 1 (i.e. no subcompactions) * * @param maxSubcompactions The maximum number of threads that will * concurrently perform a compaction job * * @return the instance of the current object. */ T setMaxSubcompactions(int maxSubcompactions); /** * This value represents the maximum number of threads that will * concurrently perform a compaction job by breaking it into multiple, * smaller ones that are run simultaneously. * Default: 1 (i.e. no subcompactions) * * @return The maximum number of threads that will concurrently perform a * compaction job */ int maxSubcompactions(); /** * NOT SUPPORTED ANYMORE: RocksDB automatically decides this based on the * value of max_background_jobs. For backwards compatibility we will set * `max_background_jobs = max_background_compactions + max_background_flushes` * in the case where user sets at least one of `max_background_compactions` or * `max_background_flushes`. * * Specifies the maximum number of concurrent background flush jobs. * If you're increasing this, also consider increasing number of threads in * HIGH priority thread pool. For more information, see * Default: -1 * * @param maxBackgroundFlushes number of max concurrent flush jobs * @return the instance of the current object. * * @see RocksEnv#setBackgroundThreads(int) * @see RocksEnv#setBackgroundThreads(int, Priority) * @see MutableDBOptionsInterface#maxBackgroundCompactions() * * @deprecated Use {@link MutableDBOptionsInterface#setMaxBackgroundJobs(int)} */ @Deprecated T setMaxBackgroundFlushes(int maxBackgroundFlushes); /** * NOT SUPPORTED ANYMORE: RocksDB automatically decides this based on the * value of max_background_jobs. 
For backwards compatibility we will set * `max_background_jobs = max_background_compactions + max_background_flushes` * in the case where user sets at least one of `max_background_compactions` or * `max_background_flushes`. * * Returns the maximum number of concurrent background flush jobs. * If you're increasing this, also consider increasing number of threads in * HIGH priority thread pool. For more information, see * Default: -1 * * @return the maximum number of concurrent background flush jobs. * @see RocksEnv#setBackgroundThreads(int) * @see RocksEnv#setBackgroundThreads(int, Priority) */ @Deprecated int maxBackgroundFlushes(); /** * Specifies the maximum size of a info log file. If the current log file * is larger than `max_log_file_size`, a new info log file will * be created. * If 0, all logs will be written to one log file. * * @param maxLogFileSize the maximum size of a info log file. * @return the instance of the current object. * @throws java.lang.IllegalArgumentException thrown on 32-Bit platforms * while overflowing the underlying platform specific value. */ T setMaxLogFileSize(long maxLogFileSize); /** * Returns the maximum size of a info log file. If the current log file * is larger than this size, a new info log file will be created. * If 0, all logs will be written to one log file. * * @return the maximum size of the info log file. */ long maxLogFileSize(); /** * Specifies the time interval for the info log file to roll (in seconds). * If specified with non-zero value, log file will be rolled * if it has been active longer than `log_file_time_to_roll`. * Default: 0 (disabled) * * @param logFileTimeToRoll the time interval in seconds. * @return the instance of the current object. * @throws java.lang.IllegalArgumentException thrown on 32-Bit platforms * while overflowing the underlying platform specific value. */ T setLogFileTimeToRoll(long logFileTimeToRoll); /** * Returns the time interval for the info log file to roll (in seconds). 
* If specified with non-zero value, log file will be rolled * if it has been active longer than `log_file_time_to_roll`. * Default: 0 (disabled) * * @return the time interval in seconds. */ long logFileTimeToRoll(); /** * Specifies the maximum number of info log files to be kept. * Default: 1000 * * @param keepLogFileNum the maximum number of info log files to be kept. * @return the instance of the current object. * @throws java.lang.IllegalArgumentException thrown on 32-Bit platforms * while overflowing the underlying platform specific value. */ T setKeepLogFileNum(long keepLogFileNum); /** * Returns the maximum number of info log files to be kept. * Default: 1000 * * @return the maximum number of info log files to be kept. */ long keepLogFileNum(); /** * Recycle log files. * * If non-zero, we will reuse previously written log files for new * logs, overwriting the old data. The value indicates how many * such files we will keep around at any point in time for later * use. * * This is more efficient because the blocks are already * allocated and fdatasync does not need to update the inode after * each write. * * Default: 0 * * @param recycleLogFileNum the number of log files to keep for recycling * * @return the reference to the current options */ T setRecycleLogFileNum(long recycleLogFileNum); /** * Recycle log files. * * If non-zero, we will reuse previously written log files for new * logs, overwriting the old data. The value indicates how many * such files we will keep around at any point in time for later * use. * * This is more efficient because the blocks are already * allocated and fdatasync does not need to update the inode after * each write. * * Default: 0 * * @return the number of log files kept for recycling */ long recycleLogFileNum(); /** * Manifest file is rolled over on reaching this limit. * The older manifest file be deleted. * The default value is 1GB so that the manifest file can grow, but not * reach the limit of storage capacity. 
* * @param maxManifestFileSize the size limit of a manifest file. * @return the instance of the current object. */ T setMaxManifestFileSize(long maxManifestFileSize); /** * Manifest file is rolled over on reaching this limit. * The older manifest file be deleted. * The default value is 1GB so that the manifest file can grow, but not * reach the limit of storage capacity. * * @return the size limit of a manifest file. */ long maxManifestFileSize(); /** * Number of shards used for table cache. * * @param tableCacheNumshardbits the number of chards * @return the instance of the current object. */ T setTableCacheNumshardbits(int tableCacheNumshardbits); /** * Number of shards used for table cache. * * @return the number of shards used for table cache. */ int tableCacheNumshardbits(); /** * {@link #walTtlSeconds()} and {@link #walSizeLimitMB()} affect how archived logs * will be deleted. * <ol> * <li>If both set to 0, logs will be deleted asap and will not get into * the archive.</li> * <li>If WAL_ttl_seconds is 0 and WAL_size_limit_MB is not 0, * WAL files will be checked every 10 min and if total size is greater * then WAL_size_limit_MB, they will be deleted starting with the * earliest until size_limit is met. All empty files will be deleted.</li> * <li>If WAL_ttl_seconds is not 0 and WAL_size_limit_MB is 0, then * WAL files will be checked every WAL_ttl_seconds / 2 and those that * are older than WAL_ttl_seconds will be deleted.</li> * <li>If both are not 0, WAL files will be checked every 10 min and both * checks will be performed with ttl being first.</li> * </ol> * * @param walTtlSeconds the ttl seconds * @return the instance of the current object. * @see #setWalSizeLimitMB(long) */ T setWalTtlSeconds(long walTtlSeconds); /** * WalTtlSeconds() and walSizeLimitMB() affect how archived logs * will be deleted. 
* <ol> * <li>If both set to 0, logs will be deleted asap and will not get into * the archive.</li> * <li>If WAL_ttl_seconds is 0 and WAL_size_limit_MB is not 0, * WAL files will be checked every 10 min and if total size is greater * then WAL_size_limit_MB, they will be deleted starting with the * earliest until size_limit is met. All empty files will be deleted.</li> * <li>If WAL_ttl_seconds is not 0 and WAL_size_limit_MB is 0, then * WAL files will be checked every WAL_ttl_seconds / 2 and those that * are older than WAL_ttl_seconds will be deleted.</li> * <li>If both are not 0, WAL files will be checked every 10 min and both * checks will be performed with ttl being first.</li> * </ol> * * @return the wal-ttl seconds * @see #walSizeLimitMB() */ long walTtlSeconds(); /** * WalTtlSeconds() and walSizeLimitMB() affect how archived logs * will be deleted. * <ol> * <li>If both set to 0, logs will be deleted asap and will not get into * the archive.</li> * <li>If WAL_ttl_seconds is 0 and WAL_size_limit_MB is not 0, * WAL files will be checked every 10 min and if total size is greater * then WAL_size_limit_MB, they will be deleted starting with the * earliest until size_limit is met. All empty files will be deleted.</li> * <li>If WAL_ttl_seconds is not 0 and WAL_size_limit_MB is 0, then * WAL files will be checked every WAL_ttl_secondsi / 2 and those that * are older than WAL_ttl_seconds will be deleted.</li> * <li>If both are not 0, WAL files will be checked every 10 min and both * checks will be performed with ttl being first.</li> * </ol> * * @param sizeLimitMB size limit in mega-bytes. * @return the instance of the current object. * @see #setWalSizeLimitMB(long) */ T setWalSizeLimitMB(long sizeLimitMB); /** * {@link #walTtlSeconds()} and {@code #walSizeLimitMB()} affect how archived logs * will be deleted. 
* <ol>
* <li>If both set to 0, logs will be deleted asap and will not get into
* the archive.</li>
* <li>If WAL_ttl_seconds is 0 and WAL_size_limit_MB is not 0,
* WAL files will be checked every 10 min and if total size is greater
* than WAL_size_limit_MB, they will be deleted starting with the
* earliest until size_limit is met. All empty files will be deleted.</li>
* <li>If WAL_ttl_seconds is not 0 and WAL_size_limit_MB is 0, then
* WAL files will be checked every WAL_ttl_seconds / 2 and those that
* are older than WAL_ttl_seconds will be deleted.</li>
* <li>If both are not 0, WAL files will be checked every 10 min and both
* checks will be performed with ttl being first.</li>
* </ol>
*
* @return size limit in mega-bytes.
* @see #walTtlSeconds()
*/
long walSizeLimitMB();

/**
* The maximum limit of number of bytes that are written in a single batch
* of WAL or memtable write. It is followed when the leader write size
* is larger than 1/8 of this limit.
*
* Default: 1 MB
*
* @param maxWriteBatchGroupSizeBytes the maximum limit of number of bytes, see description.
* @return the instance of the current object.
*/
T setMaxWriteBatchGroupSizeBytes(final long maxWriteBatchGroupSizeBytes);

/**
* The maximum limit of number of bytes that are written in a single batch
* of WAL or memtable write. It is followed when the leader write size
* is larger than 1/8 of this limit.
*
* Default: 1 MB
*
* @return the maximum limit of number of bytes, see description.
*/
long maxWriteBatchGroupSizeBytes();

/**
* Number of bytes to preallocate (via fallocate) the manifest
* files. Default is 4mb, which is reasonable to reduce random IO
* as well as prevent overallocation for mounts that preallocate
* large amounts of data (such as xfs's allocsize option).
*
* @param size the size in byte
* @return the instance of the current object.
* @throws java.lang.IllegalArgumentException thrown on 32-Bit platforms
* while overflowing the underlying platform specific value.
*/ T setManifestPreallocationSize(long size); /** * Number of bytes to preallocate (via fallocate) the manifest * files. Default is 4mb, which is reasonable to reduce random IO * as well as prevent overallocation for mounts that preallocate * large amounts of data (such as xfs's allocsize option). * * @return size in bytes. */ long manifestPreallocationSize(); /** * Enable the OS to use direct I/O for reading sst tables. * Default: false * * @param useDirectReads if true, then direct read is enabled * @return the instance of the current object. */ T setUseDirectReads(boolean useDirectReads); /** * Enable the OS to use direct I/O for reading sst tables. * Default: false * * @return if true, then direct reads are enabled */ boolean useDirectReads(); /** * Enable the OS to use direct reads and writes in flush and * compaction * Default: false * * @param useDirectIoForFlushAndCompaction if true, then direct * I/O will be enabled for background flush and compactions * @return the instance of the current object. */ T setUseDirectIoForFlushAndCompaction(boolean useDirectIoForFlushAndCompaction); /** * Enable the OS to use direct reads and writes in flush and * compaction * * @return if true, then direct I/O is enabled for flush and * compaction */ boolean useDirectIoForFlushAndCompaction(); /** * Whether fallocate calls are allowed * * @param allowFAllocate false if fallocate() calls are bypassed * * @return the reference to the current options. */ T setAllowFAllocate(boolean allowFAllocate); /** * Whether fallocate calls are allowed * * @return false if fallocate() calls are bypassed */ boolean allowFAllocate(); /** * Allow the OS to mmap file for reading sst tables. * Default: false * * @param allowMmapReads true if mmap reads are allowed. * @return the instance of the current object. */ T setAllowMmapReads(boolean allowMmapReads); /** * Allow the OS to mmap file for reading sst tables. * Default: false * * @return true if mmap reads are allowed. 
*/
boolean allowMmapReads();

/**
* Allow the OS to mmap file for writing. Default: false
*
* @param allowMmapWrites true if mmap writes are allowed.
* @return the instance of the current object.
*/
T setAllowMmapWrites(boolean allowMmapWrites);

/**
* Allow the OS to mmap file for writing. Default: false
*
* @return true if mmap writes are allowed.
*/
boolean allowMmapWrites();

/**
* Disable child process inherit open files. Default: true
*
* @param isFdCloseOnExec true if child process inheriting open
* files is disabled.
* @return the instance of the current object.
*/
T setIsFdCloseOnExec(boolean isFdCloseOnExec);

/**
* Disable child process inherit open files. Default: true
*
* @return true if child process inheriting open files is disabled.
*/
boolean isFdCloseOnExec();

/**
* If set true, will hint the underlying file system that the file
* access pattern is random, when a sst file is opened.
* Default: true
*
* @param adviseRandomOnOpen true if hinting random access is on.
* @return the instance of the current object.
*/
T setAdviseRandomOnOpen(boolean adviseRandomOnOpen);

/**
* If set true, will hint the underlying file system that the file
* access pattern is random, when a sst file is opened.
* Default: true
*
* @return true if hinting random access is on.
*/
boolean adviseRandomOnOpen();

/**
* Amount of data to build up in memtables across all column
* families before writing to disk.
*
* This is distinct from {@link ColumnFamilyOptions#writeBufferSize()},
* which enforces a limit for a single memtable.
*
* This feature is disabled by default. Specify a non-zero value
* to enable it.
*
* Default: 0 (disabled)
*
* @param dbWriteBufferSize the size of the write buffer
*
* @return the reference to the current options.
*/
T setDbWriteBufferSize(long dbWriteBufferSize);

/**
* Use passed {@link WriteBufferManager} to control memory usage across
* multiple column families and/or DB instances.
* * Check <a href="https://github.com/facebook/rocksdb/wiki/Write-Buffer-Manager"> * https://github.com/facebook/rocksdb/wiki/Write-Buffer-Manager</a> * for more details on when to use it * * @param writeBufferManager The WriteBufferManager to use * @return the reference of the current options. */ T setWriteBufferManager(final WriteBufferManager writeBufferManager); /** * Reference to {@link WriteBufferManager} used by it. <br> * * Default: null (Disabled) * * @return a reference to WriteBufferManager */ WriteBufferManager writeBufferManager(); /** * Amount of data to build up in memtables across all column * families before writing to disk. * * This is distinct from {@link ColumnFamilyOptions#writeBufferSize()}, * which enforces a limit for a single memtable. * * This feature is disabled by default. Specify a non-zero value * to enable it. * * Default: 0 (disabled) * * @return the size of the write buffer */ long dbWriteBufferSize(); /** * Specify the file access pattern once a compaction is started. * It will be applied to all input files of a compaction. * * Default: {@link AccessHint#NORMAL} * * @param accessHint The access hint * * @return the reference to the current options. */ T setAccessHintOnCompactionStart(final AccessHint accessHint); /** * Specify the file access pattern once a compaction is started. * It will be applied to all input files of a compaction. * * Default: {@link AccessHint#NORMAL} * * @return The access hint */ AccessHint accessHintOnCompactionStart(); /** * If true, always create a new file descriptor and new table reader * for compaction inputs. Turn this parameter on may introduce extra * memory usage in the table reader, if it allocates extra memory * for indexes. This will allow file descriptor prefetch options * to be set for compaction input files and not to impact file * descriptors for the same file used by user queries. 
* Suggest to enable {@link BlockBasedTableConfig#cacheIndexAndFilterBlocks()} * for this mode if using block-based table. * * Default: false * * @param newTableReaderForCompactionInputs true if a new file descriptor and * table reader should be created for compaction inputs * * @return the reference to the current options. */ T setNewTableReaderForCompactionInputs( boolean newTableReaderForCompactionInputs); /** * If true, always create a new file descriptor and new table reader * for compaction inputs. Turn this parameter on may introduce extra * memory usage in the table reader, if it allocates extra memory * for indexes. This will allow file descriptor prefetch options * to be set for compaction input files and not to impact file * descriptors for the same file used by user queries. * Suggest to enable {@link BlockBasedTableConfig#cacheIndexAndFilterBlocks()} * for this mode if using block-based table. * * Default: false * * @return true if a new file descriptor and table reader are created for * compaction inputs */ boolean newTableReaderForCompactionInputs(); /** * This is a maximum buffer size that is used by WinMmapReadableFile in * unbuffered disk I/O mode. We need to maintain an aligned buffer for * reads. We allow the buffer to grow until the specified value and then * for bigger requests allocate one shot buffers. In unbuffered mode we * always bypass read-ahead buffer at ReadaheadRandomAccessFile * When read-ahead is required we then make use of * {@link MutableDBOptionsInterface#compactionReadaheadSize()} value and * always try to read ahead. * With read-ahead we always pre-allocate buffer to the size instead of * growing it up to a limit. * * This option is currently honored only on Windows * * Default: 1 Mb * * Special value: 0 - means do not maintain per instance buffer. Allocate * per request buffer and avoid locking. 
* * @param randomAccessMaxBufferSize the maximum size of the random access * buffer * * @return the reference to the current options. */ T setRandomAccessMaxBufferSize(long randomAccessMaxBufferSize); /** * This is a maximum buffer size that is used by WinMmapReadableFile in * unbuffered disk I/O mode. We need to maintain an aligned buffer for * reads. We allow the buffer to grow until the specified value and then * for bigger requests allocate one shot buffers. In unbuffered mode we * always bypass read-ahead buffer at ReadaheadRandomAccessFile * When read-ahead is required we then make use of * {@link MutableDBOptionsInterface#compactionReadaheadSize()} value and * always try to read ahead. With read-ahead we always pre-allocate buffer * to the size instead of growing it up to a limit. * * This option is currently honored only on Windows * * Default: 1 Mb * * Special value: 0 - means do not maintain per instance buffer. Allocate * per request buffer and avoid locking. * * @return the maximum size of the random access buffer */ long randomAccessMaxBufferSize(); /** * Use adaptive mutex, which spins in the user space before resorting * to kernel. This could reduce context switch when the mutex is not * heavily contended. However, if the mutex is hot, we could end up * wasting spin time. * Default: false * * @param useAdaptiveMutex true if adaptive mutex is used. * @return the instance of the current object. */ T setUseAdaptiveMutex(boolean useAdaptiveMutex); /** * Use adaptive mutex, which spins in the user space before resorting * to kernel. This could reduce context switch when the mutex is not * heavily contended. However, if the mutex is hot, we could end up * wasting spin time. * Default: false * * @return true if adaptive mutex is used. */ boolean useAdaptiveMutex(); /** * Sets the {@link EventListener}s whose callback functions * will be called when specific RocksDB event happens. 
*
* Note: the RocksJava API currently only supports EventListeners implemented in Java.
* It could be extended in future to also support adding/removing EventListeners implemented in
* C++.
*
* @param listeners the listeners who should be notified on various events.
*
* @return the instance of the current object.
*/
T setListeners(final List<AbstractEventListener> listeners);

/**
* Gets the {@link EventListener}s whose callback functions
* will be called when specific RocksDB event happens.
*
* Note: the RocksJava API currently only supports EventListeners implemented in Java.
* It could be extended in future to also support adding/removing EventListeners implemented in
* C++.
*
* @return the listeners who will be notified on various events.
*/
List<AbstractEventListener> listeners();

/**
* If true, then the status of the threads involved in this DB will
* be tracked and available via GetThreadList() API.
*
* Default: false
*
* @param enableThreadTracking true to enable tracking
*
* @return the reference to the current options.
*/
T setEnableThreadTracking(boolean enableThreadTracking);

/**
* If true, then the status of the threads involved in this DB will
* be tracked and available via GetThreadList() API.
*
* Default: false
*
* @return true if tracking is enabled
*/
boolean enableThreadTracking();

/**
* By default, a single write thread queue is maintained. The thread gets
* to the head of the queue becomes write batch group leader and responsible
* for writing to WAL and memtable for the batch group.
*
* If {@link #enablePipelinedWrite()} is true, separate write thread queue is
* maintained for WAL write and memtable write. A write thread first enter WAL
* writer queue and then memtable writer queue. Pending thread on the WAL
* writer queue thus only have to wait for previous writers to finish their
* WAL writing but not the memtable writing. Enabling the feature may improve
* write throughput and reduce latency of the prepare phase of two-phase
* commit.
* * Default: false * * @param enablePipelinedWrite true to enabled pipelined writes * * @return the reference to the current options. */ T setEnablePipelinedWrite(final boolean enablePipelinedWrite); /** * Returns true if pipelined writes are enabled. * See {@link #setEnablePipelinedWrite(boolean)}. * * @return true if pipelined writes are enabled, false otherwise. */ boolean enablePipelinedWrite(); /** * Setting {@link #unorderedWrite()} to true trades higher write throughput with * relaxing the immutability guarantee of snapshots. This violates the * repeatability one expects from ::Get from a snapshot, as well as * ::MultiGet and Iterator's consistent-point-in-time view property. * If the application cannot tolerate the relaxed guarantees, it can implement * its own mechanisms to work around that and yet benefit from the higher * throughput. Using TransactionDB with WRITE_PREPARED write policy and * {@link #twoWriteQueues()} true is one way to achieve immutable snapshots despite * unordered_write. * * By default, i.e., when it is false, rocksdb does not advance the sequence * number for new snapshots unless all the writes with lower sequence numbers * are already finished. This provides the immutability that we except from * snapshots. Moreover, since Iterator and MultiGet internally depend on * snapshots, the snapshot immutability results into Iterator and MultiGet * offering consistent-point-in-time view. If set to true, although * Read-Your-Own-Write property is still provided, the snapshot immutability * property is relaxed: the writes issued after the snapshot is obtained (with * larger sequence numbers) will be still not visible to the reads from that * snapshot, however, there still might be pending writes (with lower sequence * number) that will change the state visible to the snapshot after they are * landed to the memtable. * * @param unorderedWrite true to enabled unordered write * * @return the reference to the current options. 
*/
T setUnorderedWrite(final boolean unorderedWrite);

/**
* Returns true if unordered writes are enabled.
* See {@link #setUnorderedWrite(boolean)}.
*
* @return true if unordered writes are enabled, false otherwise.
*/
boolean unorderedWrite();

/**
* If true, allow multi-writers to update mem tables in parallel.
* Only some memtable factories support concurrent writes; currently it
* is implemented only for SkipListFactory. Concurrent memtable writes
* are not compatible with inplace_update_support or filter_deletes.
* It is strongly recommended to set
* {@link #setEnableWriteThreadAdaptiveYield(boolean)} if you are going to use
* this feature.
* Default: true
*
* @param allowConcurrentMemtableWrite true to enable concurrent writes
* for the memtable
*
* @return the reference to the current options.
*/
T setAllowConcurrentMemtableWrite(boolean allowConcurrentMemtableWrite);

/**
* If true, allow multi-writers to update mem tables in parallel.
* Only some memtable factories support concurrent writes; currently it
* is implemented only for SkipListFactory. Concurrent memtable writes
* are not compatible with inplace_update_support or filter_deletes.
* It is strongly recommended to set
* {@link #setEnableWriteThreadAdaptiveYield(boolean)} if you are going to use
* this feature.
* Default: true
*
* @return true if concurrent writes are enabled for the memtable
*/
boolean allowConcurrentMemtableWrite();

/**
* If true, threads synchronizing with the write batch group leader will
* wait for up to {@link #writeThreadMaxYieldUsec()} before blocking on a
* mutex. This can substantially improve throughput for concurrent workloads,
* regardless of whether {@link #allowConcurrentMemtableWrite()} is enabled.
* Default: true
*
* @param enableWriteThreadAdaptiveYield true to enable adaptive yield for the
* write threads
*
* @return the reference to the current options.
*/ T setEnableWriteThreadAdaptiveYield( boolean enableWriteThreadAdaptiveYield); /** * If true, threads synchronizing with the write batch group leader will * wait for up to {@link #writeThreadMaxYieldUsec()} before blocking on a * mutex. This can substantially improve throughput for concurrent workloads, * regardless of whether {@link #allowConcurrentMemtableWrite()} is enabled. * Default: true * * @return true if adaptive yield is enabled * for the writing threads */ boolean enableWriteThreadAdaptiveYield(); /** * The maximum number of microseconds that a write operation will use * a yielding spin loop to coordinate with other write threads before * blocking on a mutex. (Assuming {@link #writeThreadSlowYieldUsec()} is * set properly) increasing this value is likely to increase RocksDB * throughput at the expense of increased CPU usage. * Default: 100 * * @param writeThreadMaxYieldUsec maximum number of microseconds * * @return the reference to the current options. */ T setWriteThreadMaxYieldUsec(long writeThreadMaxYieldUsec); /** * The maximum number of microseconds that a write operation will use * a yielding spin loop to coordinate with other write threads before * blocking on a mutex. (Assuming {@link #writeThreadSlowYieldUsec()} is * set properly) increasing this value is likely to increase RocksDB * throughput at the expense of increased CPU usage. * Default: 100 * * @return the maximum number of microseconds */ long writeThreadMaxYieldUsec(); /** * The latency in microseconds after which a std::this_thread::yield * call (sched_yield on Linux) is considered to be a signal that * other processes or threads would like to use the current core. * Increasing this makes writer threads more likely to take CPU * by spinning, which will show up as an increase in the number of * involuntary context switches. * Default: 3 * * @param writeThreadSlowYieldUsec the latency in microseconds * * @return the reference to the current options. 
*/
T setWriteThreadSlowYieldUsec(long writeThreadSlowYieldUsec);

/**
* The latency in microseconds after which a std::this_thread::yield
* call (sched_yield on Linux) is considered to be a signal that
* other processes or threads would like to use the current core.
* Increasing this makes writer threads more likely to take CPU
* by spinning, which will show up as an increase in the number of
* involuntary context switches.
* Default: 3
*
* @return the latency in microseconds
*/
long writeThreadSlowYieldUsec();

/**
* If true, then DB::Open() will not update the statistics used to optimize
* compaction decision by loading table properties from many files.
* Turning off this feature will improve DBOpen time especially in
* disk environment.
*
* Default: false
*
* @param skipStatsUpdateOnDbOpen true if updating stats will be skipped
*
* @return the reference to the current options.
*/
T setSkipStatsUpdateOnDbOpen(boolean skipStatsUpdateOnDbOpen);

/**
* If true, then DB::Open() will not update the statistics used to optimize
* compaction decision by loading table properties from many files.
* Turning off this feature will improve DBOpen time especially in
* disk environment.
*
* Default: false
*
* @return true if updating stats will be skipped
*/
boolean skipStatsUpdateOnDbOpen();

/**
* If true, then {@link RocksDB#open(String)} will not fetch and check sizes of all sst files.
* This may significantly speed up startup if there are many sst files,
* especially when using non-default Env with expensive GetFileSize().
* We'll still check that all required sst files exist.
* If {@code paranoid_checks} is false, this option is ignored, and sst files are
* not checked at all.
*
* Default: false
*
* @param skipCheckingSstFileSizesOnDbOpen if true, then SST file sizes will not be checked
* when calling {@link RocksDB#open(String)}.
* @return the reference to the current options.
*/ T setSkipCheckingSstFileSizesOnDbOpen(final boolean skipCheckingSstFileSizesOnDbOpen); /** * If true, then {@link RocksDB#open(String)} will not fetch and check sizes of all sst files. * This may significantly speed up startup if there are many sst files, * especially when using non-default Env with expensive GetFileSize(). * We'll still check that all required sst files exist. * If {@code paranoid_checks} is false, this option is ignored, and sst files are * not checked at all. * * Default: false * * @return true, if file sizes will not be checked when calling {@link RocksDB#open(String)}. */ boolean skipCheckingSstFileSizesOnDbOpen(); /** * Recovery mode to control the consistency while replaying WAL * * Default: {@link WALRecoveryMode#PointInTimeRecovery} * * @param walRecoveryMode The WAL recover mode * * @return the reference to the current options. */ T setWalRecoveryMode(WALRecoveryMode walRecoveryMode); /** * Recovery mode to control the consistency while replaying WAL * * Default: {@link WALRecoveryMode#PointInTimeRecovery} * * @return The WAL recover mode */ WALRecoveryMode walRecoveryMode(); /** * if set to false then recovery will fail when a prepared * transaction is encountered in the WAL * * Default: false * * @param allow2pc true if two-phase-commit is enabled * * @return the reference to the current options. */ T setAllow2pc(boolean allow2pc); /** * if set to false then recovery will fail when a prepared * transaction is encountered in the WAL * * Default: false * * @return true if two-phase-commit is enabled */ boolean allow2pc(); /** * A global cache for table-level rows. * * Default: null (disabled) * * @param rowCache The global row cache * * @return the reference to the current options. */ T setRowCache(final Cache rowCache); /** * A global cache for table-level rows. 
* * Default: null (disabled) * * @return The global row cache */ Cache rowCache(); /** * A filter object supplied to be invoked while processing write-ahead-logs * (WALs) during recovery. The filter provides a way to inspect log * records, ignoring a particular record or skipping replay. * The filter is invoked at startup and is invoked from a single-thread * currently. * * @param walFilter the filter for processing WALs during recovery. * * @return the reference to the current options. */ T setWalFilter(final AbstractWalFilter walFilter); /** * Get's the filter for processing WALs during recovery. * See {@link #setWalFilter(AbstractWalFilter)}. * * @return the filter used for processing WALs during recovery. */ WalFilter walFilter(); /** * If true, then DB::Open / CreateColumnFamily / DropColumnFamily * / SetOptions will fail if options file is not detected or properly * persisted. * * DEFAULT: false * * @param failIfOptionsFileError true if we should fail if there is an error * in the options file * * @return the reference to the current options. */ T setFailIfOptionsFileError(boolean failIfOptionsFileError); /** * If true, then DB::Open / CreateColumnFamily / DropColumnFamily * / SetOptions will fail if options file is not detected or properly * persisted. * * DEFAULT: false * * @return true if we should fail if there is an error in the options file */ boolean failIfOptionsFileError(); /** * If true, then print malloc stats together with rocksdb.stats * when printing to LOG. * * DEFAULT: false * * @param dumpMallocStats true if malloc stats should be printed to LOG * * @return the reference to the current options. */ T setDumpMallocStats(boolean dumpMallocStats); /** * If true, then print malloc stats together with rocksdb.stats * when printing to LOG. 
*
* DEFAULT: false
*
* @return true if malloc stats should be printed to LOG
*/
boolean dumpMallocStats();

/**
* By default RocksDB replays WAL logs and flushes them on DB open, which may
* create very small SST files. If this option is enabled, RocksDB will try
* to avoid (but not guarantee not to) flush during recovery. Also, existing
* WAL logs will be kept, so that if crash happened before flush, we still
* have logs to recover from.
*
* DEFAULT: false
*
* @param avoidFlushDuringRecovery true to try to avoid (but not guarantee
* not to) flush during recovery
*
* @return the reference to the current options.
*/
T setAvoidFlushDuringRecovery(boolean avoidFlushDuringRecovery);

/**
* By default RocksDB replays WAL logs and flushes them on DB open, which may
* create very small SST files. If this option is enabled, RocksDB will try
* to avoid (but not guarantee not to) flush during recovery. Also, existing
* WAL logs will be kept, so that if crash happened before flush, we still
* have logs to recover from.
*
* DEFAULT: false
*
* @return true to try to avoid (but not guarantee not to) flush during
* recovery
*/
boolean avoidFlushDuringRecovery();

/**
* Set this option to true during creation of database if you want
* to be able to ingest behind (call IngestExternalFile() skipping keys
* that already exist, rather than overwriting matching keys).
* Setting this option to true will affect 3 things:
* 1) Disable some internal optimizations around SST file compression
* 2) Reserve bottom-most level for ingested files only.
* 3) Note that num_levels should be &gt;= 3 if this option is turned on.
*
* DEFAULT: false
*
* @param allowIngestBehind true to allow ingest behind, false to disallow.
*
* @return the reference to the current options.
*/
T setAllowIngestBehind(final boolean allowIngestBehind);

/**
* Returns true if ingest behind is allowed.
* See {@link #setAllowIngestBehind(boolean)}.
*
* @return true if ingest behind is allowed, false otherwise.
*/ boolean allowIngestBehind(); /** * Needed to support differential snapshots. * If set to true then DB will only process deletes with sequence number * less than what was set by SetPreserveDeletesSequenceNumber(uint64_t ts). * Clients are responsible to periodically call this method to advance * the cutoff time. If this method is never called and preserve_deletes * is set to true NO deletes will ever be processed. * At the moment this only keeps normal deletes, SingleDeletes will * not be preserved. * * DEFAULT: false * * @param preserveDeletes true to preserve deletes. * * @return the reference to the current options. */ T setPreserveDeletes(final boolean preserveDeletes); /** * Returns true if deletes are preserved. * See {@link #setPreserveDeletes(boolean)}. * * @return true if deletes are preserved, false otherwise. */ boolean preserveDeletes(); /** * If enabled it uses two queues for writes, one for the ones with * disable_memtable and one for the ones that also write to memtable. This * allows the memtable writes not to lag behind other writes. It can be used * to optimize MySQL 2PC in which only the commits, which are serial, write to * memtable. * * DEFAULT: false * * @param twoWriteQueues true to enable two write queues, false otherwise. * * @return the reference to the current options. */ T setTwoWriteQueues(final boolean twoWriteQueues); /** * Returns true if two write queues are enabled. * * @return true if two write queues are enabled, false otherwise. */ boolean twoWriteQueues(); /** * If true WAL is not flushed automatically after each write. Instead it * relies on manual invocation of FlushWAL to write the WAL buffer to its * file. * * DEFAULT: false * * @param manualWalFlush true to set disable automatic WAL flushing, * false otherwise. * * @return the reference to the current options. */ T setManualWalFlush(final boolean manualWalFlush); /** * Returns true if automatic WAL flushing is disabled. * See {@link #setManualWalFlush(boolean)}. 
* * @return true if automatic WAL flushing is disabled, false otherwise. */ boolean manualWalFlush(); /** * If true, RocksDB supports flushing multiple column families and committing * their results atomically to MANIFEST. Note that it is not * necessary to set atomic_flush to true if WAL is always enabled since WAL * allows the database to be restored to the last persistent state in WAL. * This option is useful when there are column families with writes NOT * protected by WAL. * For manual flush, application has to specify which column families to * flush atomically in {@link RocksDB#flush(FlushOptions, List)}. * For auto-triggered flush, RocksDB atomically flushes ALL column families. * * Currently, any WAL-enabled writes after atomic flush may be replayed * independently if the process crashes later and tries to recover. * * @param atomicFlush true to enable atomic flush of multiple column families. * * @return the reference to the current options. */ T setAtomicFlush(final boolean atomicFlush); /** * Determine if atomic flush of multiple column families is enabled. * * See {@link #setAtomicFlush(boolean)}. * * @return true if atomic flush is enabled. */ boolean atomicFlush(); /** * If true, working thread may avoid doing unnecessary and long-latency * operation (such as deleting obsolete files directly or deleting memtable) * and will instead schedule a background job to do it. * Use it if you're latency-sensitive. * If set to true, takes precedence over * {@link ReadOptions#setBackgroundPurgeOnIteratorCleanup(boolean)}. * * @param avoidUnnecessaryBlockingIO If true, working thread may avoid doing unnecessary * operation. * @return the reference to the current options. */ T setAvoidUnnecessaryBlockingIO(final boolean avoidUnnecessaryBlockingIO); /** * If true, working thread may avoid doing unnecessary and long-latency * operation (such as deleting obsolete files directly or deleting memtable) * and will instead schedule a background job to do it. 
* Use it if you're latency-sensitive.
* If set to true, takes precedence over
* {@link ReadOptions#setBackgroundPurgeOnIteratorCleanup(boolean)}.
*
* @return true, if working thread may avoid doing unnecessary operation.
*/
boolean avoidUnnecessaryBlockingIO();

/**
* If true, automatically persist stats to a hidden column family (column
* family name: ___rocksdb_stats_history___) every
* stats_persist_period_sec seconds; otherwise, write to an in-memory
* struct. User can query through `GetStatsHistory` API.
* If user attempts to create a column family with the same name on a DB
* which have previously set persist_stats_to_disk to true, the column family
* creation will fail, but the hidden column family will survive, as well as
* the previously persisted statistics.
* When persisting stats to disk, the stat name will be limited at 100 bytes.
* Default: false
*
* @param persistStatsToDisk true if stats should be persisted to hidden column family.
* @return the instance of the current object.
*/
T setPersistStatsToDisk(final boolean persistStatsToDisk);

/**
* If true, automatically persist stats to a hidden column family (column
* family name: ___rocksdb_stats_history___) every
* stats_persist_period_sec seconds; otherwise, write to an in-memory
* struct. User can query through `GetStatsHistory` API.
* If user attempts to create a column family with the same name on a DB
* which have previously set persist_stats_to_disk to true, the column family
* creation will fail, but the hidden column family will survive, as well as
* the previously persisted statistics.
* When persisting stats to disk, the stat name will be limited at 100 bytes.
* Default: false
*
* @return true if stats should be persisted to hidden column family.
*/
boolean persistStatsToDisk();

/**
* Historically DB ID has always been stored in Identity File in DB folder.
* If this flag is true, the DB ID is written to Manifest file in addition
* to the Identity file. By doing this 2 problems are solved
* 1.
We don't checksum the Identity file where as Manifest file is. * 2. Since the source of truth for DB is Manifest file DB ID will sit with * the source of truth. Previously the Identity file could be copied * independent of Manifest and that can result in wrong DB ID. * We recommend setting this flag to true. * Default: false * * @param writeDbidToManifest if true, then DB ID will be written to Manifest file. * @return the instance of the current object. */ T setWriteDbidToManifest(final boolean writeDbidToManifest); /** * Historically DB ID has always been stored in Identity File in DB folder. * If this flag is true, the DB ID is written to Manifest file in addition * to the Identity file. By doing this 2 problems are solved * 1. We don't checksum the Identity file where as Manifest file is. * 2. Since the source of truth for DB is Manifest file DB ID will sit with * the source of truth. Previously the Identity file could be copied * independent of Manifest and that can result in wrong DB ID. * We recommend setting this flag to true. * Default: false * * @return true, if DB ID will be written to Manifest file. */ boolean writeDbidToManifest(); /** * The number of bytes to prefetch when reading the log. This is mostly useful * for reading a remotely located log, as it can save the number of * round-trips. If 0, then the prefetching is disabled. * * Default: 0 * * @param logReadaheadSize the number of bytes to prefetch when reading the log. * @return the instance of the current object. */ T setLogReadaheadSize(final long logReadaheadSize); /** * The number of bytes to prefetch when reading the log. This is mostly useful * for reading a remotely located log, as it can save the number of * round-trips. If 0, then the prefetching is disabled. * * Default: 0 * * @return the number of bytes to prefetch when reading the log. 
*/ long logReadaheadSize(); /** * By default, RocksDB recovery fails if any table file referenced in * MANIFEST are missing after scanning the MANIFEST. * Best-efforts recovery is another recovery mode that * tries to restore the database to the most recent point in time without * missing file. * Currently not compatible with atomic flush. Furthermore, WAL files will * not be used for recovery if best_efforts_recovery is true. * Default: false * * @param bestEffortsRecovery if true, RocksDB will use best-efforts mode when recovering. * @return the instance of the current object. */ T setBestEffortsRecovery(final boolean bestEffortsRecovery); /** * By default, RocksDB recovery fails if any table file referenced in * MANIFEST are missing after scanning the MANIFEST. * Best-efforts recovery is another recovery mode that * tries to restore the database to the most recent point in time without * missing file. * Currently not compatible with atomic flush. Furthermore, WAL files will * not be used for recovery if best_efforts_recovery is true. * Default: false * * @return true, if RocksDB uses best-efforts mode when recovering. */ boolean bestEffortsRecovery(); /** * It defines how many times db resume is called by a separate thread when * background retryable IO Error happens. When background retryable IO * Error happens, SetBGError is called to deal with the error. If the error * can be auto-recovered (e.g., retryable IO Error during Flush or WAL write), * then db resume is called in background to recover from the error. If this * value is 0 or negative, db resume will not be called. * * Default: INT_MAX * * @param maxBgerrorResumeCount maximum number of times db resume should be called when IO Error * happens. * @return the instance of the current object. */ T setMaxBgErrorResumeCount(final int maxBgerrorResumeCount); /** * It defines how many times db resume is called by a separate thread when * background retryable IO Error happens. 
When background retryable IO * Error happens, SetBGError is called to deal with the error. If the error * can be auto-recovered (e.g., retryable IO Error during Flush or WAL write), * then db resume is called in background to recover from the error. If this * value is 0 or negative, db resume will not be called. * * Default: INT_MAX * * @return maximum number of times db resume should be called when IO Error happens. */ int maxBgerrorResumeCount(); /** * If max_bgerror_resume_count is &ge; 2, db resume is called multiple times. * This option decides how long to wait to retry the next resume if the * previous resume fails and satisfy redo resume conditions. * * Default: 1000000 (microseconds). * * @param bgerrorResumeRetryInterval how many microseconds to wait between DB resume attempts. * @return the instance of the current object. */ T setBgerrorResumeRetryInterval(final long bgerrorResumeRetryInterval); /** * If max_bgerror_resume_count is &ge; 2, db resume is called multiple times. * This option decides how long to wait to retry the next resume if the * previous resume fails and satisfy redo resume conditions. * * Default: 1000000 (microseconds). * * @return the instance of the current object. */ long bgerrorResumeRetryInterval(); }
facebook/rocksdb
java/src/main/java/org/rocksdb/DBOptionsInterface.java
953
package mindustry.net;

import arc.*;
import arc.files.*;
import arc.func.*;
import arc.struct.*;
import arc.util.*;
import arc.util.io.*;
import mindustry.*;
import mindustry.core.*;
import mindustry.mod.Mods.*;

import java.io.*;
import java.text.*;
import java.util.*;

import static arc.Core.*;
import static mindustry.Vars.*;

/** Builds human-readable crash reports and writes them to disk when the game dies. All methods are best-effort: crash handling must never itself crash. */
public class CrashSender{

    /**
     * Builds the full crash report text: banner, version, OS/JVM info, memory, core count, mod list and the error itself.
     * @param error the formatted error text (usually a stack trace) appended at the end of the report.
     * @return the complete report string.
     */
    public static String createReport(String error){
        String report = "Mindustry has crashed. How unfortunate.\n";
        //only suggest reporting upstream for mod-free builds with a valid build number
        if(mods != null && mods.list().size == 0 && Version.build != -1){
            report += "Report this at " + Vars.reportIssueURL + "\n\n";
        }
        return report
        + "Version: " + Version.combined() + (Vars.headless ? " (Server)" : "") + "\n"
        + "OS: " + OS.osName + " x" + (OS.osArchBits) + " (" + OS.osArch + ")\n"
        + ((OS.isAndroid || OS.isIos) && app != null ? "Android API level: " + Core.app.getVersion() + "\n" : "")
        + "Java Version: " + OS.javaVersion + "\n"
        + "Runtime Available Memory: " + (Runtime.getRuntime().maxMemory() / 1024 / 1024) + "mb\n"
        + "Cores: " + Runtime.getRuntime().availableProcessors() + "\n"
        + (mods == null ? "<no mod init>" : "Mods: " + (!mods.list().contains(LoadedMod::shouldBeEnabled) ? "none (vanilla)" : mods.list().select(LoadedMod::shouldBeEnabled).toString(", ", mod -> mod.name + ":" + mod.meta.version)))
        + "\n\n" + error;
    }

    /**
     * Writes a crash report for the given exception into the data directory without exiting the process.
     * Any failure while writing is deliberately swallowed — there is nothing sane to do if logging itself fails.
     * @param exception the error to log.
     */
    public static void log(Throwable exception){
        try{
            Core.settings.getDataDirectory().child("crashes").child("crash_" + System.currentTimeMillis() + ".txt").writeString(createReport(Strings.neatError(exception)));
        }catch(Throwable ignored){
            //best-effort only
        }
    }

    /**
     * Handles a fatal crash: logs the exception, saves game data, loads version info if missing,
     * writes a crash report file, closes network connections, then terminates the JVM.
     * This method never returns normally; it always ends in {@link System#exit(int)}.
     * @param exception the fatal error.
     * @param writeListener invoked with the crash report file after it has been written successfully.
     */
    public static void send(Throwable exception, Cons<File> writeListener){
        try{
            try{
                //log to file
                Log.err(exception);
            }catch(Throwable no){
                exception.printStackTrace();
            }

            //try saving game data
            try{
                settings.manualSave();
            }catch(Throwable ignored){}

            //don't create crash logs for custom builds, as it's expected
            if(OS.username.equals("anuke") && !"steam".equals(Version.modifier)){
                ret();
            }

            //attempt to load version regardless, so the report contains something useful
            if(Version.number == 0){
                try{
                    InputStream stream = CrashSender.class.getResourceAsStream("/version.properties");
                    //fail explicitly instead of with a bare NPE when the resource is missing
                    if(stream == null) throw new IOException("version.properties not found on classpath.");
                    //try-with-resources: the original leaked this reader/stream
                    //NOTE(review): platform default charset kept as-is; presumably the file is ASCII — confirm
                    try(Reader reader = new InputStreamReader(stream)){
                        ObjectMap<String, String> map = new ObjectMap<>();
                        PropertiesUtils.load(map, reader);

                        Version.type = map.get("type");
                        Version.number = Integer.parseInt(map.get("number"));
                        Version.modifier = map.get("modifier");
                        //build may be "build.revision" or a plain number; -1 means unknown
                        if(map.get("build").contains(".")){
                            String[] split = map.get("build").split("\\.");
                            Version.build = Integer.parseInt(split[0]);
                            Version.revision = Integer.parseInt(split[1]);
                        }else{
                            Version.build = Strings.canParseInt(map.get("build")) ? Integer.parseInt(map.get("build")) : -1;
                        }
                    }
                }catch(Throwable e){
                    e.printStackTrace();
                    Log.err("Failed to parse version.");
                }
            }

            //write the crash report into the OS app-data directory and notify the listener
            try{
                File file = new File(OS.getAppDataDirectoryString(Vars.appName), "crashes/crash-report-" + new SimpleDateFormat("MM_dd_yyyy_HH_mm_ss").format(new Date()) + ".txt");
                new Fi(OS.getAppDataDirectoryString(Vars.appName)).child("crashes").mkdirs();
                new Fi(file).writeString(createReport(writeException(exception)));
                writeListener.get(file);
            }catch(Throwable e){
                Log.err("Failed to save local crash report.", e);
            }

            //attempt to close connections, if applicable
            try{
                net.dispose();
            }catch(Throwable ignored){
            }
        }catch(Throwable death){
            death.printStackTrace();
        }

        ret();
    }

    /** Terminates the JVM with a non-zero exit code. */
    private static void ret(){
        System.exit(1);
    }

    /** @return the full stack trace of {@code e} as a string. */
    private static String writeException(Throwable e){
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        e.printStackTrace(pw);
        return sw.toString();
    }
}
Anuken/Mindustry
core/src/mindustry/net/CrashSender.java
954
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.channel; import io.netty.buffer.ByteBuf; import io.netty.util.concurrent.DefaultEventExecutorGroup; import io.netty.util.concurrent.EventExecutorGroup; import io.netty.util.concurrent.UnorderedThreadPoolEventExecutor; import java.net.SocketAddress; import java.nio.ByteBuffer; import java.nio.channels.SocketChannel; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NoSuchElementException; /** * A list of {@link ChannelHandler}s which handles or intercepts inbound events and outbound operations of a * {@link Channel}. {@link ChannelPipeline} implements an advanced form of the * <a href="https://www.oracle.com/technetwork/java/interceptingfilter-142169.html">Intercepting Filter</a> pattern * to give a user full control over how an event is handled and how the {@link ChannelHandler}s in a pipeline * interact with each other. * * <h3>Creation of a pipeline</h3> * * Each channel has its own pipeline and it is created automatically when a new channel is created. * * <h3>How an event flows in a pipeline</h3> * * The following diagram describes how I/O events are processed by {@link ChannelHandler}s in a {@link ChannelPipeline} * typically. 
An I/O event is handled by either a {@link ChannelInboundHandler} or a {@link ChannelOutboundHandler} * and be forwarded to its closest handler by calling the event propagation methods defined in * {@link ChannelHandlerContext}, such as {@link ChannelHandlerContext#fireChannelRead(Object)} and * {@link ChannelHandlerContext#write(Object)}. * * <pre> * I/O Request * via {@link Channel} or * {@link ChannelHandlerContext} * | * +---------------------------------------------------+---------------+ * | ChannelPipeline | | * | \|/ | * | +---------------------+ +-----------+----------+ | * | | Inbound Handler N | | Outbound Handler 1 | | * | +----------+----------+ +-----------+----------+ | * | /|\ | | * | | \|/ | * | +----------+----------+ +-----------+----------+ | * | | Inbound Handler N-1 | | Outbound Handler 2 | | * | +----------+----------+ +-----------+----------+ | * | /|\ . | * | . . | * | ChannelHandlerContext.fireIN_EVT() ChannelHandlerContext.OUT_EVT()| * | [ method call] [method call] | * | . . | * | . \|/ | * | +----------+----------+ +-----------+----------+ | * | | Inbound Handler 2 | | Outbound Handler M-1 | | * | +----------+----------+ +-----------+----------+ | * | /|\ | | * | | \|/ | * | +----------+----------+ +-----------+----------+ | * | | Inbound Handler 1 | | Outbound Handler M | | * | +----------+----------+ +-----------+----------+ | * | /|\ | | * +---------------+-----------------------------------+---------------+ * | \|/ * +---------------+-----------------------------------+---------------+ * | | | | * | [ Socket.read() ] [ Socket.write() ] | * | | * | Netty Internal I/O Threads (Transport Implementation) | * +-------------------------------------------------------------------+ * </pre> * An inbound event is handled by the inbound handlers in the bottom-up direction as shown on the left side of the * diagram. An inbound handler usually handles the inbound data generated by the I/O thread on the bottom of the * diagram. 
 * The inbound data is often read from a remote peer via the actual input operation such as
 * {@link SocketChannel#read(ByteBuffer)}.  If an inbound event goes beyond the top inbound handler, it is discarded
 * silently, or logged if it needs your attention.
 * <p>
 * An outbound event is handled by the outbound handler in the top-down direction as shown on the right side of the
 * diagram.  An outbound handler usually generates or transforms the outbound traffic such as write requests.
 * If an outbound event goes beyond the bottom outbound handler, it is handled by an I/O thread associated with the
 * {@link Channel}. The I/O thread often performs the actual output operation such as
 * {@link SocketChannel#write(ByteBuffer)}.
 * <p>
 * For example, let us assume that we created the following pipeline:
 * <pre>
 * {@link ChannelPipeline} p = ...;
 * p.addLast("1", new InboundHandlerA());
 * p.addLast("2", new InboundHandlerB());
 * p.addLast("3", new OutboundHandlerA());
 * p.addLast("4", new OutboundHandlerB());
 * p.addLast("5", new InboundOutboundHandlerX());
 * </pre>
 * In the example above, the class whose name starts with {@code Inbound} means it is an inbound handler.
 * The class whose name starts with {@code Outbound} means it is an outbound handler.
 * <p>
 * In the given example configuration, the handler evaluation order is 1, 2, 3, 4, 5 when an event goes inbound.
 * When an event goes outbound, the order is 5, 4, 3, 2, 1.  On top of this principle, {@link ChannelPipeline} skips
 * the evaluation of certain handlers to shorten the stack depth:
 * <ul>
 * <li>3 and 4 don't implement {@link ChannelInboundHandler}, and therefore the actual evaluation order of an inbound
 *     event will be: 1, 2, and 5.</li>
 * <li>1 and 2 don't implement {@link ChannelOutboundHandler}, and therefore the actual evaluation order of an
 *     outbound event will be: 5, 4, and 3.</li>
 * <li>If 5 implements both {@link ChannelInboundHandler} and {@link ChannelOutboundHandler}, the evaluation order of
 *     an inbound and an outbound event could be 125 and 543 respectively.</li>
 * </ul>
 *
 * <h3>Forwarding an event to the next handler</h3>
 *
 * As you may have noticed in the diagram, a handler has to invoke the event propagation methods in
 * {@link ChannelHandlerContext} to forward an event to its next handler.  Those methods include:
 * <ul>
 * <li>Inbound event propagation methods:
 *     <ul>
 *     <li>{@link ChannelHandlerContext#fireChannelRegistered()}</li>
 *     <li>{@link ChannelHandlerContext#fireChannelActive()}</li>
 *     <li>{@link ChannelHandlerContext#fireChannelRead(Object)}</li>
 *     <li>{@link ChannelHandlerContext#fireChannelReadComplete()}</li>
 *     <li>{@link ChannelHandlerContext#fireExceptionCaught(Throwable)}</li>
 *     <li>{@link ChannelHandlerContext#fireUserEventTriggered(Object)}</li>
 *     <li>{@link ChannelHandlerContext#fireChannelWritabilityChanged()}</li>
 *     <li>{@link ChannelHandlerContext#fireChannelInactive()}</li>
 *     <li>{@link ChannelHandlerContext#fireChannelUnregistered()}</li>
 *     </ul>
 * </li>
 * <li>Outbound event propagation methods:
 *     <ul>
 *     <li>{@link ChannelHandlerContext#bind(SocketAddress, ChannelPromise)}</li>
 *     <li>{@link ChannelHandlerContext#connect(SocketAddress, SocketAddress, ChannelPromise)}</li>
 *     <li>{@link ChannelHandlerContext#write(Object, ChannelPromise)}</li>
 *     <li>{@link ChannelHandlerContext#flush()}</li>
 *     <li>{@link ChannelHandlerContext#read()}</li>
 *     <li>{@link
ChannelHandlerContext#disconnect(ChannelPromise)}</li> * <li>{@link ChannelHandlerContext#close(ChannelPromise)}</li> * <li>{@link ChannelHandlerContext#deregister(ChannelPromise)}</li> * </ul> * </li> * </ul> * * and the following example shows how the event propagation is usually done: * * <pre> * public class MyInboundHandler extends {@link ChannelInboundHandlerAdapter} { * {@code @Override} * public void channelActive({@link ChannelHandlerContext} ctx) { * System.out.println("Connected!"); * ctx.fireChannelActive(); * } * } * * public class MyOutboundHandler extends {@link ChannelOutboundHandlerAdapter} { * {@code @Override} * public void close({@link ChannelHandlerContext} ctx, {@link ChannelPromise} promise) { * System.out.println("Closing .."); * ctx.close(promise); * } * } * </pre> * * <h3>Building a pipeline</h3> * <p> * A user is supposed to have one or more {@link ChannelHandler}s in a pipeline to receive I/O events (e.g. read) and * to request I/O operations (e.g. write and close). For example, a typical server will have the following handlers * in each channel's pipeline, but your mileage may vary depending on the complexity and characteristics of the * protocol and business logic: * * <ol> * <li>Protocol Decoder - translates binary data (e.g. {@link ByteBuf}) into a Java object.</li> * <li>Protocol Encoder - translates a Java object into binary data.</li> * <li>Business Logic Handler - performs the actual business logic (e.g. database access).</li> * </ol> * * and it could be represented as shown in the following example: * * <pre> * static final {@link EventExecutorGroup} group = new {@link DefaultEventExecutorGroup}(16); * ... 
 *
 * {@link ChannelPipeline} pipeline = ch.pipeline();
 *
 * pipeline.addLast("decoder", new MyProtocolDecoder());
 * pipeline.addLast("encoder", new MyProtocolEncoder());
 *
 * // Tell the pipeline to run MyBusinessLogicHandler's event handler methods
 * // in a different thread than an I/O thread so that the I/O thread is not blocked by
 * // a time-consuming task.
 * // If your business logic is fully asynchronous or finished very quickly, you don't
 * // need to specify a group.
 * pipeline.addLast(group, "handler", new MyBusinessLogicHandler());
 * </pre>
 *
 * Be aware that while using {@link DefaultEventLoopGroup} will offload the operation from the {@link EventLoop} it will
 * still process tasks in a serial fashion per {@link ChannelHandlerContext} and so guarantee ordering. Due to the
 * ordering it may still become a bottleneck. If ordering is not a requirement for your use-case you may want to
 * consider using {@link UnorderedThreadPoolEventExecutor} to maximize the parallelism of the task execution.
 *
 * <h3>Thread safety</h3>
 * <p>
 * A {@link ChannelHandler} can be added or removed at any time because a {@link ChannelPipeline} is thread safe.
 * For example, you can insert an encryption handler when sensitive information is about to be exchanged, and remove it
 * after the exchange.
 */
public interface ChannelPipeline
        extends ChannelInboundInvoker, ChannelOutboundInvoker, Iterable<Entry<String, ChannelHandler>> {

    /**
     * Inserts a {@link ChannelHandler} at the first position of this pipeline.
     *
     * @param name     the name of the handler to insert first
     * @param handler  the handler to insert first
     *
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified handler is {@code null}
     */
    ChannelPipeline addFirst(String name, ChannelHandler handler);

    /**
     * Inserts a {@link ChannelHandler} at the first position of this pipeline.
     *
     * @param group    the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}
     *                 methods
     * @param name     the name of the handler to insert first
     * @param handler  the handler to insert first
     *
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified handler is {@code null}
     */
    ChannelPipeline addFirst(EventExecutorGroup group, String name, ChannelHandler handler);

    /**
     * Appends a {@link ChannelHandler} at the last position of this pipeline.
     *
     * @param name     the name of the handler to append
     * @param handler  the handler to append
     *
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified handler is {@code null}
     */
    ChannelPipeline addLast(String name, ChannelHandler handler);

    /**
     * Appends a {@link ChannelHandler} at the last position of this pipeline.
     *
     * @param group    the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}
     *                 methods
     * @param name     the name of the handler to append
     * @param handler  the handler to append
     *
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified handler is {@code null}
     */
    ChannelPipeline addLast(EventExecutorGroup group, String name, ChannelHandler handler);

    /**
     * Inserts a {@link ChannelHandler} before an existing handler of this
     * pipeline.
     *
     * @param baseName  the name of the existing handler
     * @param name      the name of the handler to insert before
     * @param handler   the handler to insert before
     *
     * @throws NoSuchElementException
     *         if there's no such entry with the specified {@code baseName}
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified baseName or handler is {@code null}
     */
    ChannelPipeline addBefore(String baseName, String name, ChannelHandler handler);

    /**
     * Inserts a {@link ChannelHandler} before an existing handler of this
     * pipeline.
     *
     * @param group     the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}
     *                  methods
     * @param baseName  the name of the existing handler
     * @param name      the name of the handler to insert before
     * @param handler   the handler to insert before
     *
     * @throws NoSuchElementException
     *         if there's no such entry with the specified {@code baseName}
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified baseName or handler is {@code null}
     */
    ChannelPipeline addBefore(EventExecutorGroup group, String baseName, String name, ChannelHandler handler);

    /**
     * Inserts a {@link ChannelHandler} after an existing handler of this
     * pipeline.
     *
     * @param baseName  the name of the existing handler
     * @param name      the name of the handler to insert after
     * @param handler   the handler to insert after
     *
     * @throws NoSuchElementException
     *         if there's no such entry with the specified {@code baseName}
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified baseName or handler is {@code null}
     */
    ChannelPipeline addAfter(String baseName, String name, ChannelHandler handler);

    /**
     * Inserts a {@link ChannelHandler} after an existing handler of this
     * pipeline.
     *
     * @param group     the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}
     *                  methods
     * @param baseName  the name of the existing handler
     * @param name      the name of the handler to insert after
     * @param handler   the handler to insert after
     *
     * @throws NoSuchElementException
     *         if there's no such entry with the specified {@code baseName}
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified baseName or handler is {@code null}
     */
    ChannelPipeline addAfter(EventExecutorGroup group, String baseName, String name, ChannelHandler handler);

    /**
     * Inserts {@link ChannelHandler}s at the first position of this pipeline.
     *
     * @param handlers  the handlers to insert first
     *
     */
    ChannelPipeline addFirst(ChannelHandler... handlers);

    /**
     * Inserts {@link ChannelHandler}s at the first position of this pipeline.
     *
     * @param group     the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}s
     *                  methods.
     * @param handlers  the handlers to insert first
     *
     */
    ChannelPipeline addFirst(EventExecutorGroup group, ChannelHandler... handlers);

    /**
     * Inserts {@link ChannelHandler}s at the last position of this pipeline.
     *
     * @param handlers  the handlers to insert last
     *
     */
    ChannelPipeline addLast(ChannelHandler... handlers);

    /**
     * Inserts {@link ChannelHandler}s at the last position of this pipeline.
     *
     * @param group     the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}s
     *                  methods.
     * @param handlers  the handlers to insert last
     *
     */
    ChannelPipeline addLast(EventExecutorGroup group, ChannelHandler... handlers);

    /**
     * Removes the specified {@link ChannelHandler} from this pipeline.
     *
     * @param  handler  the {@link ChannelHandler} to remove
     *
     * @return itself, for method chaining
     *
     * @throws NoSuchElementException
     *         if there's no such handler in this pipeline
     * @throws NullPointerException
     *         if the specified handler is {@code null}
     */
    ChannelPipeline remove(ChannelHandler handler);

    /**
     * Removes the {@link ChannelHandler} with the specified name from this pipeline.
     *
     * @param  name  the name under which the {@link ChannelHandler} was stored.
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if there's no such handler with the specified name in this pipeline
     * @throws NullPointerException
     *         if the specified name is {@code null}
     */
    ChannelHandler remove(String name);

    /**
     * Removes the {@link ChannelHandler} of the specified type from this pipeline.
     *
     * @param <T>          the type of the handler
     * @param handlerType  the type of the handler
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if there's no such handler of the specified type in this pipeline
     * @throws NullPointerException
     *         if the specified handler type is {@code null}
     */
    <T extends ChannelHandler> T remove(Class<T> handlerType);

    /**
     * Removes the first {@link ChannelHandler} in this pipeline.
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if this pipeline is empty
     */
    ChannelHandler removeFirst();

    /**
     * Removes the last {@link ChannelHandler} in this pipeline.
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if this pipeline is empty
     */
    ChannelHandler removeLast();

    /**
     * Replaces the specified {@link ChannelHandler} with a new handler in this pipeline.
     *
     * @param  oldHandler  the {@link ChannelHandler} to be replaced
     * @param  newName     the name under which the replacement should be added
     * @param  newHandler  the {@link ChannelHandler} which is used as replacement
     *
     * @return itself
     *
     * @throws NoSuchElementException
     *         if the specified old handler does not exist in this pipeline
     * @throws IllegalArgumentException
     *         if a handler with the specified new name already exists in this
     *         pipeline, except for the handler to be replaced
     * @throws NullPointerException
     *         if the specified old handler or new handler is
     *         {@code null}
     */
    ChannelPipeline replace(ChannelHandler oldHandler, String newName, ChannelHandler newHandler);

    /**
     * Replaces the {@link ChannelHandler} of the specified name with a new handler in this pipeline.
     *
     * @param  oldName     the name of the {@link ChannelHandler} to be replaced
     * @param  newName     the name under which the replacement should be added
     * @param  newHandler  the {@link ChannelHandler} which is used as replacement
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if the handler with the specified old name does not exist in this pipeline
     * @throws IllegalArgumentException
     *         if a handler with the specified new name already exists in this
     *         pipeline, except for the handler to be replaced
     * @throws NullPointerException
     *         if the specified old handler or new handler is
     *         {@code null}
     */
    ChannelHandler replace(String oldName, String newName, ChannelHandler newHandler);

    /**
     * Replaces the {@link ChannelHandler} of the specified type with a new handler in this pipeline.
     *
     * @param  oldHandlerType  the type of the handler to be removed
     * @param  newName         the name under which the replacement should be added
     * @param  newHandler      the {@link ChannelHandler} which is used as replacement
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if the handler of the specified old handler type does not exist
     *         in this pipeline
     * @throws IllegalArgumentException
     *         if a handler with the specified new name already exists in this
     *         pipeline, except for the handler to be replaced
     * @throws NullPointerException
     *         if the specified old handler or new handler is
     *         {@code null}
     */
    <T extends ChannelHandler> T replace(Class<T> oldHandlerType, String newName, ChannelHandler newHandler);

    /**
     * Returns the first {@link ChannelHandler} in this pipeline.
     *
     * @return the first handler.  {@code null} if this pipeline is empty.
     */
    ChannelHandler first();

    /**
     * Returns the context of the first {@link ChannelHandler} in this pipeline.
     *
     * @return the context of the first handler.  {@code null} if this pipeline is empty.
     */
    ChannelHandlerContext firstContext();

    /**
     * Returns the last {@link ChannelHandler} in this pipeline.
     *
     * @return the last handler.  {@code null} if this pipeline is empty.
     */
    ChannelHandler last();

    /**
     * Returns the context of the last {@link ChannelHandler} in this pipeline.
     *
     * @return the context of the last handler.  {@code null} if this pipeline is empty.
     */
    ChannelHandlerContext lastContext();

    /**
     * Returns the {@link ChannelHandler} with the specified name in this
     * pipeline.
     *
     * @return the handler with the specified name.
     *         {@code null} if there's no such handler in this pipeline.
     */
    ChannelHandler get(String name);

    /**
     * Returns the {@link ChannelHandler} of the specified type in this
     * pipeline.
     *
     * @return the handler of the specified handler type.
     *         {@code null} if there's no such handler in this pipeline.
     */
    <T extends ChannelHandler> T get(Class<T> handlerType);

    /**
     * Returns the context object of the specified {@link ChannelHandler} in
     * this pipeline.
     *
     * @return the context object of the specified handler.
     *         {@code null} if there's no such handler in this pipeline.
     */
    ChannelHandlerContext context(ChannelHandler handler);

    /**
     * Returns the context object of the {@link ChannelHandler} with the
     * specified name in this pipeline.
     *
     * @return the context object of the handler with the specified name.
     *         {@code null} if there's no such handler in this pipeline.
     */
    ChannelHandlerContext context(String name);

    /**
     * Returns the context object of the {@link ChannelHandler} of the
     * specified type in this pipeline.
     *
     * @return the context object of the handler of the specified type.
     *         {@code null} if there's no such handler in this pipeline.
     */
    ChannelHandlerContext context(Class<? extends ChannelHandler> handlerType);

    /**
     * Returns the {@link Channel} that this pipeline is attached to.
     *
     * @return the channel. {@code null} if this pipeline is not attached yet.
     */
    Channel channel();

    /**
     * Returns the {@link List} of the handler names.
     */
    List<String> names();

    /**
     * Converts this pipeline into an ordered {@link Map} whose keys are
     * handler names and whose values are handlers.
     */
    Map<String, ChannelHandler> toMap();

    @Override
    ChannelPipeline fireChannelRegistered();

    @Override
    ChannelPipeline fireChannelUnregistered();

    @Override
    ChannelPipeline fireChannelActive();

    @Override
    ChannelPipeline fireChannelInactive();

    @Override
    ChannelPipeline fireExceptionCaught(Throwable cause);

    @Override
    ChannelPipeline fireUserEventTriggered(Object event);

    @Override
    ChannelPipeline fireChannelRead(Object msg);

    @Override
    ChannelPipeline fireChannelReadComplete();

    @Override
    ChannelPipeline fireChannelWritabilityChanged();

    @Override
    ChannelPipeline flush();
}
netty/netty
transport/src/main/java/io/netty/channel/ChannelPipeline.java
955
package mindustry.world; import arc.*; import arc.audio.*; import arc.func.*; import arc.graphics.*; import arc.graphics.g2d.*; import arc.graphics.g2d.TextureAtlas.*; import arc.math.*; import arc.math.geom.*; import arc.scene.ui.layout.*; import arc.struct.EnumSet; import arc.struct.*; import arc.util.*; import arc.util.pooling.*; import mindustry.annotations.Annotations.*; import mindustry.content.*; import mindustry.core.*; import mindustry.ctype.*; import mindustry.entities.*; import mindustry.entities.bullet.*; import mindustry.entities.units.*; import mindustry.game.*; import mindustry.gen.*; import mindustry.graphics.*; import mindustry.graphics.MultiPacker.*; import mindustry.logic.*; import mindustry.type.*; import mindustry.ui.*; import mindustry.world.blocks.*; import mindustry.world.blocks.environment.*; import mindustry.world.blocks.power.*; import mindustry.world.consumers.*; import mindustry.world.meta.*; import java.lang.reflect.*; import java.util.*; import static mindustry.Vars.*; public class Block extends UnlockableContent implements Senseable{ /** If true, buildings have an ItemModule. */ public boolean hasItems; /** If true, buildings have a LiquidModule. */ public boolean hasLiquids; /** If true, buildings have a PowerModule. */ public boolean hasPower; /** Flag for determining whether this block outputs liquid somewhere; used for connections. */ public boolean outputsLiquid = false; /** Used by certain power blocks (nodes) to flag as non-consuming of power. True by default, even if this block has no power. */ public boolean consumesPower = true; /** If true, this block is a generator that can produce power. */ public boolean outputsPower = false; /** If false, power nodes cannot connect to this block. */ public boolean connectedPower = true; /** If true, this block can conduct power like a cable. */ public boolean conductivePower = false; /** If true, this block can output payloads; affects blending. 
*/ public boolean outputsPayload = false; /** If true, this block can input payloads; affects unit payload enter behavior. */ public boolean acceptsPayloads = false; /** If true, payloads will attempt to move into this block. */ public boolean acceptsPayload = false; /** Visual flag use for blending of certain transportation blocks. */ public boolean acceptsItems = false; /** If true, all item capacities of this block are separate instead of pooled as one number. */ public boolean separateItemCapacity = false; /** maximum items this block can carry (usually, this is per-type of item) */ public int itemCapacity = 10; /** maximum total liquids this block can carry if hasLiquids = true */ public float liquidCapacity = 10f; /** higher numbers increase liquid output speed; TODO remove and replace with better liquids system */ public float liquidPressure = 1f; /** If true, this block outputs to its facing direction, when applicable. * Used for blending calculations. */ public boolean outputFacing = true; /** if true, this block does not accept input from the sides (used for armored conveyors) */ public boolean noSideBlend = false; /** whether to display flow rate */ public boolean displayFlow = true; /** whether this block is visible in the editor */ public boolean inEditor = true; /** the last configuration value applied to this block. */ public @Nullable Object lastConfig; /** whether to save the last config and apply it to newly placed blocks */ public boolean saveConfig = false; /** whether to allow copying the config through middle click */ public boolean copyConfig = true; /** if true, double-tapping this configurable block clears configuration. 
*/ public boolean clearOnDoubleTap = false; /** whether this block has a tile entity that updates */ public boolean update; /** whether this block has health and can be destroyed */ public boolean destructible; /** whether unloaders work on this block */ public boolean unloadable = true; /** if true, this block acts a duct and will connect to armored ducts from the side. */ public boolean isDuct = false; /** whether units can resupply by taking items from this block */ public boolean allowResupply = false; /** whether this is solid */ public boolean solid; /** whether this block CAN be solid. */ public boolean solidifes; /** if true, this counts as a non-solid block to this team. */ public boolean teamPassable; /** if true, this block cannot be hit by bullets unless explicitly targeted. */ public boolean underBullets; /** whether this is rotatable */ public boolean rotate; /** if rotate is true and this is false, the region won't rotate when drawing */ public boolean rotateDraw = true; /** if rotate = false and this is true, rotation will be locked at 0 when placing (default); advanced use only */ public boolean lockRotation = true; /** if true, schematic flips with this block are inverted. */ public boolean invertFlip = false; /** number of different variant regions to use */ public int variants = 0; /** whether to draw a rotation arrow - this does not apply to lines of blocks */ public boolean drawArrow = true; /** whether to draw the team corner by default */ public boolean drawTeamOverlay = true; /** for static blocks only: if true, tile data() is saved in world data. 
*/ public boolean saveData; /** whether you can break this with rightclick */ public boolean breakable; /** whether to add this block to brokenblocks */ public boolean rebuildable = true; /** if true, this logic-related block can only be used with privileged processors (or is one itself) */ public boolean privileged = false; /** whether this block can only be placed on water */ public boolean requiresWater = false; /** whether this block can be placed on any liquids, anywhere */ public boolean placeableLiquid = false; /** whether this block can be placed directly by the player via PlacementFragment */ public boolean placeablePlayer = true; /** whether this floor can be placed on. */ public boolean placeableOn = true; /** whether this block has insulating properties. */ public boolean insulated = false; /** whether the sprite is a full square. */ public boolean squareSprite = true; /** whether this block absorbs laser attacks. */ public boolean absorbLasers = false; /** if false, the status is never drawn */ public boolean enableDrawStatus = true; /** whether to draw disabled status */ public boolean drawDisabled = true; /** whether to automatically reset enabled status after a logic block has not interacted for a while. */ public boolean autoResetEnabled = true; /** if true, the block stops updating when disabled */ public boolean noUpdateDisabled = false; /** if true, this block updates when it's a payload in a unit. */ public boolean updateInUnits = true; /** if true, this block updates in payloads in units regardless of the experimental game rule */ public boolean alwaysUpdateInUnits = false; /** if false, only incinerable liquids are dropped when deconstructing; otherwise, all liquids are dropped. */ public boolean deconstructDropAllLiquid = false; /** Whether to use this block's color in the minimap. Only used for overlays. 
*/ public boolean useColor = true; /** item that drops from this block, used for drills */ public @Nullable Item itemDrop = null; /** if true, this block cannot be mined by players. useful for annoying things like sand. */ public boolean playerUnmineable = false; /** Array of affinities to certain things. */ public Attributes attributes = new Attributes(); /** Health per square tile that this block occupies; essentially, this is multiplied by size * size. Overridden if health is > 0. If <0, the default is 40. */ public float scaledHealth = -1; /** building health; -1 to use scaledHealth */ public int health = -1; /** damage absorption, similar to unit armor */ public float armor = 0f; /** base block explosiveness */ public float baseExplosiveness = 0f; /** bullet that this block spawns when destroyed */ public @Nullable BulletType destroyBullet = null; /** if true, destroyBullet is spawned on the block's team instead of Derelict team */ public boolean destroyBulletSameTeam = false; /** liquid used for lighting */ public @Nullable Liquid lightLiquid; /** whether cracks are drawn when this block is damaged */ public boolean drawCracks = true; /** whether rubble is created when this block is destroyed */ public boolean createRubble = true; /** whether this block can be placed on edges of liquids. */ public boolean floating = false; /** multiblock size */ public int size = 1; /** multiblock offset */ public float offset = 0f; /** offset for iteration (internal use only) */ public int sizeOffset = 0; /** Clipping size of this block. Should be as large as the block will draw. */ public float clipSize = -1f; /** When placeRangeCheck is enabled, this is the range checked for enemy blocks. */ public float placeOverlapRange = 50f; /** Multiplier of damage dealt to this block by tanks. Does not apply to crawlers. */ public float crushDamageMultiplier = 1f; /** Max of timers used. */ public int timers = 0; /** Cache layer. Only used for 'cached' rendering. 
*/ public CacheLayer cacheLayer = CacheLayer.normal; /** Special flag; if false, floor will be drawn under this block even if it is cached. */ public boolean fillsTile = true; /** If true, this block can be covered by darkness / fog even if synthetic. */ public boolean forceDark = false; /** whether this block can be replaced in all cases */ public boolean alwaysReplace = false; /** if false, this block can never be replaced. */ public boolean replaceable = true; /** The block group. Unless {@link #canReplace} is overridden, blocks in the same group can replace each other. */ public BlockGroup group = BlockGroup.none; /** List of block flags. Used for AI indexing. */ public EnumSet<BlockFlag> flags = EnumSet.of(); /** Targeting priority of this block, as seen by enemies. */ public float priority = TargetPriority.base; /** How much this block affects the unit cap by. * The block flags must contain unitModifier in order for this to work. */ public int unitCapModifier = 0; /** Whether the block can be tapped and selected to configure. */ public boolean configurable; /** If true, this block does not have pointConfig with a transform called on map resize. */ public boolean ignoreResizeConfig; /** If true, this building can be selected like a unit when commanding. */ public boolean commandable; /** If true, the building inventory can be shown with the config. */ public boolean allowConfigInventory = true; /** Defines how large selection menus, such as that of sorters, should be. */ public int selectionRows = 5, selectionColumns = 4; /** If true, this block can be configured by logic. */ public boolean logicConfigurable = false; /** Whether this block consumes touchDown events when tapped. */ public boolean consumesTap; /** Whether to draw the glow of the liquid for this block, if it has one. */ public boolean drawLiquidLight = true; /** Environmental flags that are *all* required for this block to function. 
0 = any environment */ public int envRequired = 0; /** The environment flags that this block can function in. If the env matches any of these, it will be enabled. */ public int envEnabled = Env.terrestrial; /** The environment flags that this block *cannot* function in. If the env matches any of these, it will be *disabled*. */ public int envDisabled = 0; /** Whether to periodically sync this block across the network. */ public boolean sync; /** Whether this block uses conveyor-type placement mode. */ public boolean conveyorPlacement; /** If false, diagonal placement (ctrl) for this block is not allowed. */ public boolean allowDiagonal = true; /** Whether to swap the diagonal placement modes. */ public boolean swapDiagonalPlacement; /** Whether to allow rectangular placement, as opposed to a line. */ public boolean allowRectanglePlacement = false; /** Build queue priority in schematics. */ public int schematicPriority = 0; /** * The color of this block when displayed on the minimap or map preview. * Do not set manually! This is overridden when loading for most blocks. */ public Color mapColor = new Color(0, 0, 0, 1); /** Whether this block has a minimap color. */ public boolean hasColor = false; /** Whether units target this block. */ public boolean targetable = true; /** If true, this block attacks and is considered a turret in the indexer. Building must implement Ranged. */ public boolean attacks = false; /** If true, this block is mending-related and can be suppressed with special units/missiles. */ public boolean suppressable = false; /** Whether the overdrive core has any effect on this block. */ public boolean canOverdrive = true; /** Outlined icon color.*/ public Color outlineColor = Color.valueOf("404049"); /** Whether any icon region has an outline added. */ public boolean outlineIcon = false; /** Outline icon radius. */ public int outlineRadius = 4; /** Which of the icon regions gets the outline added. Uses last icon if <= 0. 
*/ public int outlinedIcon = -1; /** Whether this block has a shadow under it. */ public boolean hasShadow = true; /** If true, a custom shadow (name-shadow) is drawn under this block. */ public boolean customShadow = false; /** Should the sound made when this block is built change in pitch. */ public boolean placePitchChange = true; /** Should the sound made when this block is deconstructed change in pitch. */ public boolean breakPitchChange = true; /** Sound made when this block is built. */ public Sound placeSound = Sounds.place; /** Sound made when this block is deconstructed. */ public Sound breakSound = Sounds.breaks; /** Sounds made when this block is destroyed.*/ public Sound destroySound = Sounds.boom; /** How reflective this block is. */ public float albedo = 0f; /** Environmental passive light color. */ public Color lightColor = Color.white.cpy(); /** * Whether this environmental block passively emits light. * Does not change behavior for non-environmental blocks, but still updates clipSize. */ public boolean emitLight = false; /** Radius of the light emitted by this block. */ public float lightRadius = 60f; /** How much fog this block uncovers, in tiles. Cannot be dynamic. <= 0 to disable. */ public int fogRadius = -1; /** The sound that this block makes while active. One sound loop. Do not overuse. */ public Sound loopSound = Sounds.none; /** Active sound base volume. */ public float loopSoundVolume = 0.5f; /** The sound that this block makes while idle. Uses one sound loop for all blocks. */ public Sound ambientSound = Sounds.none; /** Idle sound base volume. */ public float ambientSoundVolume = 0.05f; /** Cost of constructing this block. */ public ItemStack[] requirements = {}; /** Category in place menu. */ public Category category = Category.distribution; /** Time to build this block in ticks; do not modify directly! */ public float buildCost = 20f; /** Whether this block is visible and can currently be built. 
*/ public BuildVisibility buildVisibility = BuildVisibility.hidden; /** Multiplier for speed of building this block. */ public float buildCostMultiplier = 1f; /** Build completion at which deconstruction finishes. */ public float deconstructThreshold = 0f; /** If true, this block deconstructs immediately. Instant deconstruction implies no resource refund. */ public boolean instantDeconstruct = false; /** If true, this block constructs immediately. This implies no resource requirement, and ignores configs - do not use, this is for performance only! */ public boolean instantBuild = false; /** Effect for placing the block. Passes size as rotation. */ public Effect placeEffect = Fx.placeBlock; /** Effect for breaking the block. Passes size as rotation. */ public Effect breakEffect = Fx.breakBlock; /** Effect for destroying the block. */ public Effect destroyEffect = Fx.dynamicExplosion; /** Multiplier for cost of research in tech tree. */ public float researchCostMultiplier = 1; /** Cost multipliers per-item. */ public ObjectFloatMap<Item> researchCostMultipliers = new ObjectFloatMap<>(); /** Override for research cost. Uses multipliers above and building requirements if not set. */ public @Nullable ItemStack[] researchCost; /** Whether this block has instant transfer.*/ public boolean instantTransfer = false; /** Whether you can rotate this block after it is placed. */ public boolean quickRotate = true; /** If true, this derelict block can be repair by clicking it. */ public boolean allowDerelictRepair = true; /** Main subclass. Non-anonymous. */ public @Nullable Class<?> subclass; /** Scroll position for certain blocks. */ public float selectScroll; /** Building that is created for this block. Initialized in init() via reflection. Set manually if modded. */ public Prov<Building> buildType = null; /** Configuration handlers by type. */ public ObjectMap<Class<?>, Cons2> configurations = new ObjectMap<>(); /** Consumption filters. 
*/ public boolean[] itemFilter = {}, liquidFilter = {}; /** Array of consumers used by this block. Only populated after init(). */ public Consume[] consumers = {}, optionalConsumers = {}, nonOptionalConsumers = {}, updateConsumers = {}; /** Set to true if this block has any consumers in its array. */ public boolean hasConsumers; /** The single power consumer, if applicable. */ public @Nullable ConsumePower consPower; /** Map of bars by name. */ protected OrderedMap<String, Func<Building, Bar>> barMap = new OrderedMap<>(); /** List for building up consumption before init(). */ protected Seq<Consume> consumeBuilder = new Seq<>(); protected TextureRegion[] generatedIcons; protected TextureRegion[] editorVariantRegions; /** Regions indexes from icons() that are rotated. If either of these is not -1, other regions won't be rotated in ConstructBlocks. */ public int regionRotated1 = -1, regionRotated2 = -1; public TextureRegion region, editorIcon; public @Load("@-shadow") TextureRegion customShadowRegion; public @Load("@-team") TextureRegion teamRegion; public TextureRegion[] teamRegions, variantRegions, variantShadowRegions; protected static final Seq<Tile> tempTiles = new Seq<>(); protected static final Seq<Building> tempBuilds = new Seq<>(); /** Dump timer ID.*/ protected final int timerDump = timers++; /** How often to try dumping items in ticks, e.g. 5 = 12 times/sec*/ public int dumpTime = 5; public Block(String name){ super(name); initBuilding(); selectionSize = 28f; } public void drawBase(Tile tile){ //delegates to entity unless it is null if(tile.build != null){ tile.build.draw(); }else{ Draw.rect( variants == 0 ? region : variantRegions[Mathf.randomSeed(tile.pos(), 0, Math.max(0, variantRegions.length - 1))], tile.drawx(), tile.drawy()); } } public void drawShadow(Tile tile){ Draw.color(0f, 0f, 0f, BlockRenderer.shadowColor.a); Draw.rect( variants == 0 ? 
customShadowRegion : variantShadowRegions[Mathf.randomSeed(tile.pos(), 0, Math.max(0, variantShadowRegions.length - 1))], tile.drawx(), tile.drawy()); Draw.color(); } public float percentSolid(int x, int y){ Tile tile = world.tile(x, y); if(tile == null) return 0; return tile.getLinkedTilesAs(this, tempTiles) .sumf(other -> !other.floor().isLiquid ? 1f : 0f) / size / size; } public void drawEnvironmentLight(Tile tile){ Drawf.light(tile.worldx(), tile.worldy(), lightRadius, lightColor, lightColor.a); } /** Drawn when you are placing a block. */ public void drawPlace(int x, int y, int rotation, boolean valid){ drawPotentialLinks(x, y); drawOverlay(x * tilesize + offset, y * tilesize + offset, rotation); } public void drawPotentialLinks(int x, int y){ if((consumesPower || outputsPower) && hasPower && connectedPower){ Tile tile = world.tile(x, y); if(tile != null){ PowerNode.getNodeLinks(tile, this, player.team(), other -> { PowerNode node = (PowerNode)other.block; Draw.color(node.laserColor1, Renderer.laserOpacity * 0.5f); node.drawLaser(x * tilesize + offset, y * tilesize + offset, other.x, other.y, size, other.block.size); Drawf.square(other.x, other.y, other.block.size * tilesize / 2f + 2f, Pal.place); }); } } } public float drawPlaceText(String text, int x, int y, boolean valid){ if(renderer.pixelate) return 0; Color color = valid ? 
Pal.accent : Pal.remove; Font font = Fonts.outline; GlyphLayout layout = Pools.obtain(GlyphLayout.class, GlyphLayout::new); boolean ints = font.usesIntegerPositions(); font.setUseIntegerPositions(false); font.getData().setScale(1f / 4f / Scl.scl(1f)); layout.setText(font, text); float width = layout.width; font.setColor(color); float dx = x * tilesize + offset, dy = y * tilesize + offset + size * tilesize / 2f + 3; font.draw(text, dx, dy + layout.height + 1, Align.center); dy -= 1f; Lines.stroke(2f, Color.darkGray); Lines.line(dx - layout.width / 2f - 2f, dy, dx + layout.width / 2f + 1.5f, dy); Lines.stroke(1f, color); Lines.line(dx - layout.width / 2f - 2f, dy, dx + layout.width / 2f + 1.5f, dy); font.setUseIntegerPositions(ints); font.setColor(Color.white); font.getData().setScale(1f); Draw.reset(); Pools.free(layout); return width; } /** Drawn when placing and when hovering over. */ public void drawOverlay(float x, float y, int rotation){ } public float sumAttribute(@Nullable Attribute attr, int x, int y){ if(attr == null) return 0; Tile tile = world.tile(x, y); if(tile == null) return 0; return tile.getLinkedTilesAs(this, tempTiles) .sumf(other -> !floating && other.floor().isDeep() ? 0 : other.floor().attributes.get(attr)); } public TextureRegion getDisplayIcon(Tile tile){ return tile.build == null ? uiIcon : tile.build.getDisplayIcon(); } public String getDisplayName(Tile tile){ return tile.build == null ? localizedName : tile.build.getDisplayName(); } /** @return a custom minimap color for this or 0 to use default colors. */ public int minimapColor(Tile tile){ return 0; } public boolean outputsItems(){ return hasItems; } /** @return whether this block can be placed on the specified tile. */ public boolean canPlaceOn(Tile tile, Team team, int rotation){ return true; } /** @return whether this block can be broken on the specified tile. 
*/ public boolean canBreak(Tile tile){ return true; } public boolean rotatedOutput(int x, int y){ return rotate; } public boolean synthetic(){ return update || destructible; } public boolean checkForceDark(Tile tile){ return forceDark; } @Override public void setStats(){ super.setStats(); stats.add(Stat.size, "@x@", size, size); if(synthetic()){ stats.add(Stat.health, health, StatUnit.none); if(armor > 0){ stats.add(Stat.armor, armor, StatUnit.none); } } if(canBeBuilt() && requirements.length > 0){ stats.add(Stat.buildTime, buildCost / 60, StatUnit.seconds); stats.add(Stat.buildCost, StatValues.items(false, requirements)); } if(instantTransfer){ stats.add(Stat.maxConsecutive, 2, StatUnit.none); } for(var c : consumers){ c.display(stats); } //Note: Power stats are added by the consumers. if(hasLiquids) stats.add(Stat.liquidCapacity, liquidCapacity, StatUnit.liquidUnits); if(hasItems && itemCapacity > 0) stats.add(Stat.itemCapacity, itemCapacity, StatUnit.items); } public <T extends Building> void addBar(String name, Func<T, Bar> sup){ barMap.put(name, (Func<Building, Bar>)sup); } public void removeBar(String name){ barMap.remove(name); } public Iterable<Func<Building, Bar>> listBars(){ return barMap.values(); } public void addLiquidBar(Liquid liq){ addBar("liquid-" + liq.name, entity -> !liq.unlockedNow() ? null : new Bar( () -> liq.localizedName, liq::barColor, () -> entity.liquids.get(liq) / liquidCapacity )); } /** Adds a liquid bar that dynamically displays a liquid type. */ public <T extends Building> void addLiquidBar(Func<T, Liquid> current){ addBar("liquid", entity -> new Bar( () -> current.get((T)entity) == null || entity.liquids.get(current.get((T)entity)) <= 0.001f ? Core.bundle.get("bar.liquid") : current.get((T)entity).localizedName, () -> current.get((T)entity) == null ? Color.clear : current.get((T)entity).barColor(), () -> current.get((T)entity) == null ? 
0f : entity.liquids.get(current.get((T)entity)) / liquidCapacity) ); } public void setBars(){ addBar("health", entity -> new Bar("stat.health", Pal.health, entity::healthf).blink(Color.white)); if(consPower != null){ boolean buffered = consPower.buffered; float capacity = consPower.capacity; addBar("power", entity -> new Bar( () -> buffered ? Core.bundle.format("bar.poweramount", Float.isNaN(entity.power.status * capacity) ? "<ERROR>" : UI.formatAmount((int)(entity.power.status * capacity))) : Core.bundle.get("bar.power"), () -> Pal.powerBar, () -> Mathf.zero(consPower.requestedPower(entity)) && entity.power.graph.getPowerProduced() + entity.power.graph.getBatteryStored() > 0f ? 1f : entity.power.status) ); } if(hasItems && configurable){ addBar("items", entity -> new Bar( () -> Core.bundle.format("bar.items", entity.items.total()), () -> Pal.items, () -> (float)entity.items.total() / itemCapacity) ); } if(unitCapModifier != 0){ stats.add(Stat.maxUnits, (unitCapModifier < 0 ? "-" : "+") + Math.abs(unitCapModifier)); } //liquids added last if(hasLiquids){ //TODO liquids need to be handled VERY carefully. there are several potential possibilities: //1. no consumption or output (conduit/tank) // - display current(), 1 bar //2. static set of inputs and outputs // - create bars for each input/output, straightforward //3. TODO dynamic input/output combo??? // - confusion boolean added = false; //TODO handle in consumer //add bars for *specific* consumed liquids for(var consl : consumers){ if(consl instanceof ConsumeLiquid liq){ added = true; addLiquidBar(liq.liquid); }else if(consl instanceof ConsumeLiquids multi){ added = true; for(var stack : multi.liquids){ addLiquidBar(stack.liquid); } } } //nothing was added, so it's safe to add a dynamic liquid bar (probably?) 
if(!added){ addLiquidBar(build -> build.liquids.current()); } } } public boolean consumesItem(Item item){ return itemFilter[item.id]; } public boolean consumesLiquid(Liquid liq){ return liquidFilter[liq.id]; } public boolean canReplace(Block other){ if(other.alwaysReplace) return true; if(other.privileged) return false; return other.replaceable && (other != this || (rotate && quickRotate)) && ((this.group != BlockGroup.none && other.group == this.group) || other == this) && (size == other.size || (size >= other.size && ((subclass != null && subclass == other.subclass) || group.anyReplace))); } /** @return a possible replacement for this block when placed in a line by the player. */ public Block getReplacement(BuildPlan req, Seq<BuildPlan> plans){ return this; } /** Mutates the given list of points used during line placement. */ public void changePlacementPath(Seq<Point2> points, int rotation, boolean diagonalOn){ changePlacementPath(points, rotation); } /** Mutates the given list of points used during line placement. */ public void changePlacementPath(Seq<Point2> points, int rotation){ } /** Mutates the given list of plans used during line placement. */ public void handlePlacementLine(Seq<BuildPlan> plans){ } public boolean configSenseable(){ return configurations.containsKey(Item.class) || configurations.containsKey(Liquid.class) || configurations.containsKey(UnlockableContent.class) || configurations.containsKey(Block.class) || configurations.containsKey(UnitType.class); } public Object nextConfig(){ if(saveConfig && lastConfig != null){ return lastConfig; } return null; } /** Called when a new build plan is created in the player's queue. Blocks can maintain a reference to this plan and add configs to it later. 
*/ public void onNewPlan(BuildPlan plan){ } public void drawPlan(BuildPlan plan, Eachable<BuildPlan> list, boolean valid){ drawPlan(plan, list, valid, 1f); } public void drawPlan(BuildPlan plan, Eachable<BuildPlan> list, boolean valid, float alpha){ Draw.reset(); Draw.mixcol(!valid ? Pal.breakInvalid : Color.white, (!valid ? 0.4f : 0.24f) + Mathf.absin(Time.globalTime, 6f, 0.28f)); Draw.alpha(alpha); float prevScale = Draw.scl; Draw.scl *= plan.animScale; drawPlanRegion(plan, list); Draw.scl = prevScale; Draw.reset(); } public void drawPlanRegion(BuildPlan plan, Eachable<BuildPlan> list){ drawDefaultPlanRegion(plan, list); } /** this is a different method so subclasses can call it even after overriding the base */ public void drawDefaultPlanRegion(BuildPlan plan, Eachable<BuildPlan> list){ TextureRegion reg = getPlanRegion(plan, list); Draw.rect(reg, plan.drawx(), plan.drawy(), !rotate || !rotateDraw ? 0 : plan.rotation * 90); if(plan.worldContext && player != null && teamRegion != null && teamRegion.found()){ if(teamRegions[player.team().id] == teamRegion) Draw.color(player.team().color); Draw.rect(teamRegions[player.team().id], plan.drawx(), plan.drawy()); Draw.color(); } drawPlanConfig(plan, list); } public TextureRegion getPlanRegion(BuildPlan plan, Eachable<BuildPlan> list){ return fullIcon; } public void drawPlanConfig(BuildPlan plan, Eachable<BuildPlan> list){ } public void drawPlanConfigCenter(BuildPlan plan, Object content, String region, boolean cross){ if(content == null){ if(cross){ Draw.rect("cross", plan.drawx(), plan.drawy()); } return; } Color color = content instanceof Item i ? i.color : content instanceof Liquid l ? 
//NOTE(review): continuation of a method that begins before this chunk (a plan-config draw helper);
//tokens reproduced verbatim — draws `region` tinted by `color` at the plan's position, then resets the draw color.
l.color : null;
if(color == null) return;
Draw.color(color);
Draw.rect(region, plan.drawx(), plan.drawy());
Draw.color();
}

/** Draws a config object centered on a build plan; delegates to the 4-arg overload with scaling disabled. */
public void drawPlanConfigCenter(BuildPlan plan, Object content, String region){
    drawPlanConfigCenter(plan, content, region, false);
}

/** Draws config overlays on top of a plan. No-op by default; subclasses may override. */
public void drawPlanConfigTop(BuildPlan plan, Eachable<BuildPlan> list){

}

/** Transforms the internal position of this config using the specified function, and return the result.
 * Default implementation returns the config unchanged. */
public Object pointConfig(Object config, Cons<Point2> transformer){
    return config;
}

/** Configure when a null value is passed — registers a handler keyed on {@code void.class}. */
public <E extends Building> void configClear(Cons<E> cons){
    configurations.put(void.class, (tile, value) -> cons.get((E)tile));
}

/** Listen for a config by class type. */
public <T, E extends Building> void config(Class<T> type, Cons2<E, T> config){
    configurations.put(type, config);
}

/** @return whether this block holds items (has an item module and nonzero capacity). */
public boolean isAccessible(){
    return (hasItems && itemCapacity > 0);
}

/** Sets {@code out} to the index-th side position outside of this block, using the given rotation. */
public void nearbySide(int x, int y, int rotation, int index, Point2 out){
    int cornerX = x - (size-1)/2, cornerY = y - (size-1)/2, s = size;
    switch(rotation){
        case 0 -> out.set(cornerX + s, cornerY + index);
        case 1 -> out.set(cornerX + index, cornerY + s);
        case 2 -> out.set(cornerX - 1, cornerY + index);
        case 3 -> out.set(cornerX + index, cornerY - 1);
    }
}

/** @return the edge positions around a block of this size (delegates to Edges cache). */
public Point2[] getEdges(){
    return Edges.getEdges(size);
}

/** @return the inside-edge positions for a block of this size (delegates to Edges cache). */
public Point2[] getInsideEdges(){
    return Edges.getInsideEdges(size);
}

/** Iterate through every grid position taken up by this block. */
public void iterateTaken(int x, int y, Intc2 placer){
    if(isMultiblock()){
        int offsetx = -(size - 1) / 2;
        int offsety = -(size - 1) / 2;

        for(int dx = 0; dx < size; dx++){
            for(int dy = 0; dy < size; dy++){
                placer.get(dx + offsetx + x, dy + offsety + y);
            }
        }
    }else{
        placer.get(x, y);
    }
}

/** Never use outside of the editor! Lazily resolves and caches the "-icon-editor" region. */
public TextureRegion editorIcon(){
    return editorIcon == null ? (editorIcon = Core.atlas.find(name + "-icon-editor")) : editorIcon;
}

/** Never use outside of the editor! Lazily builds "editor-"-prefixed variants of {@link #variantRegions()}. */
public TextureRegion[] editorVariantRegions(){
    if(editorVariantRegions == null){
        variantRegions();
        editorVariantRegions = new TextureRegion[variantRegions.length];
        for(int i = 0; i < variantRegions.length; i++){
            AtlasRegion region = (AtlasRegion)variantRegions[i];
            editorVariantRegions[i] = Core.atlas.find("editor-" + region.name);
        }
    }
    return editorVariantRegions;
}

/** @return special icons to outline and save with an -outline variant. Vanilla only. */
public TextureRegion[] makeIconRegions(){
    return new TextureRegion[0];
}

/** @return the regions composing this block's icon; first variant region if variants exist, else the base region. */
protected TextureRegion[] icons(){
    //use team region in vanilla team blocks
    TextureRegion r = variants > 0 ? Core.atlas.find(name + "1") : region;
    return teamRegion.found() && minfo.mod == null ? new TextureRegion[]{r, teamRegions[Team.sharded.id]} : new TextureRegion[]{r};
}

/** Adds any regions that need "-outline" variants generated to {@code out}. No-op by default. */
public void getRegionsToOutline(Seq<TextureRegion> out){

}

/** @return cached result of {@link #icons()}, computing it on first call. */
public TextureRegion[] getGeneratedIcons(){
    return generatedIcons == null ? (generatedIcons = icons()) : generatedIcons;
}

/** Clears the generated-icon cache so {@link #getGeneratedIcons()} recomputes. */
public void resetGeneratedIcons(){
    generatedIcons = null;
}

/** @return sprite variant regions; falls back to a single-element array of {@code fullIcon} when none were loaded. */
public TextureRegion[] variantRegions(){
    return variantRegions == null ? (variantRegions = new TextureRegion[]{fullIcon}) : variantRegions;
}

/** @return whether this block has an associated Building entity. */
public boolean hasBuilding(){
    return destructible || update;
}

/** Instantiates a new Building of this block's entity type. */
public final Building newBuilding(){
    return buildType.get();
}

/** Expands the clip size to cover at least the given radius (in tiles). */
public void updateClipRadius(float size){
    clipSize = Math.max(clipSize, size * tilesize + size * 2f);
}

/** Sets {@code rect} to this block's world-space bounds at tile (x, y) and returns it. */
public Rect bounds(int x, int y, Rect rect){
    return rect.setSize(size * tilesize).setCenter(x * tilesize + offset, y * tilesize + offset);
}

/** @return whether this block occupies more than one tile. */
public boolean isMultiblock(){
    return size > 1;
}

/** @return whether this block shows up in the current game state (editor/ban rules considered). */
public boolean isVisible(){
    return !isHidden() && (state.rules.editor || (!state.rules.hideBannedBlocks || !state.rules.isBanned(this)));
}

/** @return whether this block can appear on the given planet (no requirement item is hidden there). */
public boolean isVisibleOn(Planet planet){
    return !Structs.contains(requirements, i -> planet.hiddenItems.contains(i.item));
}

/** @return whether this block can currently be placed: visible, not banned (unless in editor), and env-supported. */
public boolean isPlaceable(){
    return isVisible() && (!state.rules.isBanned(this) || state.rules.editor) && supportsEnv(state.rules.env);
}

/** @return whether this block supports a specific environment. */
public boolean supportsEnv(int env){
    return (envEnabled & env) != 0 && (envDisabled & env) == 0 && (envRequired == 0 || (envRequired & env) == envRequired);
}

/** Called when building of this block begins. */
public void placeBegan(Tile tile, Block previous){

}

/** Called when building of this block begins. Overload with the placing unit; delegates to the 2-arg version. */
public void placeBegan(Tile tile, Block previous, @Nullable Unit builder){
    placeBegan(tile, previous);
}

/** Called right before building of this block begins. */
public void beforePlaceBegan(Tile tile, Block previous){

}

/** @return whether this block is a Floor subclass. */
public boolean isFloor(){
    return this instanceof Floor;
}

/** @return whether this block is an OverlayFloor subclass. */
public boolean isOverlay(){
    return this instanceof OverlayFloor;
}

/** Unchecked cast to Floor — only call when {@link #isFloor()} is true. */
public Floor asFloor(){
    return (Floor)this;
}

/** @return whether this is the air block (content id 0). */
public boolean isAir(){
    return id == 0;
}

/** @return whether this block can ever be built by players (not hidden or debug-only). */
public boolean canBeBuilt(){
    return buildVisibility != BuildVisibility.hidden && buildVisibility != BuildVisibility.debugOnly;
}

/** @return whether the current rules' hidden-build-item list permits building this block. */
public boolean environmentBuildable(){
    return (state.rules.hiddenBuildItems.isEmpty() || !Structs.contains(requirements, i -> state.rules.hiddenBuildItems.contains(i.item)));
}

/** @return whether this block renders on the static wall cache layer. */
public boolean isStatic(){
    return cacheLayer == CacheLayer.walls;
}

/** Finds a consumer matching the filter; searches the builder before init(), the final array after. */
public <T extends Consume> T findConsumer(Boolf<Consume> filter){
    return consumers.length == 0 ? (T)consumeBuilder.find(filter) : (T)Structs.find(consumers, filter);
}

/** @return whether the given consumer has been registered on this block. */
public boolean hasConsumer(Consume cons){
    return consumeBuilder.contains(cons);
}

/** Removes a consumer. Only legal before init(); throws IllegalStateException afterwards. */
public void removeConsumer(Consume cons){
    if(consumers.length > 0){
        throw new IllegalStateException("You can only remove consumers before init(). After init(), all consumers have already been initialized.");
    }
    consumeBuilder.remove(cons);
}

/** Removes all consumers matching the predicate, clearing {@code consPower} if the power consumer went away. */
public void removeConsumers(Boolf<Consume> b){
    consumeBuilder.removeAll(b);
    //the power was removed, unassign it
    if(!consumeBuilder.contains(c -> c instanceof ConsumePower)){
        consPower = null;
    }
}

/** Registers a liquid consumer of the given type and amount per tick. */
public ConsumeLiquid consumeLiquid(Liquid liquid, float amount){
    return consume(new ConsumeLiquid(liquid, amount));
}

/** Registers a multi-liquid consumer. */
public ConsumeLiquids consumeLiquids(LiquidStack... stacks){
    return consume(new ConsumeLiquids(stacks));
}

/**
 * Creates a consumer which directly uses power without buffering it.
 * @param powerPerTick The amount of power which is required each tick for 100% efficiency.
 * @return the created consumer object.
 */
public ConsumePower consumePower(float powerPerTick){
    return consume(new ConsumePower(powerPerTick, 0.0f, false));
}

/** Creates a consumer which only consumes power when the condition is met. */
public <T extends Building> ConsumePower consumePowerCond(float usage, Boolf<T> cons){
    return consume(new ConsumePowerCondition(usage, (Boolf<Building>)cons));
}

/** Creates a consumer that consumes a dynamic amount of power. */
public <T extends Building> ConsumePower consumePowerDynamic(Floatf<T> usage){
    return consume(new ConsumePowerDynamic((Floatf<Building>)usage));
}

/** Creates a consumer that consumes a dynamic amount of power, with a fixed displayed value. */
public <T extends Building> ConsumePower consumePowerDynamic(float displayed, Floatf<T> usage){
    return consume(new ConsumePowerDynamic(displayed, (Floatf<Building>)usage));
}

/**
 * Creates a consumer which stores power.
 * @param powerCapacity The maximum capacity in power units.
 */
public ConsumePower consumePowerBuffered(float powerCapacity){
    return consume(new ConsumePower(0f, powerCapacity, true));
}

/** Registers consumption of a single item. */
public ConsumeItems consumeItem(Item item){
    return consumeItem(item, 1);
}

/** Registers consumption of {@code amount} of one item type. */
public ConsumeItems consumeItem(Item item, int amount){
    return consume(new ConsumeItems(new ItemStack[]{new ItemStack(item, amount)}));
}

/** Registers consumption of multiple item stacks. */
public ConsumeItems consumeItems(ItemStack... items){
    return consume(new ConsumeItems(items));
}

/** Registers a coolant consumer of the given amount per tick. */
public ConsumeCoolant consumeCoolant(float amount){
    return consume(new ConsumeCoolant(amount));
}

/** Registers a coolant consumer, optionally restricting liquid/gas coolants. */
public ConsumeCoolant consumeCoolant(float amount, boolean allowLiquid, boolean allowGas){
    return consume(new ConsumeCoolant(amount, allowLiquid, allowGas));
}

/** Registers an arbitrary consumer. Power consumers replace any previously-registered one. */
public <T extends Consume> T consume(T consume){
    if(consume instanceof ConsumePower){
        //there can only be one power consumer
        consumeBuilder.removeAll(b -> b instanceof ConsumePower);
        consPower = (ConsumePower)consume;
    }
    consumeBuilder.add(consume);
    return consume;
}

/** Alias for {@link #requirements(Category, ItemStack[])}. */
public void setupRequirements(Category cat, ItemStack[] stacks){
    requirements(cat, stacks);
}

/** Alias for {@link #requirements(Category, BuildVisibility, ItemStack[])}. */
public void setupRequirements(Category cat, BuildVisibility visible, ItemStack[] stacks){
    requirements(cat, visible, stacks);
}

/** Sets requirements with default (shown) visibility and explicitly controls alwaysUnlocked. */
public void requirements(Category cat, ItemStack[] stacks, boolean unlocked){
    requirements(cat, BuildVisibility.shown, stacks);
    this.alwaysUnlocked = unlocked;
}

/** Sets requirements with default (shown) visibility. */
public void requirements(Category cat, ItemStack[] stacks){
    requirements(cat, BuildVisibility.shown, stacks);
}

/** Sets up requirements. Use only this method to set up requirements. */
public void requirements(Category cat, BuildVisibility visible, ItemStack[] stacks){
    this.category = cat;
    this.requirements = stacks;
    this.buildVisibility = visible;

    //keep requirements sorted by item id for stable display/serialization order
    Arrays.sort(requirements, Structs.comparingInt(i -> i.item.id));
}

/** Resolves the Building constructor for this block via reflection, falling back to {@code Building::create}. */
protected void initBuilding(){
    //attempt to find the first declared class and use it as the entity type
    try{
        Class<?> current = getClass();

        if(current.isAnonymousClass()){
            current = current.getSuperclass();
        }

        subclass = current;

        while(buildType == null && Block.class.isAssignableFrom(current)){
            //first class that is subclass of Building
            Class<?> type = Structs.find(current.getDeclaredClasses(), t -> Building.class.isAssignableFrom(t) && !t.isInterface());

            if(type != null){
                //these are inner classes, so they have an implicit parameter generated
                Constructor<? extends Building> cons = (Constructor<? extends Building>)type.getDeclaredConstructor(type.getDeclaringClass());
                buildType = () -> {
                    try{
                        return cons.newInstance(this);
                    }catch(Exception e){
                        throw new RuntimeException(e);
                    }
                };
            }

            //scan through every superclass looking for it
            current = current.getSuperclass();
        }
    }catch(Throwable ignored){
        //reflection failure is non-fatal; the default constructor below is used instead
    }

    if(buildType == null){
        //assign default value
        buildType = Building::create;
    }
}

/** Computes research cost from build requirements, unless an explicit researchCost overrides it. */
@Override
public ItemStack[] researchRequirements(){
    if(researchCost != null) return researchCost;
    if(researchCostMultiplier <= 0f) return ItemStack.empty;

    ItemStack[] out = new ItemStack[requirements.length];
    for(int i = 0; i < out.length; i++){
        //superlinear scaling of the requirement amount, rounded to a multiple of 10
        int quantity = Mathf.round(60 * researchCostMultiplier + Mathf.pow(requirements[i].amount, 1.11f) * 20 * researchCostMultiplier * researchCostMultipliers.get(requirements[i].item, 1f), 10);

        out[i] = new ItemStack(requirements[i].item, UI.roundAmount(quantity));
    }

    return out;
}

/** Reports content this block depends on: its requirement items plus non-optional consumed items. */
@Override
public void getDependencies(Cons<UnlockableContent> cons){
    //just requires items
    for(ItemStack stack : requirements){
        cons.get(stack.item);
    }

    //also requires inputs
    for(var c : consumeBuilder){
        if(c.optional) continue;

        if(c instanceof ConsumeItems i){
            for(ItemStack stack : i.items){
                cons.get(stack.item);
            }
        }
        //TODO: requiring liquid dependencies is usually a bad idea, because there is no reason to pump/produce something until you actually need it.
        /*else if(c instanceof ConsumeLiquid i){
            cons.get(i.liquid);
        }else if(c instanceof ConsumeLiquids i){
            for(var stack : i.liquids){
                cons.get(stack.liquid);
            }
        }*/
    }
}

@Override
public ContentType getContentType(){
    return ContentType.block;
}

@Override
public boolean logicVisible(){
    return buildVisibility != BuildVisibility.hidden;
}

/** Called after all blocks are created. Finalizes health, clip size, offsets, build cost and consumer arrays. */
@Override
@CallSuper
public void init(){
    //disable standard shadow
    if(customShadow){
        hasShadow = false;
    }

    if(fogRadius > 0){
        flags = flags.with(BlockFlag.hasFogRadius);
    }

    //initialize default health based on size
    if(health == -1){
        boolean round = false;
        if(scaledHealth < 0){
            scaledHealth = 40;

            float scaling = 1f;
            for(var stack : requirements){
                scaling += stack.item.healthScaling;
            }

            scaledHealth *= scaling;
            round = true;
        }

        //round to a multiple of 5 only when health was derived from requirement scaling
        health = round ? Mathf.round(size * size * scaledHealth, 5) : (int)(size * size * scaledHealth);
    }

    clipSize = Math.max(clipSize, size * tilesize);

    if(hasLiquids && drawLiquidLight){
        clipSize = Math.max(size * 30f * 2f, clipSize);
    }

    if(emitLight){
        clipSize = Math.max(clipSize, lightRadius * 2f);
    }

    if(group == BlockGroup.transportation || category == Category.distribution){
        acceptsItems = true;
    }

    //even-sized blocks are offset by half a tile
    offset = ((size + 1) % 2) * tilesize / 2f;
    sizeOffset = -((size - 1) / 2);

    if(requirements.length > 0){
        buildCost = 0f;
        for(ItemStack stack : requirements){
            buildCost += stack.amount * stack.item.cost;
        }
    }

    buildCost *= buildCostMultiplier;

    //freeze the consumer builder into the final arrays used at runtime
    consumers = consumeBuilder.toArray(Consume.class);
    optionalConsumers = consumeBuilder.select(consume -> consume.optional && !consume.ignore()).toArray(Consume.class);
    nonOptionalConsumers = consumeBuilder.select(consume -> !consume.optional && !consume.ignore()).toArray(Consume.class);
    updateConsumers = consumeBuilder.select(consume -> consume.update && !consume.ignore()).toArray(Consume.class);
    hasConsumers = consumers.length > 0;
    itemFilter = new boolean[content.items().size];
    liquidFilter = new boolean[content.liquids().size];

    for(Consume cons : consumers){
        cons.apply(this);
    }

    setBars();

    stats.useCategories = true;

    //TODO check for double power consumption

    //a block becomes logic-configurable if any config handler is keyed on unlockable content
    if(!logicConfigurable){
        configurations.each((key, val) -> {
            if(UnlockableContent.class.isAssignableFrom(key)){
                logicConfigurable = true;
            }
        });
    }

    //buffered power without power output makes no sense; warn and disable it
    if(!outputsPower && consPower != null && consPower.buffered){
        Log.warn("Consumer using buffered power: @. Disabling buffered power.", name);
        consPower.buffered = false;
    }

    if(buildVisibility == BuildVisibility.sandboxOnly){
        hideDetails = false;
    }
}

/** Loads all texture regions for this block, including team palettes and sprite variants. */
@Override
public void load(){
    super.load();

    region = Core.atlas.find(name);

    ContentRegions.loadRegions(this);

    //load specific team regions
    teamRegions = new TextureRegion[Team.all.length];
    for(Team team : Team.all){
        teamRegions[team.id] = teamRegion.found() && team.hasPalette ? Core.atlas.find(name + "-team-" + team.name, teamRegion) : teamRegion;
    }

    if(variants != 0){
        variantRegions = new TextureRegion[variants];

        for(int i = 0; i < variants; i++){
            variantRegions[i] = Core.atlas.find(name + (i + 1));
        }
        region = variantRegions[0];

        if(customShadow){
            variantShadowRegions = new TextureRegion[variants];

            for(int i = 0; i < variants; i++){
                variantShadowRegions[i] = Core.atlas.find(name + "-shadow" + (i + 1));
            }
        }
    }
}

@Override
public boolean isHidden(){
    return !buildVisibility.visible() && !state.rules.revealedBlocks.contains(this);
}

/** Generates packed icons at build time: team-paletted regions, outlines, and the combined "-full" icon. */
@Override
public void createIcons(MultiPacker packer){
    super.createIcons(packer);

    //sample the icon's center pixel for the minimap color of non-synthetic blocks
    if(!synthetic()){
        PixmapRegion image = Core.atlas.getPixmap(fullIcon);
        mapColor.set(image.get(image.width/2, image.height/2));
    }

    if(variants > 0){
        for(int i = 0; i < variants; i++){
            String rname = name + (i + 1);
            packer.add(PageType.editor, "editor-" + rname, Core.atlas.getPixmap(rname));
        }
    }

    Seq<Pixmap> toDispose = new Seq<>();

    //generate paletted team regions
    if(teamRegion != null && teamRegion.found()){
        for(Team team : Team.all){
            //if there's an override, don't generate anything
            if(team.hasPalette && !Core.atlas.has(name + "-team-" + team.name)){
                var base = Core.atlas.getPixmap(teamRegion);
                Pixmap out = new Pixmap(base.width, base.height);

                for(int x = 0; x < base.width; x++){
                    for(int y = 0; y < base.height; y++){
                        int color = base.get(x, y);
                        //map the three template grayscale shades to the team palette; -1 keeps the original pixel
                        int index = switch(color){
                            case 0xffffffff -> 0;
                            case 0xdcc6c6ff, 0xdbc5c5ff -> 1;
                            case 0x9d7f7fff, 0x9e8080ff -> 2;
                            default -> -1;
                        };
                        out.setRaw(x, y, index == -1 ? base.get(x, y) : team.palettei[index]);
                    }
                }

                Drawf.checkBleed(out);

                packer.add(PageType.main, name + "-team-" + team.name, out);
                toDispose.add(out);
            }
        }

        //rebuild the team-region lookup now that paletted regions exist
        teamRegions = new TextureRegion[Team.all.length];
        for(Team team : Team.all){
            teamRegions[team.id] = teamRegion.found() && team.hasPalette ? Core.atlas.find(name + "-team-" + team.name, teamRegion) : teamRegion;
        }
    }

    Pixmap last = null;

    var gen = icons();

    if(outlineIcon){
        //outline either the configured icon index or the last generated icon
        AtlasRegion atlasRegion = (AtlasRegion)gen[outlinedIcon >= 0 ? Math.min(outlinedIcon, gen.length - 1) : gen.length -1];
        PixmapRegion region = Core.atlas.getPixmap(atlasRegion);
        Pixmap out = last = Pixmaps.outline(region, outlineColor, outlineRadius);
        Drawf.checkBleed(out);
        packer.add(PageType.main, atlasRegion.name, out);
        toDispose.add(out);
    }

    var toOutline = new Seq<TextureRegion>();
    getRegionsToOutline(toOutline);

    for(var region : toOutline){
        if(region instanceof AtlasRegion atlas){
            String regionName = atlas.name;
            Pixmap outlined = Pixmaps.outline(Core.atlas.getPixmap(region), outlineColor, outlineRadius);

            Drawf.checkBleed(outlined);

            packer.add(PageType.main, regionName + "-outline", outlined);
            toDispose.add(outlined);
        }
    }

    PixmapRegion editorBase;

    if(gen.length > 1){
        //composite all icon layers into the single "-full" icon, substituting the outlined top layer if present
        Pixmap base = Core.atlas.getPixmap(gen[0]).crop();
        for(int i = 1; i < gen.length; i++){
            if(i == gen.length - 1 && last != null){
                base.draw(last, 0, 0, true);
            }else{
                base.draw(Core.atlas.getPixmap(gen[i]), true);
            }
        }
        packer.add(PageType.main, "block-" + name + "-full", base);
        editorBase = new PixmapRegion(base);
        toDispose.add(base);
    }else{
        if(gen[0] != null) packer.add(PageType.main, "block-" + name + "-full", Core.atlas.getPixmap(gen[0]));
        editorBase = gen[0] == null ? Core.atlas.getPixmap(fullIcon) : Core.atlas.getPixmap(gen[0]);
    }

    packer.add(PageType.editor, name + "-icon-editor", editorBase);

    toDispose.each(Pixmap::dispose);
}

/** @return the effective rotation for a plan; forced to 0 when this block is non-rotatable but rotation-locked. */
public int planRotation(int rot){
    return !rotate && lockRotation ? 0 : rot;
}

/** Flips a plan's rotation across the given axis (x = horizontal), respecting invertFlip. */
public void flipRotation(BuildPlan req, boolean x){
    if((x == (req.rotation % 2 == 0)) != invertFlip){
        req.rotation = planRotation(Mathf.mod(req.rotation + 2, 4));
    }
}

/** Logic-sensor access to block-level stats. Unknown sensors yield NaN. */
@Override
public double sense(LAccess sensor){
    return switch(sensor){
        case color -> mapColor.toDoubleBits();
        case health, maxHealth -> health;
        case size -> size;
        case itemCapacity -> itemCapacity;
        case liquidCapacity -> liquidCapacity;
        case powerCapacity -> consPower != null && consPower.buffered ? consPower.capacity : 0f;
        case id -> getLogicId();
        default -> Double.NaN;
    };
}

/** Blocks have no per-content sensor values. */
@Override
public double sense(Content content){
    return Double.NaN;
}

/** Logic-sensor access to object values; only the block name is exposed. */
@Override
public Object senseObject(LAccess sensor){
    if(sensor == LAccess.name) return name;
    return noSensed;
}
}
Anuken/Mindustry
core/src/mindustry/world/Block.java
956
package mindustry.mod; import arc.*; import arc.assets.*; import arc.assets.loaders.MusicLoader.*; import arc.assets.loaders.SoundLoader.*; import arc.audio.*; import arc.files.*; import arc.func.*; import arc.graphics.*; import arc.graphics.g2d.*; import arc.math.*; import arc.math.geom.*; import arc.struct.*; import arc.util.*; import arc.util.serialization.*; import arc.util.serialization.Json.*; import arc.util.serialization.Jval.*; import mindustry.*; import mindustry.ai.*; import mindustry.ai.types.*; import mindustry.content.*; import mindustry.content.TechTree.*; import mindustry.ctype.*; import mindustry.entities.*; import mindustry.entities.Units.*; import mindustry.entities.abilities.*; import mindustry.entities.bullet.*; import mindustry.entities.effect.*; import mindustry.entities.part.*; import mindustry.entities.part.DrawPart.*; import mindustry.entities.pattern.*; import mindustry.game.*; import mindustry.game.Objectives.*; import mindustry.gen.*; import mindustry.graphics.*; import mindustry.graphics.g3d.*; import mindustry.graphics.g3d.PlanetGrid.*; import mindustry.io.*; import mindustry.maps.generators.*; import mindustry.maps.planet.*; import mindustry.mod.Mods.*; import mindustry.type.*; import mindustry.type.ammo.*; import mindustry.type.weather.*; import mindustry.world.*; import mindustry.world.blocks.units.*; import mindustry.world.blocks.units.UnitFactory.*; import mindustry.world.consumers.*; import mindustry.world.draw.*; import mindustry.world.meta.*; import java.lang.reflect.*; import static mindustry.Vars.*; @SuppressWarnings("unchecked") public class ContentParser{ private static final boolean ignoreUnknownFields = true; private static final ContentType[] typesToSearch = {ContentType.block, ContentType.item, ContentType.unit, ContentType.liquid, ContentType.planet}; ObjectMap<Class<?>, ContentType> contentTypes = new ObjectMap<>(); ObjectSet<Class<?>> implicitNullable = ObjectSet.with(TextureRegion.class, TextureRegion[].class, 
TextureRegion[][].class, TextureRegion[][][].class); Seq<ParseListener> listeners = new Seq<>(); ObjectMap<Class<?>, FieldParser> classParsers = new ObjectMap<>(){{ put(Effect.class, (type, data) -> { if(data.isString()){ return field(Fx.class, data); } if(data.isArray()){ return new MultiEffect(parser.readValue(Effect[].class, data)); } Class<? extends Effect> bc = resolve(data.getString("type", ""), ParticleEffect.class); data.remove("type"); Effect result = make(bc); readFields(result, data); return result; }); put(Sortf.class, (type, data) -> field(UnitSorts.class, data)); put(Interp.class, (type, data) -> field(Interp.class, data)); put(Blending.class, (type, data) -> field(Blending.class, data)); put(CacheLayer.class, (type, data) -> field(CacheLayer.class, data)); put(Attribute.class, (type, data) -> { String attr = data.asString(); if(Attribute.exists(attr)) return Attribute.get(attr); return Attribute.add(attr); }); put(BuildVisibility.class, (type, data) -> field(BuildVisibility.class, data)); put(Schematic.class, (type, data) -> { Object result = fieldOpt(Loadouts.class, data); if(result != null){ return result; }else{ String str = data.asString(); if(str.startsWith(Vars.schematicBaseStart)){ return Schematics.readBase64(str); }else{ return Schematics.read(Vars.tree.get("schematics/" + str + "." 
+ Vars.schematicExtension)); } } }); put(Color.class, (type, data) -> Color.valueOf(data.asString())); put(StatusEffect.class, (type, data) -> { if(data.isString()){ StatusEffect result = locate(ContentType.status, data.asString()); if(result != null) return result; throw new IllegalArgumentException("Unknown status effect: '" + data.asString() + "'"); } StatusEffect effect = new StatusEffect(currentMod.name + "-" + data.getString("name")); effect.minfo.mod = currentMod; readFields(effect, data); return effect; }); put(UnitCommand.class, (type, data) -> { if(data.isString()){ var cmd = content.unitCommand(data.asString()); if(cmd != null){ return cmd; }else{ throw new IllegalArgumentException("Unknown unit command name: " + data.asString()); } }else{ throw new IllegalArgumentException("Unit commands must be strings."); } }); put(UnitStance.class, (type, data) -> { if(data.isString()){ var cmd = content.unitStance(data.asString()); if(cmd != null){ return cmd; }else{ throw new IllegalArgumentException("Unknown unit stance name: " + data.asString()); } }else{ throw new IllegalArgumentException("Unit stances must be strings."); } }); put(BulletType.class, (type, data) -> { if(data.isString()){ return field(Bullets.class, data); } Class<?> bc = resolve(data.getString("type", ""), BasicBulletType.class); data.remove("type"); BulletType result = (BulletType)make(bc); readFields(result, data); return result; }); put(AmmoType.class, (type, data) -> { //string -> item //if liquid ammo support is added, this should scan for liquids as well if(data.isString()) return new ItemAmmoType(find(ContentType.item, data.asString())); //number -> power if(data.isNumber()) return new PowerAmmoType(data.asFloat()); var bc = resolve(data.getString("type", ""), ItemAmmoType.class); data.remove("type"); AmmoType result = make(bc); readFields(result, data); return result; }); put(DrawBlock.class, (type, data) -> { if(data.isString()){ //try to instantiate return 
make(resolve(data.asString())); } //array is shorthand for DrawMulti if(data.isArray()){ return new DrawMulti(parser.readValue(DrawBlock[].class, data)); } var bc = resolve(data.getString("type", ""), DrawDefault.class); data.remove("type"); DrawBlock result = make(bc); readFields(result, data); return result; }); put(ShootPattern.class, (type, data) -> { var bc = resolve(data.getString("type", ""), ShootPattern.class); data.remove("type"); var result = make(bc); readFields(result, data); return result; }); put(DrawPart.class, (type, data) -> { Class<?> bc = resolve(data.getString("type", ""), RegionPart.class); data.remove("type"); var result = make(bc); readFields(result, data); return result; }); //TODO this is untested put(PartProgress.class, (type, data) -> { //simple case: it's a string or number constant if(data.isString()) return field(PartProgress.class, data.asString()); if(data.isNumber()) return PartProgress.constant(data.asFloat()); if(!data.has("type")){ throw new RuntimeException("PartProgress object need a 'type' string field. Check the PartProgress class for a list of constants."); } PartProgress base = (PartProgress)field(PartProgress.class, data.getString("type")); JsonValue opval = data.has("operation") ? data.get("operation") : data.has("op") ? data.get("op") : null; //no singular operation, check for multi-operation if(opval == null){ JsonValue opsVal = data.has("operations") ? data.get("operations") : data.has("ops") ? data.get("ops") : null; if(opsVal != null){ if(!opsVal.isArray()) throw new RuntimeException("Chained PartProgress operations must be an array."); int i = 0; while(true){ JsonValue val = opsVal.get(i); if(val == null) break; JsonValue op = val.has("operation") ? val.get("operation") : val.has("op") ? 
val.get("op") : null; base = parseProgressOp(base, op.asString(), val); i++; } } return base; } //this is the name of the method to call String op = opval.asString(); return parseProgressOp(base, op, data); }); put(PlanetGenerator.class, (type, data) -> { var result = new AsteroidGenerator(); //only one type for now readFields(result, data); return result; }); put(Mat3D.class, (type, data) -> { if(data == null) return new Mat3D(); //transform x y z format if(data.has("x") && data.has("y") && data.has("z")){ return new Mat3D().translate(data.getFloat("x", 0f), data.getFloat("y", 0f), data.getFloat("z", 0f)); } //transform array format if(data.isArray() && data.size == 3){ return new Mat3D().setToTranslation(new Vec3(data.asFloatArray())); } Mat3D mat = new Mat3D(); //TODO this is kinda bad for(var val : data){ switch(val.name){ case "translate", "trans" -> mat.translate(parser.readValue(Vec3.class, data)); case "scale", "scl" -> mat.scale(parser.readValue(Vec3.class, data)); case "rotate", "rot" -> mat.rotate(parser.readValue(Vec3.class, data), data.getFloat("degrees", 0f)); case "multiply", "mul" -> mat.mul(parser.readValue(Mat3D.class, data)); case "x", "y", "z" -> {} default -> throw new RuntimeException("Unknown matrix transformation: '" + val.name + "'"); } } return mat; }); put(Vec3.class, (type, data) -> { if(data.isArray()) return new Vec3(data.asFloatArray()); return new Vec3(data.getFloat("x", 0f), data.getFloat("y", 0f), data.getFloat("z", 0f)); }); put(Sound.class, (type, data) -> { if(data.isArray()) return new RandomSound(parser.readValue(Sound[].class, data)); var field = fieldOpt(Sounds.class, data); return field != null ? field : Vars.tree.loadSound(data.asString()); }); put(Music.class, (type, data) -> { var field = fieldOpt(Musics.class, data); return field != null ? 
field : Vars.tree.loadMusic(data.asString()); }); put(Objectives.Objective.class, (type, data) -> { if(data.isString()){ var cont = locateAny(data.asString()); if(cont == null) throw new IllegalArgumentException("Unknown objective content: " + data.asString()); return new Research((UnlockableContent)cont); } var oc = resolve(data.getString("type", ""), SectorComplete.class); data.remove("type"); Objectives.Objective obj = make(oc); readFields(obj, data); return obj; }); put(Ability.class, (type, data) -> { Class<? extends Ability> oc = resolve(data.getString("type", "")); data.remove("type"); Ability obj = make(oc); readFields(obj, data); return obj; }); put(Weapon.class, (type, data) -> { var oc = resolve(data.getString("type", ""), Weapon.class); data.remove("type"); var weapon = make(oc); readFields(weapon, data); weapon.name = currentMod.name + "-" + weapon.name; return weapon; }); put(Consume.class, (type, data) -> { var oc = resolve(data.getString("type", ""), Consume.class); data.remove("type"); var consume = make(oc); readFields(consume, data); return consume; }); put(ConsumeLiquidBase.class, (type, data) -> { var oc = resolve(data.getString("type", ""), ConsumeLiquidBase.class); data.remove("type"); var consume = make(oc); readFields(consume, data); return consume; }); }}; /** Stores things that need to be parsed fully, e.g. reading fields of content. 
* This is done to accommodate binding of content names first.*/ private Seq<Runnable> reads = new Seq<>(); private Seq<Runnable> postreads = new Seq<>(); private ObjectSet<Object> toBeParsed = new ObjectSet<>(); LoadedMod currentMod; Content currentContent; private Json parser = new Json(){ @Override public <T> T readValue(Class<T> type, Class elementType, JsonValue jsonData, Class keyType){ T t = internalRead(type, elementType, jsonData, keyType); if(t != null && !Reflect.isWrapper(t.getClass()) && (type == null || !type.isPrimitive())){ checkNullFields(t); listeners.each(hook -> hook.parsed(type, jsonData, t)); } return t; } private <T> T internalRead(Class<T> type, Class elementType, JsonValue jsonData, Class keyType){ if(type != null){ if(classParsers.containsKey(type)){ try{ return (T)classParsers.get(type).parse(type, jsonData); }catch(Exception e){ throw new RuntimeException(e); } } //try to parse env bits if((type == int.class || type == Integer.class) && jsonData.isArray()){ int value = 0; for(var str : jsonData){ if(!str.isString()) throw new SerializationException("Integer bitfield values must all be strings. Found: " + str); String field = str.asString(); value |= Reflect.<Integer>get(Env.class, field); } return (T)(Integer)value; } //try to parse "item/amount" syntax if(type == ItemStack.class && jsonData.isString() && jsonData.asString().contains("/")){ String[] split = jsonData.asString().split("/"); return (T)fromJson(ItemStack.class, "{item: " + split[0] + ", amount: " + split[1] + "}"); } //try to parse "payloaditem/amount" syntax if(type == PayloadStack.class && jsonData.isString() && jsonData.asString().contains("/")){ String[] split = jsonData.asString().split("/"); int number = Strings.parseInt(split[1], 1); UnlockableContent cont = content.unit(split[0]) == null ? content.block(split[0]) : content.unit(split[0]); return (T)new PayloadStack(cont == null ? 
Blocks.router : cont, number); } //try to parse "liquid/amount" syntax if(jsonData.isString() && jsonData.asString().contains("/")){ String[] split = jsonData.asString().split("/"); if(type == LiquidStack.class){ return (T)fromJson(LiquidStack.class, "{liquid: " + split[0] + ", amount: " + split[1] + "}"); }else if(type == ConsumeLiquid.class){ return (T)fromJson(ConsumeLiquid.class, "{liquid: " + split[0] + ", amount: " + split[1] + "}"); } } //try to parse Rect as array if(type == Rect.class && jsonData.isArray() && jsonData.size == 4){ return (T)new Rect(jsonData.get(0).asFloat(), jsonData.get(1).asFloat(), jsonData.get(2).asFloat(), jsonData.get(3).asFloat()); } //search across different content types to find one by name if(type == UnlockableContent.class){ for(ContentType c : typesToSearch){ T found = (T)locate(c, jsonData.asString()); if(found != null){ return found; } } throw new IllegalArgumentException("\"" + jsonData.name + "\": No content found with name '" + jsonData.asString() + "'."); } if(Content.class.isAssignableFrom(type)){ ContentType ctype = contentTypes.getThrow(type, () -> new IllegalArgumentException("No content type for class: " + type.getSimpleName())); String prefix = currentMod != null ? 
currentMod.name + "-" : ""; T one = (T)Vars.content.getByName(ctype, prefix + jsonData.asString()); if(one != null) return one; T two = (T)Vars.content.getByName(ctype, jsonData.asString()); if(two != null) return two; throw new IllegalArgumentException("\"" + jsonData.name + "\": No " + ctype + " found with name '" + jsonData.asString() + "'.\nMake sure '" + jsonData.asString() + "' is spelled correctly, and that it really exists!\nThis may also occur because its file failed to parse."); } } return super.readValue(type, elementType, jsonData, keyType); } }; private ObjectMap<ContentType, TypeParser<?>> parsers = ObjectMap.of( ContentType.block, (TypeParser<Block>)(mod, name, value) -> { readBundle(ContentType.block, name, value); Block block; if(locate(ContentType.block, name) != null){ if(value.has("type")){ Log.warn("Warning: '" + currentMod.name + "-" + name + "' re-declares a type. This will be interpreted as a new block. If you wish to override a vanilla block, omit the 'type' section, as vanilla block `type`s cannot be changed."); block = make(resolve(value.getString("type", ""), Block.class), mod + "-" + name); }else{ block = locate(ContentType.block, name); } }else{ block = make(resolve(value.getString("type", ""), Block.class), mod + "-" + name); } currentContent = block; read(() -> { if(value.has("consumes") && value.get("consumes").isObject()){ for(JsonValue child : value.get("consumes")){ switch(child.name){ case "remove" -> { String[] values = child.isString() ? 
new String[]{child.asString()} : child.asStringArray(); for(String type : values){ Class<?> consumeType = resolve("Consume" + Strings.capitalize(type), Consume.class); if(consumeType != Consume.class){ block.removeConsumers(b -> consumeType.isAssignableFrom(b.getClass())); }else{ Log.warn("Unknown consumer type '@' (Class: @) in consume: remove.", type, "Consume" + Strings.capitalize(type)); } } } case "item" -> block.consumeItem(find(ContentType.item, child.asString())); case "itemCharged" -> block.consume((Consume)parser.readValue(ConsumeItemCharged.class, child)); case "itemFlammable" -> block.consume((Consume)parser.readValue(ConsumeItemFlammable.class, child)); case "itemRadioactive" -> block.consume((Consume)parser.readValue(ConsumeItemRadioactive.class, child)); case "itemExplosive" -> block.consume((Consume)parser.readValue(ConsumeItemExplosive.class, child)); case "itemExplode" -> block.consume((Consume)parser.readValue(ConsumeItemExplode.class, child)); case "items" -> block.consume(child.isArray() ? new ConsumeItems(parser.readValue(ItemStack[].class, child)) : parser.readValue(ConsumeItems.class, child)); case "liquidFlammable" -> block.consume((Consume)parser.readValue(ConsumeLiquidFlammable.class, child)); case "liquid" -> block.consume((Consume)parser.readValue(ConsumeLiquid.class, child)); case "liquids" -> block.consume(child.isArray() ? 
new ConsumeLiquids(parser.readValue(LiquidStack[].class, child)) : parser.readValue(ConsumeLiquids.class, child)); case "coolant" -> block.consume((Consume)parser.readValue(ConsumeCoolant.class, child)); case "power" -> { if(child.isNumber()){ block.consumePower(child.asFloat()); }else{ block.consume((Consume)parser.readValue(ConsumePower.class, child)); } } case "powerBuffered" -> block.consumePowerBuffered(child.asFloat()); default -> throw new IllegalArgumentException("Unknown consumption type: '" + child.name + "' for block '" + block.name + "'."); } } value.remove("consumes"); } readFields(block, value, true); if(block.size > maxBlockSize){ throw new IllegalArgumentException("Blocks cannot be larger than " + maxBlockSize); } //make block visible by default if there are requirements and no visibility set if(value.has("requirements") && block.buildVisibility == BuildVisibility.hidden){ block.buildVisibility = BuildVisibility.shown; } }); return block; }, ContentType.unit, (TypeParser<UnitType>)(mod, name, value) -> { readBundle(ContentType.unit, name, value); UnitType unit; if(locate(ContentType.unit, name) == null){ unit = make(resolve(value.getString("template", ""), UnitType.class), mod + "-" + name); if(value.has("template")){ value.remove("template"); } var typeVal = value.get("type"); if(unit.constructor == null || typeVal != null){ if(typeVal != null && !typeVal.isString()){ throw new RuntimeException("Unit '" + name + "' has an incorrect type. Types must be strings."); } unit.constructor = unitType(typeVal); } }else{ unit = locate(ContentType.unit, name); } currentContent = unit; //TODO test this! 
read(() -> { //add reconstructor type if(value.has("requirements")){ JsonValue rec = value.remove("requirements"); UnitReq req = parser.readValue(UnitReq.class, rec); if(req.block instanceof Reconstructor r){ if(req.previous != null){ r.upgrades.add(new UnitType[]{req.previous, unit}); } }else if(req.block instanceof UnitFactory f){ f.plans.add(new UnitPlan(unit, req.time, req.requirements)); }else{ throw new IllegalArgumentException("Missing a valid 'block' in 'requirements'"); } } if(value.has("controller") || value.has("aiController")){ unit.aiController = supply(resolve(value.getString("controller", value.getString("aiController", "")), FlyingAI.class)); value.remove("controller"); } if(value.has("defaultController")){ var sup = supply(resolve(value.getString("defaultController"), FlyingAI.class)); unit.controller = u -> sup.get(); value.remove("defaultController"); } //read extra default waves if(value.has("waves")){ JsonValue waves = value.remove("waves"); SpawnGroup[] groups = parser.readValue(SpawnGroup[].class, waves); for(SpawnGroup group : groups){ group.type = unit; } Vars.waves.get().addAll(groups); } readFields(unit, value, true); }); return unit; }, ContentType.weather, (TypeParser<Weather>)(mod, name, value) -> { Weather item; if(locate(ContentType.weather, name) != null){ item = locate(ContentType.weather, name); readBundle(ContentType.weather, name, value); }else{ readBundle(ContentType.weather, name, value); item = make(resolve(getType(value), ParticleWeather.class), mod + "-" + name); value.remove("type"); } currentContent = item; read(() -> readFields(item, value)); return item; }, ContentType.item, parser(ContentType.item, Item::new), ContentType.liquid, (TypeParser<Liquid>)(mod, name, value) -> { Liquid liquid; if(locate(ContentType.liquid, name) != null){ liquid = locate(ContentType.liquid, name); readBundle(ContentType.liquid, name, value); }else{ readBundle(ContentType.liquid, name, value); liquid = make(resolve(value.getString("type", 
null), Liquid.class), mod + "-" + name); value.remove("type"); } currentContent = liquid; read(() -> readFields(liquid, value)); return liquid; }, ContentType.status, parser(ContentType.status, StatusEffect::new), ContentType.sector, (TypeParser<SectorPreset>)(mod, name, value) -> { if(value.isString()){ return locate(ContentType.sector, name); } if(!value.has("sector") || !value.get("sector").isNumber()) throw new RuntimeException("SectorPresets must have a sector number."); SectorPreset out = new SectorPreset(mod + "-" + name, currentMod); currentContent = out; read(() -> { Planet planet = locate(ContentType.planet, value.getString("planet", "serpulo")); if(planet == null) throw new RuntimeException("Planet '" + value.getString("planet") + "' not found."); out.initialize(planet, value.getInt("sector", 0)); value.remove("sector"); value.remove("planet"); readFields(out, value); }); return out; }, ContentType.planet, (TypeParser<Planet>)(mod, name, value) -> { if(value.isString()) return locate(ContentType.planet, name); Planet parent = locate(ContentType.planet, value.getString("parent", "")); Planet planet = new Planet(mod + "-" + name, parent, value.getFloat("radius", 1f), value.getInt("sectorSize", 0)); if(value.has("mesh")){ var mesh = value.get("mesh"); if(!mesh.isObject() && !mesh.isArray()) throw new RuntimeException("Meshes must be objects."); value.remove("mesh"); planet.meshLoader = () -> { //don't crash, just log an error try{ return parseMesh(planet, mesh); }catch(Exception e){ Log.err(e); return new ShaderSphereMesh(planet, Shaders.unlit, 2); } }; } if(value.has("cloudMesh")){ var mesh = value.get("cloudMesh"); if(!mesh.isObject() && !mesh.isArray()) throw new RuntimeException("Meshes must be objects."); value.remove("cloudMesh"); planet.cloudMeshLoader = () -> { //don't crash, just log an error try{ return parseMesh(planet, mesh); }catch(Exception e){ Log.err(e); return null; } }; } //always one sector right now... 
planet.sectors.add(new Sector(planet, Ptile.empty)); currentContent = planet; read(() -> readFields(planet, value)); return planet; } ); private Prov<Unit> unitType(JsonValue value){ if(value == null) return UnitEntity::create; return switch(value.asString()){ case "flying" -> UnitEntity::create; case "mech" -> MechUnit::create; case "legs" -> LegsUnit::create; case "naval" -> UnitWaterMove::create; case "payload" -> PayloadUnit::create; case "missile" -> TimedKillUnit::create; case "tank" -> TankUnit::create; case "hover" -> ElevationMoveUnit::create; case "tether" -> BuildingTetherPayloadUnit::create; case "crawl" -> CrawlUnit::create; default -> throw new RuntimeException("Invalid unit type: '" + value + "'. Must be 'flying/mech/legs/naval/payload/missile/tether/crawl'."); }; } private String getString(JsonValue value, String key){ if(value.has(key)){ return value.getString(key); }else{ throw new IllegalArgumentException("You are missing a \"" + key + "\". It must be added before the file can be parsed."); } } private String getType(JsonValue value){ return getString(value, "type"); } private <T extends Content> T find(ContentType type, String name){ Content c = Vars.content.getByName(type, name); if(c == null) c = Vars.content.getByName(type, currentMod.name + "-" + name); if(c == null) throw new IllegalArgumentException("No " + type + " found with name '" + name + "'"); return (T)c; } private <T extends Content> TypeParser<T> parser(ContentType type, Func<String, T> constructor){ return (mod, name, value) -> { T item; if(locate(type, name) != null){ item = (T)locate(type, name); readBundle(type, name, value); }else{ readBundle(type, name, value); item = constructor.get(mod + "-" + name); } currentContent = item; read(() -> readFields(item, value)); return item; }; } private void readBundle(ContentType type, String name, JsonValue value){ UnlockableContent cont = locate(type, name) instanceof UnlockableContent ? 
locate(type, name) : null; String entryName = cont == null ? type + "." + currentMod.name + "-" + name + "." : type + "." + cont.name + "."; I18NBundle bundle = Core.bundle; while(bundle.getParent() != null) bundle = bundle.getParent(); if(value.has("name")){ if(!Core.bundle.has(entryName + "name")){ bundle.getProperties().put(entryName + "name", value.getString("name")); if(cont != null) cont.localizedName = value.getString("name"); } value.remove("name"); } if(value.has("description")){ if(!Core.bundle.has(entryName + "description")){ bundle.getProperties().put(entryName + "description", value.getString("description")); if(cont != null) cont.description = value.getString("description"); } value.remove("description"); } } /** Call to read a content's extra info later.*/ private void read(Runnable run){ Content cont = currentContent; LoadedMod mod = currentMod; reads.add(() -> { this.currentMod = mod; this.currentContent = cont; run.run(); //check nulls after parsing if(cont != null){ toBeParsed.remove(cont); checkNullFields(cont); } }); } private void init(){ for(ContentType type : ContentType.all){ Seq<Content> arr = Vars.content.getBy(type); if(!arr.isEmpty()){ Class<?> c = arr.first().getClass(); //get base content class, skipping intermediates while(!(c.getSuperclass() == Content.class || c.getSuperclass() == UnlockableContent.class || Modifier.isAbstract(c.getSuperclass().getModifiers()))){ c = c.getSuperclass(); } contentTypes.put(c, type); } } } private void attempt(Runnable run){ try{ run.run(); }catch(Throwable t){ Log.err(t); //don't overwrite double errors markError(currentContent, t); } } public void finishParsing(){ reads.each(this::attempt); postreads.each(this::attempt); reads.clear(); postreads.clear(); toBeParsed.clear(); } /** * Parses content from a json file. 
* @param name the name of the file without its extension * @param json the json to parse * @param type the type of content this is * @param file file that this content is being parsed from * @return the content that was parsed */ public Content parse(LoadedMod mod, String name, String json, Fi file, ContentType type) throws Exception{ if(contentTypes.isEmpty()){ init(); } //remove extra # characters to make it valid json... apparently some people have *unquoted* # characters in their json if(file.extension().equals("json")){ json = json.replace("#", "\\#"); } currentMod = mod; JsonValue value = parser.fromJson(null, Jval.read(json).toString(Jformat.plain)); if(!parsers.containsKey(type)){ throw new SerializationException("No parsers for content type '" + type + "'"); } boolean located = locate(type, name) != null; Content c = parsers.get(type).parse(mod.name, name, value); c.minfo.sourceFile = file; toBeParsed.add(c); if(!located){ c.minfo.mod = mod; } return c; } public void markError(Content content, LoadedMod mod, Fi file, Throwable error){ Log.err("Error for @ / @:\n@\n", content, file, Strings.getStackTrace(error)); content.minfo.mod = mod; content.minfo.sourceFile = file; content.minfo.error = makeError(error, file); content.minfo.baseError = error; if(mod != null){ mod.erroredContent.add(content); } } public void markError(Content content, Throwable error){ if(content.minfo != null && !content.hasErrored()){ markError(content, content.minfo.mod, content.minfo.sourceFile, error); } } private String makeError(Throwable t, Fi file){ StringBuilder builder = new StringBuilder(); builder.append("[lightgray]").append("File: ").append(file.name()).append("[]\n\n"); if(t.getMessage() != null && t instanceof JsonParseException){ builder.append("[accent][[JsonParse][] ").append(":\n").append(t.getMessage()); }else if(t instanceof NullPointerException){ builder.append(Strings.neatError(t)); }else{ Seq<Throwable> causes = Strings.getCauses(t); for(Throwable e : causes){ 
builder.append("[accent][[").append(e.getClass().getSimpleName().replace("Exception", "")) .append("][] ") .append(e.getMessage() != null ? e.getMessage().replace("mindustry.", "").replace("arc.", "") : "").append("\n"); } } return builder.toString(); } private <T extends MappableContent> T locate(ContentType type, String name){ T first = Vars.content.getByName(type, name); //try vanilla replacement return first != null ? first : Vars.content.getByName(type, currentMod.name + "-" + name); } private <T extends MappableContent> T locateAny(String name){ for(ContentType t : ContentType.all){ var out = locate(t, name); if(out != null){ return (T)out; } } return null; } private GenericMesh[] parseMeshes(Planet planet, JsonValue array){ var res = new GenericMesh[array.size]; for(int i = 0; i < array.size; i++){ //yes get is O(n) but it's practically irrelevant here res[i] = parseMesh(planet, array.get(i)); } return res; } private GenericMesh parseMesh(Planet planet, JsonValue data){ if(data.isArray()){ return new MultiMesh(parseMeshes(planet, data)); } String tname = Strings.capitalize(data.getString("type", "NoiseMesh")); return switch(tname){ //TODO NoiseMesh is bad case "NoiseMesh" -> new NoiseMesh(planet, data.getInt("seed", 0), data.getInt("divisions", 1), data.getFloat("radius", 1f), data.getInt("octaves", 1), data.getFloat("persistence", 0.5f), data.getFloat("scale", 1f), data.getFloat("mag", 0.5f), Color.valueOf(data.getString("color1", data.getString("color", "ffffff"))), Color.valueOf(data.getString("color2", data.getString("color", "ffffff"))), data.getInt("colorOct", 1), data.getFloat("colorPersistence", 0.5f), data.getFloat("colorScale", 1f), data.getFloat("colorThreshold", 0.5f)); case "SunMesh" -> { var cvals = data.get("colors").asStringArray(); var colors = new Color[cvals.length]; for(int i=0; i<cvals.length; i++){ colors[i] = Color.valueOf(cvals[i]); } yield new SunMesh(planet, data.getInt("divisions", 1), data.getInt("octaves", 1), 
data.getFloat("persistence", 0.5f), data.getFloat("scl", 1f), data.getFloat("pow", 1f), data.getFloat("mag", 0.5f), data.getFloat("colorScale", 1f), colors); } case "HexSkyMesh" -> new HexSkyMesh(planet, data.getInt("seed", 0), data.getFloat("speed", 0), data.getFloat("radius", 1f), data.getInt("divisions", 3), Color.valueOf(data.getString("color", "ffffff")), data.getInt("octaves", 1), data.getFloat("persistence", 0.5f), data.getFloat("scale", 1f), data.getFloat("thresh", 0.5f)); case "MultiMesh" -> new MultiMesh(parseMeshes(planet, data.get("meshes"))); case "MatMesh" -> new MatMesh(parseMesh(planet, data.get("mesh")), parser.readValue(Mat3D.class, data.get("mat"))); default -> throw new RuntimeException("Unknown mesh type: " + tname); }; } private PartProgress parseProgressOp(PartProgress base, String op, JsonValue data){ //I have to hard-code this, no easy way of getting parameter names, unfortunately return switch(op){ case "inv" -> base.inv(); case "slope" -> base.slope(); case "clamp" -> base.clamp(); case "delay" -> base.delay(data.getFloat("amount")); case "sustain" -> base.sustain(data.getFloat("offset", 0f), data.getFloat("grow", 0f), data.getFloat("sustain")); case "shorten" -> base.shorten(data.getFloat("amount")); case "compress" -> base.compress(data.getFloat("start"), data.getFloat("end")); case "add" -> data.has("amount") ? base.add(data.getFloat("amount")) : base.add(parser.readValue(PartProgress.class, data.get("other"))); case "blend" -> base.blend(parser.readValue(PartProgress.class, data.get("other")), data.getFloat("amount")); case "mul" -> data.has("amount") ? base.mul(data.getFloat("amount")) : base.mul(parser.readValue(PartProgress.class, data.get("other"))); case "min" -> base.min(parser.readValue(PartProgress.class, data.get("other"))); case "sin" -> base.sin(data.has("offset") ? 
data.getFloat("offset") : 0f, data.getFloat("scl"), data.getFloat("mag")); case "absin" -> base.absin(data.getFloat("scl"), data.getFloat("mag")); case "curve" -> data.has("interp") ? base.curve(parser.readValue(Interp.class, data.get("interp"))) : base.curve(data.getFloat("offset"), data.getFloat("duration")); default -> throw new RuntimeException("Unknown operation '" + op + "', check PartProgress class for a list of methods."); }; } <T> T make(Class<T> type){ try{ Constructor<T> cons = type.getDeclaredConstructor(); cons.setAccessible(true); return cons.newInstance(); }catch(Exception e){ throw new RuntimeException(e); } } private <T> T make(Class<T> type, String name){ try{ Constructor<T> cons = type.getDeclaredConstructor(String.class); cons.setAccessible(true); return cons.newInstance(name); }catch(Exception e){ throw new RuntimeException(e); } } private <T> Prov<T> supply(Class<T> type){ try{ Constructor<T> cons = type.getDeclaredConstructor(); return () -> { try{ return cons.newInstance(); }catch(Exception e){ throw new RuntimeException(e); } }; }catch(Exception e){ throw new RuntimeException(e); } } Object field(Class<?> type, JsonValue value){ return field(type, value.asString()); } /** Gets a field from a static class by name, throwing a descriptive exception if not found. 
*/ private Object field(Class<?> type, String name){ try{ Object b = type.getField(name).get(null); if(b == null) throw new IllegalArgumentException(type.getSimpleName() + ": not found: '" + name + "'"); return b; }catch(Exception e){ throw new RuntimeException(e); } } Object fieldOpt(Class<?> type, JsonValue value){ try{ return type.getField(value.asString()).get(null); }catch(Exception e){ return null; } } void checkNullFields(Object object){ if(object == null || object instanceof Number || object instanceof String || toBeParsed.contains(object) || object.getClass().getName().startsWith("arc.")) return; parser.getFields(object.getClass()).values().toSeq().each(field -> { try{ if(field.field.getType().isPrimitive()) return; if(!field.field.isAnnotationPresent(Nullable.class) && field.field.get(object) == null && !implicitNullable.contains(field.field.getType())){ throw new RuntimeException("'" + field.field.getName() + "' in " + ((object.getClass().isAnonymousClass() ? object.getClass().getSuperclass() : object.getClass()).getSimpleName()) + " is missing! 
Object = " + object + ", field = (" + field.field.getName() + " = " + field.field.get(object) + ")"); } }catch(Exception e){ throw new RuntimeException(e); } }); } private void readFields(Object object, JsonValue jsonMap, boolean stripType){ if(stripType) jsonMap.remove("type"); readFields(object, jsonMap); } void readFields(Object object, JsonValue jsonMap){ JsonValue research = jsonMap.remove("research"); toBeParsed.remove(object); var type = object.getClass(); var fields = parser.getFields(type); for(JsonValue child = jsonMap.child; child != null; child = child.next){ FieldMetadata metadata = fields.get(child.name().replace(" ", "_")); if(metadata == null){ if(ignoreUnknownFields){ Log.warn("[@]: Ignoring unknown field: @ (@)", currentContent.minfo.sourceFile.name(), child.name, type.getSimpleName()); continue; }else{ SerializationException ex = new SerializationException("Field not found: " + child.name + " (" + type.getName() + ")"); ex.addTrace(child.trace()); throw ex; } } Field field = metadata.field; try{ boolean mergeMap = ObjectMap.class.isAssignableFrom(field.getType()) && child.has("add") && child.get("add").isBoolean() && child.getBoolean("add", false); if(mergeMap){ child.remove("add"); } Object readField = parser.readValue(field.getType(), metadata.elementType, child, metadata.keyType); //if a map has add: true, add its contents to the map instead if(mergeMap && field.get(object) instanceof ObjectMap<?,?> baseMap){ baseMap.putAll((ObjectMap)readField); }else{ field.set(object, readField); } }catch(IllegalAccessException ex){ throw new SerializationException("Error accessing field: " + field.getName() + " (" + type.getName() + ")", ex); }catch(SerializationException ex){ ex.addTrace(field.getName() + " (" + type.getName() + ")"); throw ex; }catch(RuntimeException runtimeEx){ SerializationException ex = new SerializationException(runtimeEx); ex.addTrace(child.trace()); ex.addTrace(field.getName() + " (" + type.getName() + ")"); throw ex; } } if(object 
instanceof UnlockableContent unlock && research != null){ //add research tech node String researchName; ItemStack[] customRequirements; //research can be a single string or an object with parent and requirements if(research.isString()){ researchName = research.asString(); customRequirements = null; }else{ researchName = research.getString("parent", null); customRequirements = research.has("requirements") ? parser.readValue(ItemStack[].class, research.get("requirements")) : null; } //remove old node TechNode lastNode = TechTree.all.find(t -> t.content == unlock); if(lastNode != null){ lastNode.remove(); } TechNode node = new TechNode(null, unlock, customRequirements == null ? ItemStack.empty : customRequirements); LoadedMod cur = currentMod; postreads.add(() -> { currentContent = unlock; currentMod = cur; //add custom objectives if(research.has("objectives")){ node.objectives.addAll(parser.readValue(Objective[].class, research.get("objectives"))); } //all items have a produce requirement unless already specified if((unlock instanceof Item || unlock instanceof Liquid) && !node.objectives.contains(o -> o instanceof Produce p && p.content == unlock)){ node.objectives.add(new Produce(unlock)); } //remove old node from parent if(node.parent != null){ node.parent.children.remove(node); } if(customRequirements == null){ node.setupRequirements(unlock.researchRequirements()); } if(research.has("planet")){ node.planet = find(ContentType.planet, research.getString("planet")); } if(research.getBoolean("root", false)){ node.name = research.getString("name", unlock.name); node.requiresUnlock = research.getBoolean("requiresUnlock", false); TechTree.roots.add(node); }else{ if(researchName != null){ //find parent node. 
TechNode parent = TechTree.all.find(t -> t.content.name.equals(researchName) || t.content.name.equals(currentMod.name + "-" + researchName) || t.content.name.equals(SaveVersion.mapFallback(researchName))); if(parent == null){ Log.warn("Content '" + researchName + "' isn't in the tech tree, but '" + unlock.name + "' requires it to be researched."); }else{ //add this node to the parent if(!parent.children.contains(node)){ parent.children.add(node); } //reparent the node node.parent = parent; } }else{ Log.warn(unlock.name + " is not a root node, and does not have a `parent: ` property. Ignoring."); } } }); } } /** Tries to resolve a class from the class type map. */ <T> Class<T> resolve(String base){ return resolve(base, null); } /** Tries to resolve a class from the class type map. */ <T> Class<T> resolve(String base, Class<T> def){ //no base class specified if((base == null || base.isEmpty()) && def != null) return def; //return mapped class if found in the global map var out = ClassMap.classes.get(!base.isEmpty() && Character.isLowerCase(base.charAt(0)) ? Strings.capitalize(base) : base); if(out != null) return (Class<T>)out; //try to resolve it as a raw class name if(base.indexOf('.') != -1){ try{ return (Class<T>)Class.forName(base); }catch(Exception ignored){ //try to use mod class loader try{ return (Class<T>)Class.forName(base, true, mods.mainLoader()); }catch(Exception ignore){} } } if(def != null){ Log.warn("[@] No type '" + base + "' found, defaulting to type '" + def.getSimpleName() + "'", currentContent == null ? 
currentMod.name : ""); return def; } throw new IllegalArgumentException("Type not found: " + base); } private interface FieldParser{ Object parse(Class<?> type, JsonValue value) throws Exception; } private interface TypeParser<T extends Content>{ T parse(String mod, String name, JsonValue value) throws Exception; } //intermediate class for parsing static class UnitReq{ public Block block; public ItemStack[] requirements = {}; @Nullable public UnitType previous; public float time = 60f * 10f; } public interface ParseListener{ void parsed(Class<?> type, JsonValue jsonData, Object result); } }
Anuken/Mindustry
core/src/mindustry/mod/ContentParser.java
957
package jadx.gui.treemodel; import java.util.Comparator; import java.util.Enumeration; import java.util.function.Predicate; import javax.swing.Icon; import javax.swing.JPopupMenu; import javax.swing.tree.DefaultMutableTreeNode; import org.fife.ui.rsyntaxtextarea.SyntaxConstants; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import jadx.api.ICodeInfo; import jadx.api.JavaNode; import jadx.api.metadata.ICodeNodeRef; import jadx.gui.ui.MainWindow; import jadx.gui.ui.panel.ContentPanel; import jadx.gui.ui.tab.TabbedPane; public abstract class JNode extends DefaultMutableTreeNode implements Comparable<JNode> { private static final long serialVersionUID = -5154479091781041008L; public abstract JClass getJParent(); /** * Return top level JClass or self if already at top. */ public JClass getRootClass() { return null; } public JavaNode getJavaNode() { return null; } public ICodeNodeRef getCodeNodeRef() { return null; } @Nullable public ContentPanel getContentPanel(TabbedPane tabbedPane) { return null; } public String getSyntaxName() { return SyntaxConstants.SYNTAX_STYLE_NONE; } @NotNull public ICodeInfo getCodeInfo() { return ICodeInfo.EMPTY; } public boolean isEditable() { return false; } public abstract Icon getIcon(); public String getName() { JavaNode javaNode = getJavaNode(); if (javaNode == null) { return null; } return javaNode.getName(); } public @Nullable JPopupMenu onTreePopupMenu(MainWindow mainWindow) { return null; } public abstract String makeString(); public String makeStringHtml() { return makeString(); } public String makeDescString() { return null; } public boolean hasDescString() { return false; } public String makeLongString() { return makeString(); } public String makeLongStringHtml() { return makeLongString(); } public boolean disableHtml() { return true; } public int getPos() { JavaNode javaNode = getJavaNode(); if (javaNode == null) { return -1; } return javaNode.getDefPos(); } public String getTooltip() { 
return makeLongStringHtml(); } public @Nullable JNode searchNode(Predicate<JNode> filter) { Enumeration<?> en = this.children(); while (en.hasMoreElements()) { JNode node = (JNode) en.nextElement(); if (filter.test(node)) { return node; } } return null; } /** * Remove and return first found node */ public @Nullable JNode removeNode(Predicate<JNode> filter) { Enumeration<?> en = this.children(); while (en.hasMoreElements()) { JNode node = (JNode) en.nextElement(); if (filter.test(node)) { this.remove(node); return node; } } return null; } private static final Comparator<JNode> COMPARATOR = Comparator .comparing(JNode::makeLongString) .thenComparingInt(JNode::getPos); @Override public int compareTo(@NotNull JNode other) { return COMPARATOR.compare(this, other); } @Override public String toString() { return makeString(); } }
skylot/jadx
jadx-gui/src/main/java/jadx/gui/treemodel/JNode.java
958
package mindustry.content; import arc.graphics.*; import arc.math.*; import arc.struct.*; import mindustry.*; import mindustry.entities.*; import mindustry.entities.abilities.*; import mindustry.entities.bullet.*; import mindustry.entities.effect.*; import mindustry.entities.part.DrawPart.*; import mindustry.entities.part.*; import mindustry.entities.pattern.*; import mindustry.gen.*; import mindustry.graphics.*; import mindustry.type.*; import mindustry.type.unit.*; import mindustry.world.*; import mindustry.world.blocks.*; import mindustry.world.blocks.campaign.*; import mindustry.world.blocks.defense.*; import mindustry.world.blocks.defense.turrets.*; import mindustry.world.blocks.distribution.*; import mindustry.world.blocks.environment.*; import mindustry.world.blocks.heat.*; import mindustry.world.blocks.legacy.*; import mindustry.world.blocks.liquid.*; import mindustry.world.blocks.logic.*; import mindustry.world.blocks.payloads.*; import mindustry.world.blocks.power.*; import mindustry.world.blocks.production.*; import mindustry.world.blocks.sandbox.*; import mindustry.world.blocks.storage.*; import mindustry.world.blocks.units.*; import mindustry.world.consumers.*; import mindustry.world.draw.*; import mindustry.world.meta.*; import static mindustry.Vars.*; import static mindustry.type.ItemStack.*; public class Blocks{ public static Block //environment air, spawn, cliff, deepwater, water, taintedWater, deepTaintedWater, tar, slag, cryofluid, stone, craters, charr, sand, darksand, dirt, mud, ice, snow, darksandTaintedWater, space, empty, dacite, rhyolite, rhyoliteCrater, roughRhyolite, regolith, yellowStone, redIce, redStone, denseRedStone, arkyciteFloor, arkyicStone, redmat, bluemat, stoneWall, dirtWall, sporeWall, iceWall, daciteWall, sporePine, snowPine, pine, shrubs, whiteTree, whiteTreeDead, sporeCluster, redweed, purbush, yellowCoral, rhyoliteVent, carbonVent, arkyicVent, yellowStoneVent, redStoneVent, crystallineVent, regolithWall, yellowStoneWall, 
rhyoliteWall, carbonWall, redIceWall, ferricStoneWall, beryllicStoneWall, arkyicWall, crystallineStoneWall, redStoneWall, redDiamondWall, ferricStone, ferricCraters, carbonStone, beryllicStone, crystallineStone, crystalFloor, yellowStonePlates, iceSnow, sandWater, darksandWater, duneWall, sandWall, moss, sporeMoss, shale, shaleWall, grass, salt, coreZone, //boulders shaleBoulder, sandBoulder, daciteBoulder, boulder, snowBoulder, basaltBoulder, carbonBoulder, ferricBoulder, beryllicBoulder, yellowStoneBoulder, arkyicBoulder, crystalCluster, vibrantCrystalCluster, crystalBlocks, crystalOrbs, crystallineBoulder, redIceBoulder, rhyoliteBoulder, redStoneBoulder, metalFloor, metalFloorDamaged, metalFloor2, metalFloor3, metalFloor4, metalFloor5, basalt, magmarock, hotrock, snowWall, saltWall, darkPanel1, darkPanel2, darkPanel3, darkPanel4, darkPanel5, darkPanel6, darkMetal, pebbles, tendrils, //ores oreCopper, oreLead, oreScrap, oreCoal, oreTitanium, oreThorium, oreBeryllium, oreTungsten, oreCrystalThorium, wallOreThorium, //wall ores wallOreBeryllium, graphiticWall, wallOreTungsten, //crafting siliconSmelter, siliconCrucible, kiln, graphitePress, plastaniumCompressor, multiPress, phaseWeaver, surgeSmelter, pyratiteMixer, blastMixer, cryofluidMixer, melter, separator, disassembler, sporePress, pulverizer, incinerator, coalCentrifuge, //crafting - erekir siliconArcFurnace, electrolyzer, oxidationChamber, atmosphericConcentrator, electricHeater, slagHeater, phaseHeater, heatRedirector, heatRouter, slagIncinerator, carbideCrucible, slagCentrifuge, surgeCrucible, cyanogenSynthesizer, phaseSynthesizer, heatReactor, //sandbox powerSource, powerVoid, itemSource, itemVoid, liquidSource, liquidVoid, payloadSource, payloadVoid, illuminator, heatSource, //defense copperWall, copperWallLarge, titaniumWall, titaniumWallLarge, plastaniumWall, plastaniumWallLarge, thoriumWall, thoriumWallLarge, door, doorLarge, phaseWall, phaseWallLarge, surgeWall, surgeWallLarge, //walls - erekir 
berylliumWall, berylliumWallLarge, tungstenWall, tungstenWallLarge, blastDoor, reinforcedSurgeWall, reinforcedSurgeWallLarge, carbideWall, carbideWallLarge, shieldedWall, mender, mendProjector, overdriveProjector, overdriveDome, forceProjector, shockMine, scrapWall, scrapWallLarge, scrapWallHuge, scrapWallGigantic, thruster, //ok, these names are getting ridiculous, but at least I don't have humongous walls yet //defense - erekir radar, buildTower, regenProjector, barrierProjector, shockwaveTower, //campaign only shieldProjector, largeShieldProjector, shieldBreaker, //transport conveyor, titaniumConveyor, plastaniumConveyor, armoredConveyor, distributor, junction, itemBridge, phaseConveyor, sorter, invertedSorter, router, overflowGate, underflowGate, massDriver, //transport - alternate duct, armoredDuct, ductRouter, overflowDuct, underflowDuct, ductBridge, ductUnloader, surgeConveyor, surgeRouter, unitCargoLoader, unitCargoUnloadPoint, //liquid mechanicalPump, rotaryPump, impulsePump, conduit, pulseConduit, platedConduit, liquidRouter, liquidContainer, liquidTank, liquidJunction, bridgeConduit, phaseConduit, //liquid - reinforced reinforcedPump, reinforcedConduit, reinforcedLiquidJunction, reinforcedBridgeConduit, reinforcedLiquidRouter, reinforcedLiquidContainer, reinforcedLiquidTank, //power combustionGenerator, thermalGenerator, steamGenerator, differentialGenerator, rtgGenerator, solarPanel, largeSolarPanel, thoriumReactor, impactReactor, battery, batteryLarge, powerNode, powerNodeLarge, surgeTower, diode, //power - erekir turbineCondenser, ventCondenser, chemicalCombustionChamber, pyrolysisGenerator, fluxReactor, neoplasiaReactor, beamNode, beamTower, beamLink, //production mechanicalDrill, pneumaticDrill, laserDrill, blastDrill, waterExtractor, oilExtractor, cultivator, cliffCrusher, plasmaBore, largePlasmaBore, impactDrill, eruptionDrill, //storage coreShard, coreFoundation, coreNucleus, vault, container, unloader, //storage - erekir coreBastion, 
coreCitadel, coreAcropolis, reinforcedContainer, reinforcedVault, //turrets duo, scatter, scorch, hail, arc, wave, lancer, swarmer, salvo, fuse, ripple, cyclone, foreshadow, spectre, meltdown, segment, parallax, tsunami, //turrets - erekir breach, diffuse, sublimate, titan, disperse, afflict, lustre, scathe, smite, malign, //units groundFactory, airFactory, navalFactory, additiveReconstructor, multiplicativeReconstructor, exponentialReconstructor, tetrativeReconstructor, repairPoint, repairTurret, //units - erekir tankFabricator, shipFabricator, mechFabricator, tankRefabricator, shipRefabricator, mechRefabricator, primeRefabricator, tankAssembler, shipAssembler, mechAssembler, basicAssemblerModule, unitRepairTower, //payloads payloadConveyor, payloadRouter, reinforcedPayloadConveyor, reinforcedPayloadRouter, payloadMassDriver, largePayloadMassDriver, smallDeconstructor, deconstructor, constructor, largeConstructor, payloadLoader, payloadUnloader, //logic message, switchBlock, microProcessor, logicProcessor, hyperProcessor, largeLogicDisplay, logicDisplay, memoryCell, memoryBank, canvas, reinforcedMessage, worldProcessor, worldCell, worldMessage, worldSwitch, //campaign launchPad, interplanetaryAccelerator ; public static void load(){ //region environment air = new AirBlock("air"); spawn = new SpawnBlock("spawn"); cliff = new Cliff("cliff"){{ inEditor = false; saveData = true; }}; //Registers build blocks //no reference is needed here since they can be looked up by name later for(int i = 1; i <= Vars.maxBlockSize; i++){ new ConstructBlock(i); } deepwater = new Floor("deep-water"){{ speedMultiplier = 0.2f; variants = 0; liquidDrop = Liquids.water; liquidMultiplier = 1.5f; isLiquid = true; status = StatusEffects.wet; statusDuration = 120f; drownTime = 200f; cacheLayer = CacheLayer.water; albedo = 0.9f; supportsOverlay = true; }}; water = new Floor("shallow-water"){{ speedMultiplier = 0.5f; variants = 0; status = StatusEffects.wet; statusDuration = 90f; liquidDrop = 
Liquids.water; isLiquid = true; cacheLayer = CacheLayer.water; albedo = 0.9f; supportsOverlay = true; }}; taintedWater = new Floor("tainted-water"){{ speedMultiplier = 0.5f; variants = 0; status = StatusEffects.wet; statusDuration = 90f; liquidDrop = Liquids.water; isLiquid = true; cacheLayer = CacheLayer.water; albedo = 0.9f; attributes.set(Attribute.spores, 0.15f); supportsOverlay = true; }}; deepTaintedWater = new Floor("deep-tainted-water"){{ speedMultiplier = 0.18f; variants = 0; status = StatusEffects.wet; statusDuration = 140f; drownTime = 200f; liquidDrop = Liquids.water; isLiquid = true; cacheLayer = CacheLayer.water; albedo = 0.9f; attributes.set(Attribute.spores, 0.15f); supportsOverlay = true; }}; darksandTaintedWater = new ShallowLiquid("darksand-tainted-water"){{ speedMultiplier = 0.75f; statusDuration = 60f; albedo = 0.9f; attributes.set(Attribute.spores, 0.1f); supportsOverlay = true; }}; sandWater = new ShallowLiquid("sand-water"){{ speedMultiplier = 0.8f; statusDuration = 50f; albedo = 0.9f; supportsOverlay = true; }}; darksandWater = new ShallowLiquid("darksand-water"){{ speedMultiplier = 0.8f; statusDuration = 50f; albedo = 0.9f; supportsOverlay = true; }}; tar = new Floor("tar"){{ drownTime = 230f; status = StatusEffects.tarred; statusDuration = 240f; speedMultiplier = 0.19f; variants = 0; liquidDrop = Liquids.oil; isLiquid = true; cacheLayer = CacheLayer.tar; }}; cryofluid = new Floor("pooled-cryofluid"){{ drownTime = 150f; status = StatusEffects.freezing; statusDuration = 240f; speedMultiplier = 0.5f; variants = 0; liquidDrop = Liquids.cryofluid; liquidMultiplier = 0.5f; isLiquid = true; cacheLayer = CacheLayer.cryofluid; emitLight = true; lightRadius = 25f; lightColor = Color.cyan.cpy().a(0.19f); }}; slag = new Floor("molten-slag"){{ drownTime = 230f; status = StatusEffects.melting; statusDuration = 240f; speedMultiplier = 0.19f; variants = 0; liquidDrop = Liquids.slag; isLiquid = true; cacheLayer = CacheLayer.slag; 
attributes.set(Attribute.heat, 0.85f); emitLight = true; lightRadius = 40f; lightColor = Color.orange.cpy().a(0.38f); }}; space = new Floor("space"){{ cacheLayer = CacheLayer.space; placeableOn = false; solid = true; variants = 0; canShadow = false; }}; empty = new EmptyFloor("empty"); stone = new Floor("stone"); craters = new Floor("crater-stone"){{ variants = 3; blendGroup = stone; }}; charr = new Floor("char"){{ blendGroup = stone; }}; basalt = new Floor("basalt"){{ attributes.set(Attribute.water, -0.25f); }}; hotrock = new Floor("hotrock"){{ attributes.set(Attribute.heat, 0.5f); attributes.set(Attribute.water, -0.5f); blendGroup = basalt; emitLight = true; lightRadius = 30f; lightColor = Color.orange.cpy().a(0.15f); }}; magmarock = new Floor("magmarock"){{ attributes.set(Attribute.heat, 0.75f); attributes.set(Attribute.water, -0.75f); blendGroup = basalt; emitLight = true; lightRadius = 50f; lightColor = Color.orange.cpy().a(0.3f); }}; sand = new Floor("sand-floor"){{ itemDrop = Items.sand; playerUnmineable = true; attributes.set(Attribute.oil, 0.7f); }}; darksand = new Floor("darksand"){{ itemDrop = Items.sand; playerUnmineable = true; attributes.set(Attribute.oil, 1.5f); }}; dirt = new Floor("dirt"); mud = new Floor("mud"){{ speedMultiplier = 0.6f; variants = 3; status = StatusEffects.muddy; statusDuration = 30f; attributes.set(Attribute.water, 1f); cacheLayer = CacheLayer.mud; walkSound = Sounds.mud; walkSoundVolume = 0.08f; walkSoundPitchMin = 0.4f; walkSoundPitchMax = 0.5f; }}; ((ShallowLiquid)darksandTaintedWater).set(Blocks.taintedWater, Blocks.darksand); ((ShallowLiquid)sandWater).set(Blocks.water, Blocks.sand); ((ShallowLiquid)darksandWater).set(Blocks.water, Blocks.darksand); dacite = new Floor("dacite"); rhyolite = new Floor("rhyolite"){{ attributes.set(Attribute.water, -1f); }}; rhyoliteCrater = new Floor("rhyolite-crater"){{ attributes.set(Attribute.water, -1f); blendGroup = rhyolite; }}; roughRhyolite = new Floor("rough-rhyolite"){{ 
attributes.set(Attribute.water, -1f); variants = 3; }}; regolith = new Floor("regolith"){{ attributes.set(Attribute.water, -1f); }}; yellowStone = new Floor("yellow-stone"){{ attributes.set(Attribute.water, -1f); }}; carbonStone = new Floor("carbon-stone"){{ attributes.set(Attribute.water, -1f); variants = 4; }}; ferricStone = new Floor("ferric-stone"){{ attributes.set(Attribute.water, -1f); }}; ferricCraters = new Floor("ferric-craters"){{ variants = 3; attributes.set(Attribute.water, -1f); blendGroup = ferricStone; }}; beryllicStone = new Floor("beryllic-stone"){{ variants = 4; }}; crystallineStone = new Floor("crystalline-stone"){{ variants = 5; }}; crystalFloor = new Floor("crystal-floor"){{ variants = 4; }}; yellowStonePlates = new Floor("yellow-stone-plates"){{ variants = 3; }}; redStone = new Floor("red-stone"){{ attributes.set(Attribute.water, -1f); variants = 4; }}; denseRedStone = new Floor("dense-red-stone"){{ attributes.set(Attribute.water, -1f); variants = 4; }}; redIce = new Floor("red-ice"){{ dragMultiplier = 0.4f; speedMultiplier = 0.9f; attributes.set(Attribute.water, 0.4f); }}; arkyciteFloor = new Floor("arkycite-floor"){{ speedMultiplier = 0.3f; variants = 0; liquidDrop = Liquids.arkycite; isLiquid = true; //TODO no status for now //status = StatusEffects.slow; //statusDuration = 120f; drownTime = 200f; cacheLayer = CacheLayer.arkycite; albedo = 0.9f; }}; arkyicStone = new Floor("arkyic-stone"){{ variants = 3; }}; rhyoliteVent = new SteamVent("rhyolite-vent"){{ parent = blendGroup = rhyolite; attributes.set(Attribute.steam, 1f); }}; carbonVent = new SteamVent("carbon-vent"){{ parent = blendGroup = carbonStone; attributes.set(Attribute.steam, 1f); }}; arkyicVent = new SteamVent("arkyic-vent"){{ parent = blendGroup = arkyicStone; attributes.set(Attribute.steam, 1f); }}; yellowStoneVent = new SteamVent("yellow-stone-vent"){{ parent = blendGroup = yellowStone; attributes.set(Attribute.steam, 1f); }}; redStoneVent = new SteamVent("red-stone-vent"){{ 
parent = blendGroup = denseRedStone; attributes.set(Attribute.steam, 1f); }}; crystallineVent = new SteamVent("crystalline-vent"){{ parent = blendGroup = crystallineStone; attributes.set(Attribute.steam, 1f); }}; redmat = new Floor("redmat"); bluemat = new Floor("bluemat"); grass = new Floor("grass"){{ //TODO grass needs a bush? classic had grass bushes. attributes.set(Attribute.water, 0.1f); }}; salt = new Floor("salt"){{ variants = 0; attributes.set(Attribute.water, -0.3f); attributes.set(Attribute.oil, 0.3f); }}; snow = new Floor("snow"){{ attributes.set(Attribute.water, 0.2f); albedo = 0.7f; }}; ice = new Floor("ice"){{ dragMultiplier = 0.35f; speedMultiplier = 0.9f; attributes.set(Attribute.water, 0.4f); albedo = 0.65f; }}; iceSnow = new Floor("ice-snow"){{ dragMultiplier = 0.6f; variants = 3; attributes.set(Attribute.water, 0.3f); albedo = 0.6f; }}; shale = new Floor("shale"){{ variants = 3; attributes.set(Attribute.oil, 1.6f); }}; moss = new Floor("moss"){{ variants = 3; attributes.set(Attribute.spores, 0.15f); }}; coreZone = new Floor("core-zone"){{ variants = 0; allowCorePlacement = true; }}; sporeMoss = new Floor("spore-moss"){{ variants = 3; attributes.set(Attribute.spores, 0.3f); }}; stoneWall = new StaticWall("stone-wall"){{ attributes.set(Attribute.sand, 1f); }}; sporeWall = new StaticWall("spore-wall"){{ taintedWater.asFloor().wall = deepTaintedWater.asFloor().wall = sporeMoss.asFloor().wall = this; }}; dirtWall = new StaticWall("dirt-wall"); daciteWall = new StaticWall("dacite-wall"); iceWall = new StaticWall("ice-wall"){{ iceSnow.asFloor().wall = this; albedo = 0.6f; }}; snowWall = new StaticWall("snow-wall"); duneWall = new StaticWall("dune-wall"){{ hotrock.asFloor().wall = magmarock.asFloor().wall = basalt.asFloor().wall = darksandWater.asFloor().wall = darksandTaintedWater.asFloor().wall = this; attributes.set(Attribute.sand, 2f); }}; regolithWall = new StaticWall("regolith-wall"){{ regolith.asFloor().wall = this; attributes.set(Attribute.sand, 
1f); }}; yellowStoneWall = new StaticWall("yellow-stone-wall"){{ yellowStone.asFloor().wall = slag.asFloor().wall = yellowStonePlates.asFloor().wall = this; attributes.set(Attribute.sand, 1.5f); }}; rhyoliteWall = new StaticWall("rhyolite-wall"){{ rhyolite.asFloor().wall = rhyoliteCrater.asFloor().wall = roughRhyolite.asFloor().wall = this; attributes.set(Attribute.sand, 1f); }}; carbonWall = new StaticWall("carbon-wall"){{ carbonStone.asFloor().wall = this; attributes.set(Attribute.sand, 0.7f); }}; ferricStoneWall = new StaticWall("ferric-stone-wall"){{ ferricStone.asFloor().wall = this; attributes.set(Attribute.sand, 0.5f); }}; beryllicStoneWall = new StaticWall("beryllic-stone-wall"){{ beryllicStone.asFloor().wall = this; attributes.set(Attribute.sand, 1.2f); }}; arkyicWall = new StaticWall("arkyic-wall"){{ variants = 3; arkyciteFloor.asFloor().wall = arkyicStone.asFloor().wall = this; }}; crystallineStoneWall = new StaticWall("crystalline-stone-wall"){{ variants = 4; crystallineStone.asFloor().wall = crystalFloor.asFloor().wall = this; }}; redIceWall = new StaticWall("red-ice-wall"){{ redIce.asFloor().wall = this; }}; redStoneWall = new StaticWall("red-stone-wall"){{ redStone.asFloor().wall = denseRedStone.asFloor().wall = this; attributes.set(Attribute.sand, 1.5f); }}; redDiamondWall = new StaticTree("red-diamond-wall"){{ variants = 3; }}; sandWall = new StaticWall("sand-wall"){{ sandWater.asFloor().wall = water.asFloor().wall = deepwater.asFloor().wall = sand.asFloor().wall = this; attributes.set(Attribute.sand, 2f); }}; saltWall = new StaticWall("salt-wall"); shrubs = new StaticWall("shrubs"); shaleWall = new StaticWall("shale-wall"); sporePine = new StaticTree("spore-pine"){{ moss.asFloor().wall = this; }}; snowPine = new StaticTree("snow-pine"); pine = new StaticTree("pine"); whiteTreeDead = new TreeBlock("white-tree-dead"); whiteTree = new TreeBlock("white-tree"); sporeCluster = new Prop("spore-cluster"){{ variants = 3; breakSound = Sounds.plantBreak; }}; 
redweed = new Seaweed("redweed"){{ variants = 3; redmat.asFloor().decoration = this; }}; purbush = new SeaBush("pur-bush"){{ bluemat.asFloor().decoration = this; }}; yellowCoral = new SeaBush("yellowcoral"){{ lobesMin = 2; lobesMax = 3; magMax = 8f; magMin = 2f; origin = 0.3f; spread = 40f; sclMin = 60f; sclMax = 100f; }}; boulder = new Prop("boulder"){{ variants = 2; stone.asFloor().decoration = craters.asFloor().decoration = charr.asFloor().decoration = this; }}; snowBoulder = new Prop("snow-boulder"){{ variants = 2; snow.asFloor().decoration = ice.asFloor().decoration = iceSnow.asFloor().decoration = salt.asFloor().decoration = this; }}; shaleBoulder = new Prop("shale-boulder"){{ variants = 2; shale.asFloor().decoration = this; }}; sandBoulder = new Prop("sand-boulder"){{ variants = 2; sand.asFloor().decoration = this; }}; daciteBoulder = new Prop("dacite-boulder"){{ variants = 2; dacite.asFloor().decoration = this; }}; basaltBoulder = new Prop("basalt-boulder"){{ variants = 2; basalt.asFloor().decoration = hotrock.asFloor().decoration = darksand.asFloor().decoration = magmarock.asFloor().decoration = this; }}; carbonBoulder = new Prop("carbon-boulder"){{ variants = 2; carbonStone.asFloor().decoration = this; }}; ferricBoulder = new Prop("ferric-boulder"){{ variants = 2; ferricStone.asFloor().decoration = ferricCraters.asFloor().decoration = this; }}; beryllicBoulder = new Prop("beryllic-boulder"){{ variants = 2; beryllicStone.asFloor().decoration = this; }}; yellowStoneBoulder = new Prop("yellow-stone-boulder"){{ variants = 2; yellowStone.asFloor().decoration = regolith.asFloor().decoration = yellowStonePlates.asFloor().decoration = this; }}; //1px outline + 4.50 gaussian shadow in gimp arkyicBoulder = new Prop("arkyic-boulder"){{ variants = 3; customShadow = true; arkyicStone.asFloor().decoration = this; }}; crystalCluster = new TallBlock("crystal-cluster"){{ variants = 3; clipSize = 128f; }}; vibrantCrystalCluster = new TallBlock("vibrant-crystal-cluster"){{ 
variants = 3; clipSize = 128f; }}; crystalBlocks = new TallBlock("crystal-blocks"){{ variants = 3; clipSize = 128f; shadowAlpha = 0.5f; shadowOffset = -2.5f; }}; crystalOrbs = new TallBlock("crystal-orbs"){{ variants = 3; clipSize = 128f; shadowAlpha = 0.5f; shadowOffset = -2.5f; }}; crystallineBoulder = new Prop("crystalline-boulder"){{ variants = 2; crystallineStone.asFloor().decoration = this; }}; redIceBoulder = new Prop("red-ice-boulder"){{ variants = 3; redIce.asFloor().decoration = this; }}; rhyoliteBoulder = new Prop("rhyolite-boulder"){{ variants = 3; rhyolite.asFloor().decoration = roughRhyolite.asFloor().decoration = this; }}; redStoneBoulder = new Prop("red-stone-boulder"){{ variants = 4; denseRedStone.asFloor().decoration = redStone.asFloor().decoration = this; }}; metalFloor = new Floor("metal-floor", 0); metalFloorDamaged = new Floor("metal-floor-damaged", 3); metalFloor2 = new Floor("metal-floor-2", 0); metalFloor3 = new Floor("metal-floor-3", 0); metalFloor4 = new Floor("metal-floor-4", 0); metalFloor5 = new Floor("metal-floor-5", 0); darkPanel1 = new Floor("dark-panel-1", 0); darkPanel2 = new Floor("dark-panel-2", 0); darkPanel3 = new Floor("dark-panel-3", 0); darkPanel4 = new Floor("dark-panel-4", 0); darkPanel5 = new Floor("dark-panel-5", 0); darkPanel6 = new Floor("dark-panel-6", 0); darkMetal = new StaticWall("dark-metal"); Seq.with(metalFloor, metalFloorDamaged, metalFloor2, metalFloor3, metalFloor4, metalFloor5, darkPanel1, darkPanel2, darkPanel3, darkPanel4, darkPanel5, darkPanel6) .each(b -> b.asFloor().wall = darkMetal); pebbles = new OverlayFloor("pebbles"); tendrils = new OverlayFloor("tendrils"); //endregion //region ore oreCopper = new OreBlock(Items.copper){{ oreDefault = true; oreThreshold = 0.81f; oreScale = 23.47619f; }}; oreLead = new OreBlock(Items.lead){{ oreDefault = true; oreThreshold = 0.828f; oreScale = 23.952381f; }}; oreScrap = new OreBlock(Items.scrap); oreCoal = new OreBlock(Items.coal){{ oreDefault = true; oreThreshold 
= 0.846f; oreScale = 24.428572f; }}; oreTitanium = new OreBlock(Items.titanium){{ oreDefault = true; oreThreshold = 0.864f; oreScale = 24.904762f; }}; oreThorium = new OreBlock(Items.thorium){{ oreDefault = true; oreThreshold = 0.882f; oreScale = 25.380953f; }}; oreBeryllium = new OreBlock(Items.beryllium); oreTungsten = new OreBlock(Items.tungsten); oreCrystalThorium = new OreBlock("ore-crystal-thorium", Items.thorium); wallOreThorium = new OreBlock("ore-wall-thorium", Items.thorium){{ wallOre = true; }}; wallOreBeryllium = new OreBlock("ore-wall-beryllium", Items.beryllium){{ wallOre = true; }}; graphiticWall = new StaticWall("graphitic-wall"){{ itemDrop = Items.graphite; variants = 3; }}; //TODO merge with standard ore? wallOreTungsten = new OreBlock("ore-wall-tungsten", Items.tungsten){{ wallOre = true; }}; //endregion //region crafting graphitePress = new GenericCrafter("graphite-press"){{ requirements(Category.crafting, with(Items.copper, 75, Items.lead, 30)); craftEffect = Fx.pulverizeMedium; outputItem = new ItemStack(Items.graphite, 1); craftTime = 90f; size = 2; hasItems = true; consumeItem(Items.coal, 2); }}; multiPress = new GenericCrafter("multi-press"){{ requirements(Category.crafting, with(Items.titanium, 100, Items.silicon, 25, Items.lead, 100, Items.graphite, 50)); craftEffect = Fx.pulverizeMedium; outputItem = new ItemStack(Items.graphite, 2); craftTime = 30f; itemCapacity = 20; size = 3; hasItems = true; hasLiquids = true; hasPower = true; consumePower(1.8f); consumeItem(Items.coal, 3); consumeLiquid(Liquids.water, 0.1f); }}; siliconSmelter = new GenericCrafter("silicon-smelter"){{ requirements(Category.crafting, with(Items.copper, 30, Items.lead, 25)); craftEffect = Fx.smeltsmoke; outputItem = new ItemStack(Items.silicon, 1); craftTime = 40f; size = 2; hasPower = true; hasLiquids = false; drawer = new DrawMulti(new DrawDefault(), new DrawFlame(Color.valueOf("ffef99"))); ambientSound = Sounds.smelter; ambientSoundVolume = 0.07f; 
consumeItems(with(Items.coal, 1, Items.sand, 2)); consumePower(0.50f); }}; siliconCrucible = new AttributeCrafter("silicon-crucible"){{ requirements(Category.crafting, with(Items.titanium, 120, Items.metaglass, 80, Items.plastanium, 35, Items.silicon, 60)); craftEffect = Fx.smeltsmoke; outputItem = new ItemStack(Items.silicon, 8); craftTime = 90f; size = 3; hasPower = true; hasLiquids = false; itemCapacity = 30; boostScale = 0.15f; drawer = new DrawMulti(new DrawDefault(), new DrawFlame(Color.valueOf("ffef99"))); ambientSound = Sounds.smelter; ambientSoundVolume = 0.07f; consumeItems(with(Items.coal, 4, Items.sand, 6, Items.pyratite, 1)); consumePower(4f); }}; kiln = new GenericCrafter("kiln"){{ requirements(Category.crafting, with(Items.copper, 60, Items.graphite, 30, Items.lead, 30)); craftEffect = Fx.smeltsmoke; outputItem = new ItemStack(Items.metaglass, 1); craftTime = 30f; size = 2; hasPower = hasItems = true; drawer = new DrawMulti(new DrawDefault(), new DrawFlame(Color.valueOf("ffc099"))); ambientSound = Sounds.smelter; ambientSoundVolume = 0.07f; consumeItems(with(Items.lead, 1, Items.sand, 1)); consumePower(0.60f); }}; plastaniumCompressor = new GenericCrafter("plastanium-compressor"){{ requirements(Category.crafting, with(Items.silicon, 80, Items.lead, 115, Items.graphite, 60, Items.titanium, 80)); hasItems = true; liquidCapacity = 60f; craftTime = 60f; outputItem = new ItemStack(Items.plastanium, 1); size = 2; health = 320; hasPower = hasLiquids = true; craftEffect = Fx.formsmoke; updateEffect = Fx.plasticburn; drawer = new DrawMulti(new DrawDefault(), new DrawFade()); consumeLiquid(Liquids.oil, 0.25f); consumePower(3f); consumeItem(Items.titanium, 2); }}; phaseWeaver = new GenericCrafter("phase-weaver"){{ requirements(Category.crafting, with(Items.silicon, 130, Items.lead, 120, Items.thorium, 75)); craftEffect = Fx.smeltsmoke; outputItem = new ItemStack(Items.phaseFabric, 1); craftTime = 120f; size = 2; hasPower = true; drawer = new DrawMulti(new 
DrawRegion("-bottom"), new DrawWeave(), new DrawDefault()); envEnabled |= Env.space; ambientSound = Sounds.techloop; ambientSoundVolume = 0.02f; consumeItems(with(Items.thorium, 4, Items.sand, 10)); consumePower(5f); itemCapacity = 20; }}; surgeSmelter = new GenericCrafter("surge-smelter"){{ requirements(Category.crafting, with(Items.silicon, 80, Items.lead, 80, Items.thorium, 70)); craftEffect = Fx.smeltsmoke; outputItem = new ItemStack(Items.surgeAlloy, 1); craftTime = 75f; size = 3; hasPower = true; itemCapacity = 20; drawer = new DrawMulti(new DrawDefault(), new DrawFlame()); consumePower(4f); consumeItems(with(Items.copper, 3, Items.lead, 4, Items.titanium, 2, Items.silicon, 3)); }}; cryofluidMixer = new GenericCrafter("cryofluid-mixer"){{ requirements(Category.crafting, with(Items.lead, 65, Items.silicon, 40, Items.titanium, 60)); outputLiquid = new LiquidStack(Liquids.cryofluid, 12f / 60f); size = 2; hasPower = true; hasItems = true; hasLiquids = true; rotate = false; solid = true; outputsLiquid = true; envEnabled = Env.any; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawLiquidTile(Liquids.water), new DrawLiquidTile(Liquids.cryofluid){{drawLiquidLight = true;}}, new DrawDefault()); liquidCapacity = 24f; craftTime = 120; lightLiquid = Liquids.cryofluid; consumePower(1f); consumeItem(Items.titanium); consumeLiquid(Liquids.water, 12f / 60f); }}; pyratiteMixer = new GenericCrafter("pyratite-mixer"){{ requirements(Category.crafting, with(Items.copper, 50, Items.lead, 25)); hasItems = true; hasPower = true; outputItem = new ItemStack(Items.pyratite, 1); envEnabled |= Env.space; size = 2; consumePower(0.20f); consumeItems(with(Items.coal, 1, Items.lead, 2, Items.sand, 2)); }}; blastMixer = new GenericCrafter("blast-mixer"){{ requirements(Category.crafting, with(Items.lead, 30, Items.titanium, 20)); hasItems = true; hasPower = true; outputItem = new ItemStack(Items.blastCompound, 1); size = 2; envEnabled |= Env.space; consumeItems(with(Items.pyratite, 1, 
Items.sporePod, 1)); consumePower(0.40f); }}; melter = new GenericCrafter("melter"){{ requirements(Category.crafting, with(Items.copper, 30, Items.lead, 35, Items.graphite, 45)); health = 200; outputLiquid = new LiquidStack(Liquids.slag, 12f / 60f); craftTime = 10f; hasLiquids = hasPower = true; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawLiquidTile(), new DrawDefault()); consumePower(1f); consumeItem(Items.scrap, 1); }}; separator = new Separator("separator"){{ requirements(Category.crafting, with(Items.copper, 30, Items.titanium, 25)); results = with( Items.copper, 5, Items.lead, 3, Items.graphite, 2, Items.titanium, 2 ); hasPower = true; craftTime = 35f; size = 2; consumePower(1.1f); consumeLiquid(Liquids.slag, 4f / 60f); drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawLiquidTile(), new DrawRegion("-spinner", 3, true), new DrawDefault()); }}; disassembler = new Separator("disassembler"){{ requirements(Category.crafting, with(Items.plastanium, 40, Items.titanium, 100, Items.silicon, 150, Items.thorium, 80)); results = with( Items.sand, 2, Items.graphite, 1, Items.titanium, 1, Items.thorium, 1 ); hasPower = true; craftTime = 15f; size = 3; itemCapacity = 20; consumePower(4f); consumeItem(Items.scrap); consumeLiquid(Liquids.slag, 0.12f); drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawLiquidTile(), new DrawRegion("-spinner", 3, true), new DrawDefault()); }}; sporePress = new GenericCrafter("spore-press"){{ requirements(Category.crafting, with(Items.lead, 35, Items.silicon, 30)); liquidCapacity = 60f; craftTime = 20f; outputLiquid = new LiquidStack(Liquids.oil, 18f / 60f); size = 2; health = 320; hasLiquids = true; hasPower = true; craftEffect = Fx.none; drawer = new DrawMulti( new DrawRegion("-bottom"), new DrawPistons(){{ sinMag = 1f; }}, new DrawDefault(), new DrawLiquidRegion(), new DrawRegion("-top") ); consumeItem(Items.sporePod, 1); consumePower(0.7f); }}; pulverizer = new GenericCrafter("pulverizer"){{ 
requirements(Category.crafting, with(Items.copper, 30, Items.lead, 25)); outputItem = new ItemStack(Items.sand, 1); craftEffect = Fx.pulverize; craftTime = 40f; updateEffect = Fx.pulverizeSmall; hasItems = hasPower = true; drawer = new DrawMulti(new DrawDefault(), new DrawRegion("-rotator"){{ spinSprite = true; rotateSpeed = 2f; }}, new DrawRegion("-top")); ambientSound = Sounds.grinding; ambientSoundVolume = 0.025f; consumeItem(Items.scrap, 1); consumePower(0.50f); }}; coalCentrifuge = new GenericCrafter("coal-centrifuge"){{ requirements(Category.crafting, with(Items.titanium, 20, Items.graphite, 40, Items.lead, 30)); craftEffect = Fx.coalSmeltsmoke; outputItem = new ItemStack(Items.coal, 1); craftTime = 30f; size = 2; hasPower = hasItems = hasLiquids = true; rotateDraw = false; consumeLiquid(Liquids.oil, 0.1f); consumePower(0.7f); }}; incinerator = new Incinerator("incinerator"){{ requirements(Category.crafting, with(Items.graphite, 5, Items.lead, 15)); health = 90; envEnabled |= Env.space; consumePower(0.50f); }}; //erekir siliconArcFurnace = new GenericCrafter("silicon-arc-furnace"){{ requirements(Category.crafting, with(Items.beryllium, 70, Items.graphite, 80)); craftEffect = Fx.none; outputItem = new ItemStack(Items.silicon, 4); craftTime = 50f; size = 3; hasPower = true; hasLiquids = false; envEnabled |= Env.space | Env.underwater; envDisabled = Env.none; itemCapacity = 30; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawArcSmelt(), new DrawDefault()); fogRadius = 3; researchCost = with(Items.beryllium, 150, Items.graphite, 50); ambientSound = Sounds.smelter; ambientSoundVolume = 0.12f; consumeItems(with(Items.graphite, 1, Items.sand, 4)); consumePower(6f); }}; electrolyzer = new GenericCrafter("electrolyzer"){{ requirements(Category.crafting, with(Items.silicon, 50, Items.graphite, 40, Items.beryllium, 130, Items.tungsten, 80)); size = 3; researchCostMultiplier = 1.2f; craftTime = 10f; rotate = true; invertFlip = true; group = BlockGroup.liquids; 
itemCapacity = 0; liquidCapacity = 50f; consumeLiquid(Liquids.water, 10f / 60f); consumePower(1f); drawer = new DrawMulti( new DrawRegion("-bottom"), new DrawLiquidTile(Liquids.water, 2f), new DrawBubbles(Color.valueOf("7693e3")){{ sides = 10; recurrence = 3f; spread = 6; radius = 1.5f; amount = 20; }}, new DrawRegion(), new DrawLiquidOutputs(), new DrawGlowRegion(){{ alpha = 0.7f; color = Color.valueOf("c4bdf3"); glowIntensity = 0.3f; glowScale = 6f; }} ); ambientSound = Sounds.electricHum; ambientSoundVolume = 0.08f; regionRotated1 = 3; outputLiquids = LiquidStack.with(Liquids.ozone, 4f / 60, Liquids.hydrogen, 6f / 60); liquidOutputDirections = new int[]{1, 3}; }}; atmosphericConcentrator = new HeatCrafter("atmospheric-concentrator"){{ requirements(Category.crafting, with(Items.oxide, 60, Items.beryllium, 180, Items.silicon, 150)); size = 3; hasLiquids = true; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawLiquidTile(Liquids.nitrogen, 4.1f), new DrawDefault(), new DrawHeatInput(), new DrawParticles(){{ color = Color.valueOf("d4f0ff"); alpha = 0.6f; particleSize = 4f; particles = 10; particleRad = 12f; particleLife = 140f; }}); researchCostMultiplier = 1.1f; itemCapacity = 0; liquidCapacity = 40f; consumePower(2f); ambientSound = Sounds.extractLoop; ambientSoundVolume = 0.06f; heatRequirement = 6f; outputLiquid = new LiquidStack(Liquids.nitrogen, 4f / 60f); researchCost = with(Items.silicon, 2000, Items.oxide, 900, Items.beryllium, 2400); }}; oxidationChamber = new HeatProducer("oxidation-chamber"){{ requirements(Category.crafting, with(Items.tungsten, 120, Items.graphite, 80, Items.silicon, 100, Items.beryllium, 120)); size = 3; outputItem = new ItemStack(Items.oxide, 1); researchCostMultiplier = 1.1f; consumeLiquid(Liquids.ozone, 2f / 60f); consumeItem(Items.beryllium); consumePower(0.5f); rotateDraw = false; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawLiquidRegion(), new DrawDefault(), new DrawHeatOutput()); ambientSound = 
Sounds.extractLoop; ambientSoundVolume = 0.08f; regionRotated1 = 2; craftTime = 60f * 2f; liquidCapacity = 30f; heatOutput = 5f; }}; electricHeater = new HeatProducer("electric-heater"){{ requirements(Category.crafting, with(Items.tungsten, 30, Items.oxide, 30)); researchCostMultiplier = 4f; drawer = new DrawMulti(new DrawDefault(), new DrawHeatOutput()); rotateDraw = false; size = 2; heatOutput = 3f; regionRotated1 = 1; ambientSound = Sounds.hum; itemCapacity = 0; consumePower(100f / 60f); }}; slagHeater = new HeatProducer("slag-heater"){{ requirements(Category.crafting, with(Items.tungsten, 50, Items.oxide, 20, Items.beryllium, 20)); researchCostMultiplier = 4f; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawLiquidTile(Liquids.slag), new DrawDefault(), new DrawHeatOutput()); size = 3; itemCapacity = 0; liquidCapacity = 40f; rotateDraw = false; regionRotated1 = 1; ambientSound = Sounds.hum; consumeLiquid(Liquids.slag, 40f / 60f); heatOutput = 8f; researchCost = with(Items.tungsten, 1200, Items.oxide, 900, Items.beryllium, 2400); }}; phaseHeater = new HeatProducer("phase-heater"){{ requirements(Category.crafting, with(Items.oxide, 30, Items.carbide, 30, Items.beryllium, 30)); drawer = new DrawMulti(new DrawDefault(), new DrawHeatOutput()); size = 2; heatOutput = 15f; craftTime = 60f * 8f; ambientSound = Sounds.hum; consumeItem(Items.phaseFabric); }}; heatRedirector = new HeatConductor("heat-redirector"){{ requirements(Category.crafting, with(Items.tungsten, 10, Items.graphite, 10)); researchCostMultiplier = 10f; group = BlockGroup.heat; size = 3; drawer = new DrawMulti(new DrawDefault(), new DrawHeatOutput(), new DrawHeatInput("-heat")); regionRotated1 = 1; }}; heatRouter = new HeatConductor("heat-router"){{ requirements(Category.crafting, with(Items.tungsten, 15, Items.graphite, 10)); researchCostMultiplier = 10f; group = BlockGroup.heat; size = 3; drawer = new DrawMulti(new DrawDefault(), new DrawHeatOutput(-1, false), new DrawHeatOutput(), new 
DrawHeatOutput(1, false), new DrawHeatInput("-heat")); regionRotated1 = 1; splitHeat = true; }}; slagIncinerator = new ItemIncinerator("slag-incinerator"){{ requirements(Category.crafting, with(Items.tungsten, 15)); size = 1; consumeLiquid(Liquids.slag, 2f / 60f); }}; carbideCrucible = new HeatCrafter("carbide-crucible"){{ requirements(Category.crafting, with(Items.tungsten, 110, Items.thorium, 150, Items.oxide, 60)); craftEffect = Fx.none; outputItem = new ItemStack(Items.carbide, 1); craftTime = 60f * 2.25f; size = 3; itemCapacity = 20; hasPower = hasItems = true; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawCrucibleFlame(), new DrawDefault(), new DrawHeatInput()); ambientSound = Sounds.smelter; ambientSoundVolume = 0.09f; heatRequirement = 10f; consumeItems(with(Items.tungsten, 2, Items.graphite, 3)); consumePower(2f); }}; slagCentrifuge = new GenericCrafter("slag-centrifuge"){{ requirements(Category.crafting, BuildVisibility.debugOnly, with(Items.carbide, 70, Items.graphite, 60, Items.silicon, 40, Items.oxide, 40)); consumePower(2f / 60f); size = 3; consumeItem(Items.sand, 1); consumeLiquid(Liquids.slag, 40f / 60f); liquidCapacity = 80f; var drawers = Seq.with(new DrawRegion("-bottom"), new DrawLiquidRegion(Liquids.slag){{ alpha = 0.7f; }}); for(int i = 0; i < 5; i++){ int fi = i; drawers.add(new DrawGlowRegion(-1f){{ glowIntensity = 0.3f; rotateSpeed = 3f / (1f + fi/1.4f); alpha = 0.4f; color = new Color(1f, 0.5f, 0.5f, 1f); }}); } drawer = new DrawMulti(drawers.add(new DrawDefault())); craftTime = 60f * 2f; outputLiquid = new LiquidStack(Liquids.gallium, 1f / 60f); //TODO something else? 
//outputItem = new ItemStack(Items.scrap, 1); }}; surgeCrucible = new HeatCrafter("surge-crucible"){{ requirements(Category.crafting, with(Items.silicon, 100, Items.graphite, 80, Items.tungsten, 80, Items.oxide, 80)); size = 3; itemCapacity = 20; heatRequirement = 10f; craftTime = 60f * 3f; liquidCapacity = 80f * 5; ambientSound = Sounds.smelter; ambientSoundVolume = 0.9f; outputItem = new ItemStack(Items.surgeAlloy, 1); craftEffect = new RadialEffect(Fx.surgeCruciSmoke, 4, 90f, 5f); drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawCircles(){{ color = Color.valueOf("ffc073").a(0.24f); strokeMax = 2.5f; radius = 10f; amount = 3; }}, new DrawLiquidRegion(Liquids.slag), new DrawDefault(), new DrawHeatInput(), new DrawHeatRegion(){{ color = Color.valueOf("ff6060ff"); }}, new DrawHeatRegion("-vents"){{ color.a = 1f; }}); consumeItem(Items.silicon, 3); //TODO consume hydrogen/ozone? consumeLiquid(Liquids.slag, 40f / 60f); consumePower(2f); }}; cyanogenSynthesizer = new HeatCrafter("cyanogen-synthesizer"){{ requirements(Category.crafting, with(Items.carbide, 50, Items.silicon, 80, Items.beryllium, 90)); heatRequirement = 5f; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawLiquidTile(Liquids.cyanogen), new DrawParticles(){{ color = Color.valueOf("89e8b6"); alpha = 0.5f; particleSize = 3f; particles = 10; particleRad = 9f; particleLife = 200f; reverse = true; particleSizeInterp = Interp.one; }}, new DrawDefault(), new DrawHeatInput(), new DrawHeatRegion("-heat-top")); size = 3; ambientSound = Sounds.extractLoop; ambientSoundVolume = 0.08f; liquidCapacity = 80f; outputLiquid = new LiquidStack(Liquids.cyanogen, 3f / 60f); //consumeLiquids(LiquidStack.with(Liquids.hydrogen, 3f / 60f, Liquids.nitrogen, 2f / 60f)); consumeLiquid(Liquids.arkycite, 40f / 60f); consumeItem(Items.graphite); consumePower(2f); }}; phaseSynthesizer = new HeatCrafter("phase-synthesizer"){{ requirements(Category.crafting, with(Items.carbide, 90, Items.silicon, 100, Items.thorium, 100, 
Items.tungsten, 200)); size = 3; itemCapacity = 40; heatRequirement = 8f; craftTime = 60f * 2f; liquidCapacity = 10f * 4; ambientSound = Sounds.techloop; ambientSoundVolume = 0.04f; outputItem = new ItemStack(Items.phaseFabric, 1); drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawSpikes(){{ color = Color.valueOf("ffd59e"); stroke = 1.5f; layers = 2; amount = 12; rotateSpeed = 0.5f; layerSpeed = -0.9f; }}, new DrawMultiWeave(){{ glowColor = new Color(1f, 0.4f, 0.4f, 0.8f); }}, new DrawDefault(), new DrawHeatInput(), new DrawHeatRegion("-vents"){{ color = new Color(1f, 0.4f, 0.3f, 1f); }}); consumeItems(with(Items.thorium, 2, Items.sand, 6)); consumeLiquid(Liquids.ozone, 2f / 60f); consumePower(8f); }}; heatReactor = new HeatProducer("heat-reactor"){{ requirements(Category.crafting, BuildVisibility.debugOnly, with(Items.oxide, 70, Items.graphite, 20, Items.carbide, 10, Items.thorium, 80)); size = 3; craftTime = 60f * 10f; craftEffect = new RadialEffect(Fx.heatReactorSmoke, 4, 90f, 7f); itemCapacity = 20; outputItem = new ItemStack(Items.fissileMatter, 1); consumeItem(Items.thorium, 3); consumeLiquid(Liquids.nitrogen, 1f / 60f); }}; //endregion //region defense int wallHealthMultiplier = 4; copperWall = new Wall("copper-wall"){{ requirements(Category.defense, with(Items.copper, 6)); health = 80 * wallHealthMultiplier; researchCostMultiplier = 0.1f; envDisabled |= Env.scorching; }}; copperWallLarge = new Wall("copper-wall-large"){{ requirements(Category.defense, ItemStack.mult(copperWall.requirements, 4)); health = 80 * 4 * wallHealthMultiplier; size = 2; envDisabled |= Env.scorching; }}; titaniumWall = new Wall("titanium-wall"){{ requirements(Category.defense, with(Items.titanium, 6)); health = 110 * wallHealthMultiplier; envDisabled |= Env.scorching; }}; titaniumWallLarge = new Wall("titanium-wall-large"){{ requirements(Category.defense, ItemStack.mult(titaniumWall.requirements, 4)); health = 110 * wallHealthMultiplier * 4; size = 2; envDisabled |= 
Env.scorching; }}; plastaniumWall = new Wall("plastanium-wall"){{ requirements(Category.defense, with(Items.plastanium, 5, Items.metaglass, 2)); health = 125 * wallHealthMultiplier; insulated = true; absorbLasers = true; schematicPriority = 10; envDisabled |= Env.scorching; }}; plastaniumWallLarge = new Wall("plastanium-wall-large"){{ requirements(Category.defense, ItemStack.mult(plastaniumWall.requirements, 4)); health = 125 * wallHealthMultiplier * 4; size = 2; insulated = true; absorbLasers = true; schematicPriority = 10; envDisabled |= Env.scorching; }}; thoriumWall = new Wall("thorium-wall"){{ requirements(Category.defense, with(Items.thorium, 6)); health = 200 * wallHealthMultiplier; envDisabled |= Env.scorching; }}; thoriumWallLarge = new Wall("thorium-wall-large"){{ requirements(Category.defense, ItemStack.mult(thoriumWall.requirements, 4)); health = 200 * wallHealthMultiplier * 4; size = 2; envDisabled |= Env.scorching; }}; phaseWall = new Wall("phase-wall"){{ requirements(Category.defense, with(Items.phaseFabric, 6)); health = 150 * wallHealthMultiplier; chanceDeflect = 10f; flashHit = true; envDisabled |= Env.scorching; }}; phaseWallLarge = new Wall("phase-wall-large"){{ requirements(Category.defense, ItemStack.mult(phaseWall.requirements, 4)); health = 150 * 4 * wallHealthMultiplier; size = 2; chanceDeflect = 10f; flashHit = true; envDisabled |= Env.scorching; }}; surgeWall = new Wall("surge-wall"){{ requirements(Category.defense, with(Items.surgeAlloy, 6)); health = 230 * wallHealthMultiplier; lightningChance = 0.05f; envDisabled |= Env.scorching; }}; surgeWallLarge = new Wall("surge-wall-large"){{ requirements(Category.defense, ItemStack.mult(surgeWall.requirements, 4)); health = 230 * 4 * wallHealthMultiplier; size = 2; lightningChance = 0.05f; envDisabled |= Env.scorching; }}; door = new Door("door"){{ requirements(Category.defense, with(Items.titanium, 6, Items.silicon, 4)); health = 100 * wallHealthMultiplier; envDisabled |= Env.scorching; }}; 
// NOTE(review): statement run inside a content-loading routine. Each assignment builds a
// block definition through an anonymous subclass with a double-brace (instance initializer)
// block. Statement order is significant here: `scrapWall` must be assigned before the larger
// scrap walls read `scrapWall.requirements`. Code tokens are unchanged from the original;
// only line structure and comments were added.

// Large (2x2) door: 4x the cost and health of the basic door, with dedicated open/close effects.
doorLarge = new Door("door-large"){{
    requirements(Category.defense, ItemStack.mult(door.requirements, 4));
    openfx = Fx.dooropenlarge;
    closefx = Fx.doorcloselarge;
    health = 100 * 4 * wallHealthMultiplier;
    size = 2;
    envDisabled |= Env.scorching; // like the other doors/walls here, unusable in scorching environments
}};

// Scrap walls: sandbox-only, in four footprints; health scales linearly with covered tiles.
scrapWall = new Wall("scrap-wall"){{
    requirements(Category.defense, BuildVisibility.sandboxOnly, with(Items.scrap, 6));
    health = 60 * wallHealthMultiplier;
    variants = 5; // number of sprite variants
    envDisabled |= Env.scorching;
}};

scrapWallLarge = new Wall("scrap-wall-large"){{
    requirements(Category.defense, BuildVisibility.sandboxOnly, ItemStack.mult(scrapWall.requirements, 4));
    health = 60 * 4 * wallHealthMultiplier;
    size = 2;
    variants = 4;
    envDisabled |= Env.scorching;
}};

scrapWallHuge = new Wall("scrap-wall-huge"){{
    requirements(Category.defense, BuildVisibility.sandboxOnly, ItemStack.mult(scrapWall.requirements, 9));
    health = 60 * 9 * wallHealthMultiplier;
    size = 3;
    variants = 3;
    envDisabled |= Env.scorching;
}};

scrapWallGigantic = new Wall("scrap-wall-gigantic"){{
    requirements(Category.defense, BuildVisibility.sandboxOnly, ItemStack.mult(scrapWall.requirements, 16));
    health = 60 * 16 * wallHealthMultiplier;
    size = 4;
    envDisabled |= Env.scorching;
}};

// Sandbox-only thruster block; slightly weaker per tile (55 vs 60) than the scrap walls.
thruster = new Thruster("thruster"){{
    requirements(Category.defense, BuildVisibility.sandboxOnly, with(Items.scrap, 96));
    health = 55 * 16 * wallHealthMultiplier;
    size = 4;
    envDisabled |= Env.scorching;
}};

// Beryllium walls: first armored tier. Note these do NOT set envDisabled, unlike the walls above.
berylliumWall = new Wall("beryllium-wall"){{
    requirements(Category.defense, with(Items.beryllium, 6));
    health = 130 * wallHealthMultiplier;
    armor = 2f;
    buildCostMultiplier = 8f;
}};

berylliumWallLarge = new Wall("beryllium-wall-large"){{
    requirements(Category.defense, ItemStack.mult(berylliumWall.requirements, 4));
    health = 130 * wallHealthMultiplier * 4;
    armor = 2f;
    buildCostMultiplier = 5f; // lower than the 1x1 variant's 8f
    size = 2;
}};

tungstenWall = new Wall("tungsten-wall"){{
    requirements(Category.defense, with(Items.tungsten, 6));
    health = 180 * wallHealthMultiplier;
    armor = 14f;
    buildCostMultiplier = 8f;
}};
// NOTE(review): continuation of the defense walls, then the effect/support blocks.
// Order-sensitive: `reinforcedSurgeWallLarge` and `carbideWallLarge` read the
// `requirements` of the 1x1 walls assigned immediately before them. Code tokens are
// unchanged from the original; only line structure and comments were added.

tungstenWallLarge = new Wall("tungsten-wall-large"){{
    requirements(Category.defense, ItemStack.mult(tungstenWall.requirements, 4));
    health = 180 * wallHealthMultiplier * 4;
    armor = 14f;
    buildCostMultiplier = 5f;
    size = 2;
}};

// 2x2 door that opens automatically, with tungsten-tier armor.
blastDoor = new AutoDoor("blast-door"){{
    requirements(Category.defense, with(Items.tungsten, 24, Items.silicon, 24));
    health = 175 * wallHealthMultiplier * 4;
    armor = 14f;
    size = 2;
}};

reinforcedSurgeWall = new Wall("reinforced-surge-wall"){{
    requirements(Category.defense, with(Items.surgeAlloy, 6, Items.tungsten, 2));
    health = 250 * wallHealthMultiplier;
    lightningChance = 0.05f; // presumably per-hit lightning chance — confirm against Wall
    lightningDamage = 30f;
    armor = 20f;
    researchCost = with(Items.surgeAlloy, 20, Items.tungsten, 100);
}};

reinforcedSurgeWallLarge = new Wall("reinforced-surge-wall-large"){{
    requirements(Category.defense, ItemStack.mult(reinforcedSurgeWall.requirements, 4));
    health = 250 * wallHealthMultiplier * 4;
    lightningChance = 0.05f;
    lightningDamage = 30f;
    armor = 20f;
    size = 2;
    researchCost = with(Items.surgeAlloy, 40, Items.tungsten, 200);
}};

carbideWall = new Wall("carbide-wall"){{
    requirements(Category.defense, with(Items.thorium, 6, Items.carbide, 6));
    health = 270 * wallHealthMultiplier;
    armor = 16f;
}};

carbideWallLarge = new Wall("carbide-wall-large"){{
    requirements(Category.defense, ItemStack.mult(carbideWall.requirements, 4));
    health = 270 * wallHealthMultiplier * 4;
    armor = 16f;
    size = 2;
}};

// Powered wall: consumes and conducts power but never outputs it.
shieldedWall = new ShieldWall("shielded-wall"){{
    requirements(Category.defense, ItemStack.with(Items.phaseFabric, 20, Items.surgeAlloy, 12, Items.beryllium, 12));
    consumePower(3f / 60f);
    outputsPower = false;
    hasPower = true;
    consumesPower = true;
    conductivePower = true;
    chanceDeflect = 8f;
    health = 260 * wallHealthMultiplier * 4;
    armor = 15f;
    size = 2;
}};

// -- effect/support blocks --

mender = new MendProjector("mender"){{
    requirements(Category.effect, with(Items.lead, 30, Items.copper, 25));
    consumePower(0.3f);
    size = 1;
    reload = 200f;
    range = 40f;
    healPercent = 4f;
    phaseBoost = 4f;
    phaseRangeBoost = 20f;
    health = 80;
    consumeItem(Items.silicon).boost(); // silicon is an optional booster, not a hard requirement
}};

mendProjector = new MendProjector("mend-projector"){{
    requirements(Category.effect, with(Items.lead, 100, Items.titanium, 25, Items.silicon, 40, Items.copper, 50));
    consumePower(1.5f);
    size = 2;
    reload = 250f;
    range = 85f;
    healPercent = 11f;
    phaseBoost = 15f;
    scaledHealth = 80;
    consumeItem(Items.phaseFabric).boost();
}};

overdriveProjector = new OverdriveProjector("overdrive-projector"){{
    requirements(Category.effect, with(Items.lead, 100, Items.titanium, 75, Items.silicon, 75, Items.plastanium, 30));
    consumePower(3.50f);
    size = 2;
    consumeItem(Items.phaseFabric).boost();
}};

overdriveDome = new OverdriveProjector("overdrive-dome"){{
    requirements(Category.effect, with(Items.lead, 200, Items.titanium, 130, Items.silicon, 130, Items.plastanium, 80, Items.surgeAlloy, 120));
    consumePower(10f);
    size = 3;
    range = 200f;
    speedBoost = 2.5f;
    useTime = 300f;
    hasBoost = false; // items consumed via consumeItems below, not an optional .boost() like the projector
    consumeItems(with(Items.phaseFabric, 1, Items.silicon, 1));
}};

forceProjector = new ForceProjector("force-projector"){{
    requirements(Category.effect, with(Items.lead, 100, Items.titanium, 75, Items.silicon, 125));
    size = 3;
    phaseRadiusBoost = 80f;
    radius = 101.7f;
    shieldHealth = 750f;
    cooldownNormal = 1.5f;
    cooldownLiquid = 1.2f;
    cooldownBrokenBase = 0.35f;
    itemConsumer = consumeItem(Items.phaseFabric).boost(); // consumer handle stored in a field, unlike the menders above
    consumePower(4f);
}};

shockMine = new ShockMine("shock-mine"){{
    requirements(Category.effect, with(Items.lead, 25, Items.silicon, 12));
    hasShadow = false;
    health = 50;
    damage = 25;
    tileDamage = 7f;
    length = 10;
    tendrils = 4;
}};

// Only buildable when fog of war is enabled.
radar = new Radar("radar"){{
    requirements(Category.effect, BuildVisibility.fogOnly, with(Items.silicon, 60, Items.graphite, 50, Items.beryllium, 10));
    outlineColor = Color.valueOf("4a4b53");
    fogRadius = 34;
    researchCost = with(Items.silicon, 70, Items.graphite, 70);
    consumePower(0.6f);
}};

buildTower = new BuildTurret("build-tower"){{
    requirements(Category.effect, with(Items.silicon, 150, Items.oxide, 40, Items.thorium, 60));
    outlineColor = Pal.darkOutline;
    range = 200f;
    size = 3;
    buildSpeed = 1.5f;
    consumePower(3f);
    consumeLiquid(Liquids.nitrogen, 3f / 60f);
}};

regenProjector = new RegenProjector("regen-projector"){{
    requirements(Category.effect, with(Items.silicon, 80, Items.tungsten, 60, Items.oxide, 40, Items.beryllium, 80));
    size = 3;
    range = 28;
    baseColor = Pal.regen;
    consumePower(1f);
    consumeLiquid(Liquids.hydrogen, 1f / 60f);
    consumeItem(Items.phaseFabric).boost();
    healPercent = 4f / 60f;
    // local shared by the two effect shapes in the drawer below
    Color col = Color.valueOf("8ca9e8");
    drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawLiquidTile(Liquids.hydrogen, 9f / 4f), new DrawDefault(), new DrawGlowRegion(){{
        color = Color.sky;
    }}, new DrawPulseShape(false){{
        layer = Layer.effect;
        color = col;
    }}, new DrawShape(){{
        layer = Layer.effect;
        radius = 3.5f;
        useWarmupRadius = true;
        timeScl = 2f;
        color = col;
    }});
}};

//TODO implement
// Disabled block: the assignment below never executes due to the if(false) guard.
if(false)
barrierProjector = new DirectionalForceProjector("barrier-projector"){{
    requirements(Category.effect, with(Items.surgeAlloy, 100, Items.silicon, 125));
    size = 3;
    width = 50f;
    length = 36;
    shieldHealth = 2000f;
    cooldownNormal = 3f;
    cooldownBrokenBase = 0.35f;
    consumePower(4f);
}};

shockwaveTower = new ShockwaveTower("shockwave-tower"){{
    requirements(Category.effect, with(Items.surgeAlloy, 50, Items.silicon, 150, Items.oxide, 30, Items.tungsten, 100));
    size = 3;
    consumeLiquids(LiquidStack.with(Liquids.cyanogen, 1.5f / 60f));
    consumePower(100f / 60f);
    range = 170f;
    reload = 80f;
}};
//TODO 5x5??
// NOTE(review): end of the effect region, then the distribution, liquid and start of the
// power regions. Order-sensitive: `reinforcedLiquidJunction` / `reinforcedBridgeConduit`
// write replacement fields on the previously assigned `reinforcedConduit`. Code tokens are
// unchanged from the original; only line structure and comments were added. Rates written
// as `x / 60f` are presumably per-tick values at 60 ticks/second — confirm.

shieldProjector = new BaseShield("shield-projector"){{
    requirements(Category.effect, BuildVisibility.editorOnly, with()); // editor-only, empty cost list
    size = 3;
    consumePower(5f);
}};

largeShieldProjector = new BaseShield("large-shield-projector"){{
    requirements(Category.effect, BuildVisibility.editorOnly, with());
    size = 4;
    radius = 400f;
    consumePower(5f);
}};

//endregion
//region distribution

conveyor = new Conveyor("conveyor"){{
    requirements(Category.distribution, with(Items.copper, 1));
    health = 45;
    speed = 0.03f;
    displayedSpeed = 4.2f; // UI-facing speed value, distinct from the internal `speed`
    buildCostMultiplier = 2f;
    researchCost = with(Items.copper, 5);
}};

titaniumConveyor = new Conveyor("titanium-conveyor"){{
    requirements(Category.distribution, with(Items.copper, 1, Items.lead, 1, Items.titanium, 1));
    health = 65;
    speed = 0.08f;
    displayedSpeed = 11f;
}};

plastaniumConveyor = new StackConveyor("plastanium-conveyor"){{
    requirements(Category.distribution, with(Items.plastanium, 1, Items.silicon, 1, Items.graphite, 1));
    health = 75;
    speed = 4f / 60f;
    itemCapacity = 10;
}};

// Same speed as titanium conveyor, but armored (higher health).
armoredConveyor = new ArmoredConveyor("armored-conveyor"){{
    requirements(Category.distribution, with(Items.plastanium, 1, Items.thorium, 1, Items.metaglass, 1));
    health = 180;
    speed = 0.08f;
    displayedSpeed = 11f;
}};

junction = new Junction("junction"){{
    requirements(Category.distribution, with(Items.copper, 2));
    speed = 26;
    capacity = 6;
    health = 30;
    buildCostMultiplier = 6f;
}};

itemBridge = new BufferedItemBridge("bridge-conveyor"){{
    requirements(Category.distribution, with(Items.lead, 6, Items.copper, 6));
    fadeIn = moveArrows = false; // chained assignment: both arrow animations off
    range = 4;
    speed = 74f;
    arrowSpacing = 6f;
    bufferCapacity = 14;
}};

phaseConveyor = new ItemBridge("phase-conveyor"){{
    requirements(Category.distribution, with(Items.phaseFabric, 5, Items.silicon, 7, Items.lead, 10, Items.graphite, 10));
    range = 12;
    arrowPeriod = 0.9f;
    arrowTimeScl = 2.75f;
    hasPower = true;
    pulse = true;
    envEnabled |= Env.space; // additionally usable in space
    consumePower(0.30f);
}};

sorter = new Sorter("sorter"){{
    requirements(Category.distribution, with(Items.lead, 2, Items.copper, 2));
    buildCostMultiplier = 3f;
}};

invertedSorter = new Sorter("inverted-sorter"){{
    requirements(Category.distribution, with(Items.lead, 2, Items.copper, 2));
    buildCostMultiplier = 3f;
    invert = true; // same block, inverted filtering
}};

router = new Router("router"){{
    requirements(Category.distribution, with(Items.copper, 3));
    buildCostMultiplier = 4f;
}};

distributor = new Router("distributor"){{
    requirements(Category.distribution, with(Items.lead, 4, Items.copper, 4));
    buildCostMultiplier = 3f;
    size = 2;
}};

overflowGate = new OverflowGate("overflow-gate"){{
    requirements(Category.distribution, with(Items.lead, 2, Items.copper, 4));
    buildCostMultiplier = 3f;
}};

underflowGate = new OverflowGate("underflow-gate"){{
    requirements(Category.distribution, with(Items.lead, 2, Items.copper, 4));
    buildCostMultiplier = 3f;
    invert = true;
}};

massDriver = new MassDriver("mass-driver"){{
    requirements(Category.distribution, with(Items.titanium, 125, Items.silicon, 75, Items.lead, 125, Items.thorium, 50));
    size = 3;
    itemCapacity = 120;
    reload = 200f;
    range = 440f;
    consumePower(1.75f);
}};

//erekir transport blocks

duct = new Duct("duct"){{
    requirements(Category.distribution, with(Items.beryllium, 1));
    health = 90;
    speed = 4f;
    researchCost = with(Items.beryllium, 5);
}};

armoredDuct = new Duct("armored-duct"){{
    requirements(Category.distribution, with(Items.beryllium, 2, Items.tungsten, 1));
    health = 140;
    speed = 4f;
    armored = true;
    researchCost = with(Items.beryllium, 300, Items.tungsten, 100);
}};

ductRouter = new DuctRouter("duct-router"){{
    requirements(Category.distribution, with(Items.beryllium, 10));
    health = 90;
    speed = 4f;
    regionRotated1 = 1;
    solid = false;
    researchCost = with(Items.beryllium, 30);
}};

overflowDuct = new OverflowDuct("overflow-duct"){{
    requirements(Category.distribution, with(Items.graphite, 8, Items.beryllium, 8));
    health = 90;
    speed = 4f;
    solid = false;
    researchCostMultiplier = 1.5f;
}};

underflowDuct = new OverflowDuct("underflow-duct"){{
    requirements(Category.distribution, with(Items.graphite, 8, Items.beryllium, 8));
    health = 90;
    speed = 4f;
    solid = false;
    researchCostMultiplier = 1.5f;
    invert = true;
}};

ductBridge = new DuctBridge("duct-bridge"){{
    requirements(Category.distribution, with(Items.beryllium, 20));
    health = 90;
    speed = 4f;
    buildCostMultiplier = 2f;
    researchCostMultiplier = 0.3f;
}};

ductUnloader = new DirectionalUnloader("duct-unloader"){{
    requirements(Category.distribution, with(Items.graphite, 20, Items.silicon, 20, Items.tungsten, 10));
    health = 120;
    speed = 4f;
    solid = false;
    underBullets = true;
    regionRotated1 = 1;
}};

// Powered stack conveyor; conducts power and requires it to run at base efficiency.
surgeConveyor = new StackConveyor("surge-conveyor"){{
    requirements(Category.distribution, with(Items.surgeAlloy, 1, Items.tungsten, 1));
    health = 130;
    //TODO different base speed/item capacity?
    speed = 5f / 60f;
    itemCapacity = 10;
    outputRouter = false;
    hasPower = true;
    consumesPower = true;
    conductivePower = true;
    underBullets = true;
    baseEfficiency = 1f;
    consumePower(1f / 60f);
    researchCost = with(Items.surgeAlloy, 30, Items.tungsten, 80);
}};

surgeRouter = new StackRouter("surge-router"){{
    requirements(Category.distribution, with(Items.surgeAlloy, 5, Items.tungsten, 1));
    health = 130;
    speed = 6f;
    hasPower = true;
    consumesPower = true;
    conductivePower = true;
    baseEfficiency = 1f;
    underBullets = true;
    solid = false;
    consumePower(3f / 60f);
}};

unitCargoLoader = new UnitCargoLoader("unit-cargo-loader"){{
    requirements(Category.distribution, with(Items.silicon, 80, Items.surgeAlloy, 50, Items.oxide, 20));
    size = 3;
    buildTime = 60f * 8f;
    consumePower(8f / 60f);

    //intentionally set absurdly high to make this block not overpowered
    consumeLiquid(Liquids.nitrogen, 10f / 60f);
    itemCapacity = 200;
    researchCost = with(Items.silicon, 2500, Items.surgeAlloy, 20, Items.oxide, 30);
}};

unitCargoUnloadPoint = new UnitCargoUnloadPoint("unit-cargo-unload-point"){{
    requirements(Category.distribution, with(Items.silicon, 60, Items.tungsten, 60));
    size = 2;
    itemCapacity = 100;
    researchCost = with(Items.silicon, 3000, Items.oxide, 20);
}};

//endregion
//region liquid

mechanicalPump = new Pump("mechanical-pump"){{
    requirements(Category.liquid, with(Items.copper, 15, Items.metaglass, 10));
    pumpAmount = 7f / 60f;
}};

rotaryPump = new Pump("rotary-pump"){{
    requirements(Category.liquid, with(Items.copper, 70, Items.metaglass, 50, Items.silicon, 20, Items.titanium, 35));
    pumpAmount = 0.2f;
    consumePower(0.3f);
    liquidCapacity = 30f;
    hasPower = true;
    size = 2;
}};

impulsePump = new Pump("impulse-pump"){{
    requirements(Category.liquid, with(Items.copper, 80, Items.metaglass, 90, Items.silicon, 30, Items.titanium, 40, Items.thorium, 35));
    pumpAmount = 0.22f;
    consumePower(1.3f);
    liquidCapacity = 40f;
    hasPower = true;
    size = 3;
}};

conduit = new Conduit("conduit"){{
    requirements(Category.liquid, with(Items.metaglass, 1));
    health = 45;
}};

pulseConduit = new Conduit("pulse-conduit"){{
    requirements(Category.liquid, with(Items.titanium, 2, Items.metaglass, 1));
    liquidCapacity = 16f;
    liquidPressure = 1.025f;
    health = 90;
}};

platedConduit = new ArmoredConduit("plated-conduit"){{
    requirements(Category.liquid, with(Items.thorium, 2, Items.metaglass, 1, Items.plastanium, 1));
    liquidCapacity = 16f;
    liquidPressure = 1.025f;
    health = 220;
}};

liquidRouter = new LiquidRouter("liquid-router"){{
    requirements(Category.liquid, with(Items.graphite, 4, Items.metaglass, 2));
    liquidCapacity = 20f;
    underBullets = true;
    solid = false;
}};

liquidContainer = new LiquidRouter("liquid-container"){{
    requirements(Category.liquid, with(Items.titanium, 10, Items.metaglass, 15));
    liquidCapacity = 700f;
    size = 2;
    solid = true;
}};

liquidTank = new LiquidRouter("liquid-tank"){{
    requirements(Category.liquid, with(Items.titanium, 30, Items.metaglass, 40));
    size = 3;
    solid = true;
    liquidCapacity = 1800f;
    health = 500;
}};

liquidJunction = new LiquidJunction("liquid-junction"){{
    requirements(Category.liquid, with(Items.graphite, 4, Items.metaglass, 8));
    solid = false;
}};

bridgeConduit = new LiquidBridge("bridge-conduit"){{
    requirements(Category.liquid, with(Items.graphite, 4, Items.metaglass, 8));
    fadeIn = moveArrows = false;
    arrowSpacing = 6f;
    range = 4;
    hasPower = false;
}};

phaseConduit = new LiquidBridge("phase-conduit"){{
    requirements(Category.liquid, with(Items.phaseFabric, 5, Items.silicon, 7, Items.metaglass, 20, Items.titanium, 10));
    range = 12;
    arrowPeriod = 0.9f;
    arrowTimeScl = 2.75f;
    hasPower = true;
    canOverdrive = false;
    pulse = true;
    consumePower(0.30f);
}};

//reinforced stuff

reinforcedPump = new Pump("reinforced-pump"){{
    requirements(Category.liquid, with(Items.beryllium, 40, Items.tungsten, 30, Items.silicon, 20));
    consumeLiquid(Liquids.hydrogen, 1.5f / 60f);
    pumpAmount = 80f / 60f / 4f;
    liquidCapacity = 160f;
    size = 2;
}};

reinforcedConduit = new ArmoredConduit("reinforced-conduit"){{
    requirements(Category.liquid, with(Items.beryllium, 2));
    botColor = Pal.darkestMetal;
    leaks = true;
    liquidCapacity = 20f;
    liquidPressure = 1.03f;
    health = 250;
    researchCostMultiplier = 3;
    underBullets = true;
}};

//TODO is this necessary? junctions are not good design
//TODO make it leak
reinforcedLiquidJunction = new LiquidJunction("reinforced-liquid-junction"){{
    requirements(Category.liquid, with(Items.graphite, 4, Items.beryllium, 8));
    buildCostMultiplier = 3f;
    health = 260;
    // register this block as the junction replacement on the conduit assigned above
    ((Conduit)reinforcedConduit).junctionReplacement = this;
    researchCostMultiplier = 1;
    solid = false;
    underBullets = true;
}};

reinforcedBridgeConduit = new DirectionLiquidBridge("reinforced-bridge-conduit"){{
    requirements(Category.liquid, with(Items.graphite, 8, Items.beryllium, 20));
    range = 4;
    hasPower = false;
    researchCostMultiplier = 1;
    underBullets = true;
    // likewise, register this as the rotating-bridge replacement for reinforcedConduit
    ((Conduit)reinforcedConduit).rotBridgeReplacement = this;
}};

reinforcedLiquidRouter = new LiquidRouter("reinforced-liquid-router"){{
    requirements(Category.liquid, with(Items.graphite, 8, Items.beryllium, 4));
    liquidCapacity = 30f;
    liquidPadding = 3f/4f;
    researchCostMultiplier = 3;
    underBullets = true;
    solid = false;
}};

reinforcedLiquidContainer = new LiquidRouter("reinforced-liquid-container"){{
    requirements(Category.liquid, with(Items.tungsten, 10, Items.beryllium, 16));
    liquidCapacity = 1000f;
    size = 2;
    liquidPadding = 6f/4f;
    researchCostMultiplier = 4;
    solid = true;
}};

reinforcedLiquidTank = new LiquidRouter("reinforced-liquid-tank"){{
    requirements(Category.liquid, with(Items.tungsten, 40, Items.beryllium, 50));
    size = 3;
    solid = true;
    liquidCapacity = 2700f;
    liquidPadding = 2f;
}};

//endregion
//region power

powerNode = new PowerNode("power-node"){{
    requirements(Category.power, with(Items.copper, 1, Items.lead, 3));
    maxNodes = 10;
    laserRange = 6;
}};

powerNodeLarge = new PowerNode("power-node-large"){{
    requirements(Category.power, with(Items.titanium, 5, Items.lead, 10, Items.silicon, 3));
    size = 2;
    maxNodes = 15;
    laserRange = 15f;
}};

// Long-range node with very few connections; negative priority places it late in schematics.
surgeTower = new PowerNode("surge-tower"){{
    requirements(Category.power, with(Items.titanium, 7, Items.lead, 10, Items.silicon, 15, Items.surgeAlloy, 15));
    size = 2;
    maxNodes = 2;
    laserRange = 40f;
    schematicPriority = -15;
}};
diode = new PowerDiode("diode"){{ requirements(Category.power, with(Items.silicon, 10, Items.plastanium, 5, Items.metaglass, 10)); }}; battery = new Battery("battery"){{ requirements(Category.power, with(Items.copper, 5, Items.lead, 20)); consumePowerBuffered(4000f); baseExplosiveness = 1f; }}; batteryLarge = new Battery("battery-large"){{ requirements(Category.power, with(Items.titanium, 20, Items.lead, 50, Items.silicon, 30)); size = 3; consumePowerBuffered(50000f); baseExplosiveness = 5f; }}; combustionGenerator = new ConsumeGenerator("combustion-generator"){{ requirements(Category.power, with(Items.copper, 25, Items.lead, 15)); powerProduction = 1f; itemDuration = 120f; ambientSound = Sounds.smelter; ambientSoundVolume = 0.03f; generateEffect = Fx.generatespark; consume(new ConsumeItemFlammable()); consume(new ConsumeItemExplode()); drawer = new DrawMulti(new DrawDefault(), new DrawWarmupRegion()); }}; thermalGenerator = new ThermalGenerator("thermal-generator"){{ requirements(Category.power, with(Items.copper, 40, Items.graphite, 35, Items.lead, 50, Items.silicon, 35, Items.metaglass, 40)); powerProduction = 1.8f; generateEffect = Fx.redgeneratespark; effectChance = 0.011f; size = 2; floating = true; ambientSound = Sounds.hum; ambientSoundVolume = 0.06f; }}; steamGenerator = new ConsumeGenerator("steam-generator"){{ requirements(Category.power, with(Items.copper, 35, Items.graphite, 25, Items.lead, 40, Items.silicon, 30)); powerProduction = 5.5f; itemDuration = 90f; consumeLiquid(Liquids.water, 0.1f); hasLiquids = true; size = 2; generateEffect = Fx.generatespark; ambientSound = Sounds.smelter; ambientSoundVolume = 0.06f; consume(new ConsumeItemFlammable()); consume(new ConsumeItemExplode()); drawer = new DrawMulti( new DrawDefault(), new DrawWarmupRegion(), new DrawRegion("-turbine"){{ rotateSpeed = 2f; }}, new DrawRegion("-turbine"){{ rotateSpeed = -2f; rotation = 45f; }}, new DrawRegion("-cap"), new DrawLiquidRegion() ); }}; differentialGenerator = new 
ConsumeGenerator("differential-generator"){{ requirements(Category.power, with(Items.copper, 70, Items.titanium, 50, Items.lead, 100, Items.silicon, 65, Items.metaglass, 50)); powerProduction = 18f; itemDuration = 220f; hasLiquids = true; hasItems = true; size = 3; ambientSound = Sounds.steam; generateEffect = Fx.generatespark; ambientSoundVolume = 0.03f; drawer = new DrawMulti(new DrawDefault(), new DrawWarmupRegion(), new DrawLiquidRegion()); consumeItem(Items.pyratite); consumeLiquid(Liquids.cryofluid, 0.1f); }}; rtgGenerator = new ConsumeGenerator("rtg-generator"){{ requirements(Category.power, with(Items.lead, 100, Items.silicon, 75, Items.phaseFabric, 25, Items.plastanium, 75, Items.thorium, 50)); size = 2; powerProduction = 4.5f; itemDuration = 60 * 14f; envEnabled = Env.any; generateEffect = Fx.generatespark; drawer = new DrawMulti(new DrawDefault(), new DrawWarmupRegion()); consume(new ConsumeItemRadioactive()); }}; solarPanel = new SolarGenerator("solar-panel"){{ requirements(Category.power, with(Items.lead, 10, Items.silicon, 15)); powerProduction = 0.1f; }}; largeSolarPanel = new SolarGenerator("solar-panel-large"){{ requirements(Category.power, with(Items.lead, 80, Items.silicon, 110, Items.phaseFabric, 15)); size = 3; powerProduction = 1.6f; }}; thoriumReactor = new NuclearReactor("thorium-reactor"){{ requirements(Category.power, with(Items.lead, 300, Items.silicon, 200, Items.graphite, 150, Items.thorium, 150, Items.metaglass, 50)); ambientSound = Sounds.hum; ambientSoundVolume = 0.24f; size = 3; health = 700; itemDuration = 360f; powerProduction = 15f; heating = 0.02f; consumeItem(Items.thorium); consumeLiquid(Liquids.cryofluid, heating / coolantPower).update(false); }}; impactReactor = new ImpactReactor("impact-reactor"){{ requirements(Category.power, with(Items.lead, 500, Items.silicon, 300, Items.graphite, 400, Items.thorium, 100, Items.surgeAlloy, 250, Items.metaglass, 250)); size = 4; health = 900; powerProduction = 130f; itemDuration = 140f; 
ambientSound = Sounds.pulse; ambientSoundVolume = 0.07f; liquidCapacity = 60f; consumePower(25f); consumeItem(Items.blastCompound); consumeLiquid(Liquids.cryofluid, 0.25f); }}; //erekir beamNode = new BeamNode("beam-node"){{ requirements(Category.power, with(Items.beryllium, 8)); consumesPower = outputsPower = true; health = 90; range = 10; fogRadius = 1; researchCost = with(Items.beryllium, 5); consumePowerBuffered(1000f); }}; beamTower = new BeamNode("beam-tower"){{ requirements(Category.power, with(Items.beryllium, 30, Items.oxide, 10, Items.silicon, 10)); size = 3; consumesPower = outputsPower = true; range = 23; scaledHealth = 90; fogRadius = 2; consumePowerBuffered(40000f); }}; beamLink = new LongPowerNode("beam-link"){{ requirements(Category.power, BuildVisibility.editorOnly, with()); size = 3; maxNodes = 1; laserRange = 1000f; autolink = false; laserColor2 = Color.valueOf("ffd9c2"); laserScale = 0.8f; scaledHealth = 130; }}; turbineCondenser = new ThermalGenerator("turbine-condenser"){{ requirements(Category.power, with(Items.beryllium, 60)); attribute = Attribute.steam; group = BlockGroup.liquids; displayEfficiencyScale = 1f / 9f; minEfficiency = 9f - 0.0001f; powerProduction = 3f / 9f; displayEfficiency = false; generateEffect = Fx.turbinegenerate; effectChance = 0.04f; size = 3; ambientSound = Sounds.hum; ambientSoundVolume = 0.06f; drawer = new DrawMulti(new DrawDefault(), new DrawBlurSpin("-rotator", 0.6f * 9f){{ blurThresh = 0.01f; }}); hasLiquids = true; outputLiquid = new LiquidStack(Liquids.water, 5f / 60f / 9f); liquidCapacity = 20f; fogRadius = 3; researchCost = with(Items.beryllium, 15); }}; chemicalCombustionChamber = new ConsumeGenerator("chemical-combustion-chamber"){{ requirements(Category.power, with(Items.graphite, 40, Items.tungsten, 40, Items.oxide, 40f, Items.silicon, 30)); powerProduction = 10f; researchCost = with(Items.graphite, 2000, Items.tungsten, 1000, Items.oxide, 10, Items.silicon, 1500); 
consumeLiquids(LiquidStack.with(Liquids.ozone, 2f / 60f, Liquids.arkycite, 40f / 60f)); size = 3; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawPistons(){{ sinMag = 3f; sinScl = 5f; }}, new DrawRegion("-mid"), new DrawLiquidTile(Liquids.arkycite, 37f / 4f), new DrawDefault(), new DrawGlowRegion(){{ alpha = 1f; glowScale = 5f; color = Color.valueOf("c967b099"); }}); generateEffect = Fx.none; liquidCapacity = 20f * 5; ambientSound = Sounds.smelter; ambientSoundVolume = 0.06f; }}; pyrolysisGenerator = new ConsumeGenerator("pyrolysis-generator"){{ requirements(Category.power, with(Items.graphite, 50, Items.carbide, 50, Items.oxide, 60f, Items.silicon, 50)); powerProduction = 25f; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawPistons(){{ sinMag = 2.75f; sinScl = 5f; sides = 8; sideOffset = Mathf.PI / 2f; }}, new DrawRegion("-mid"), new DrawLiquidTile(Liquids.arkycite, 38f / 4f), new DrawDefault(), new DrawGlowRegion(){{ alpha = 1f; glowScale = 5f; color = Pal.slagOrange; }}); consumeLiquids(LiquidStack.with(Liquids.slag, 20f / 60f, Liquids.arkycite, 40f / 60f)); size = 3; liquidCapacity = 30f * 5; outputLiquid = new LiquidStack(Liquids.water, 20f / 60f); generateEffect = Fx.none; ambientSound = Sounds.smelter; ambientSoundVolume = 0.06f; researchCostMultiplier = 0.4f; }}; fluxReactor = new VariableReactor("flux-reactor"){{ requirements(Category.power, with(Items.graphite, 300, Items.carbide, 200, Items.oxide, 100, Items.silicon, 600, Items.surgeAlloy, 300)); powerProduction = 120f; maxHeat = 140f; consumeLiquid(Liquids.cyanogen, 9f / 60f); liquidCapacity = 30f; explosionMinWarmup = 0.5f; explosionRadius = 17; explosionDamage = 2500; ambientSound = Sounds.flux; ambientSoundVolume = 0.13f; size = 5; drawer = new DrawMulti( new DrawRegion("-bottom"), new DrawLiquidTile(Liquids.cyanogen), new DrawRegion("-mid"), new DrawSoftParticles(){{ alpha = 0.35f; particleRad = 12f; particleSize = 9f; particleLife = 120f; particles = 27; }}, new DrawDefault(), 
new DrawHeatInput(), new DrawGlowRegion("-ventglow"){{ color = Color.valueOf("32603a"); }} ); }}; neoplasiaReactor = new HeaterGenerator("neoplasia-reactor"){{ requirements(Category.power, with(Items.tungsten, 1000, Items.carbide, 300, Items.oxide, 150, Items.silicon, 500, Items.phaseFabric, 300, Items.surgeAlloy, 200)); size = 5; liquidCapacity = 80f; outputLiquid = new LiquidStack(Liquids.neoplasm, 20f / 60f); explodeOnFull = true; heatOutput = 60f; consumeLiquid(Liquids.arkycite, 80f / 60f); consumeLiquid(Liquids.water, 10f / 60f); consumeItem(Items.phaseFabric); itemDuration = 60f * 3f; itemCapacity = 10; explosionRadius = 9; explosionDamage = 2000; explodeEffect = new MultiEffect(Fx.bigShockwave, new WrapEffect(Fx.titanSmoke, Liquids.neoplasm.color), Fx.neoplasmSplat); explodeSound = Sounds.largeExplosion; powerProduction = 140f; ambientSound = Sounds.bioLoop; ambientSoundVolume = 0.2f; explosionPuddles = 80; explosionPuddleRange = tilesize * 7f; explosionPuddleLiquid = Liquids.neoplasm; explosionPuddleAmount = 200f; explosionMinWarmup = 0.5f; consumeEffect = new RadialEffect(Fx.neoplasiaSmoke, 4, 90f, 54f / 4f); drawer = new DrawMulti( new DrawRegion("-bottom"), new DrawLiquidTile(Liquids.arkycite, 3f), new DrawCircles(){{ color = Color.valueOf("feb380").a(0.8f); strokeMax = 3.25f; radius = 65f / 4f; amount = 5; timeScl = 200f; }}, new DrawRegion("-center"), new DrawCells(){{ color = Color.valueOf("c33e2b"); particleColorFrom = Color.valueOf("e8803f"); particleColorTo = Color.valueOf("8c1225"); particles = 50; range = 4f; }}, new DrawDefault(), new DrawHeatOutput(), new DrawGlowRegion("-glow"){{ color = Color.valueOf("70170b"); alpha = 0.7f; }} ); }}; //endregion power //region production mechanicalDrill = new Drill("mechanical-drill"){{ requirements(Category.production, with(Items.copper, 12)); tier = 2; drillTime = 600; size = 2; //mechanical drill doesn't work in space envEnabled ^= Env.space; researchCost = with(Items.copper, 10); 
consumeLiquid(Liquids.water, 0.05f).boost(); }}; pneumaticDrill = new Drill("pneumatic-drill"){{ requirements(Category.production, with(Items.copper, 18, Items.graphite, 10)); tier = 3; drillTime = 400; size = 2; consumeLiquid(Liquids.water, 0.06f).boost(); }}; laserDrill = new Drill("laser-drill"){{ requirements(Category.production, with(Items.copper, 35, Items.graphite, 30, Items.silicon, 30, Items.titanium, 20)); drillTime = 280; size = 3; hasPower = true; tier = 4; updateEffect = Fx.pulverizeMedium; drillEffect = Fx.mineBig; consumePower(1.10f); consumeLiquid(Liquids.water, 0.08f).boost(); }}; blastDrill = new Drill("blast-drill"){{ requirements(Category.production, with(Items.copper, 65, Items.silicon, 60, Items.titanium, 50, Items.thorium, 75)); drillTime = 280; size = 4; drawRim = true; hasPower = true; tier = 5; updateEffect = Fx.pulverizeRed; updateEffectChance = 0.03f; drillEffect = Fx.mineHuge; rotateSpeed = 6f; warmupSpeed = 0.01f; itemCapacity = 20; //more than the laser drill liquidBoostIntensity = 1.8f; consumePower(3f); consumeLiquid(Liquids.water, 0.1f).boost(); }}; waterExtractor = new SolidPump("water-extractor"){{ requirements(Category.production, with(Items.metaglass, 30, Items.graphite, 30, Items.lead, 30, Items.copper, 30)); result = Liquids.water; pumpAmount = 0.11f; size = 2; liquidCapacity = 30f; rotateSpeed = 1.4f; attribute = Attribute.water; envRequired |= Env.groundWater; consumePower(1.5f); }}; cultivator = new AttributeCrafter("cultivator"){{ requirements(Category.production, with(Items.copper, 25, Items.lead, 25, Items.silicon, 10)); outputItem = new ItemStack(Items.sporePod, 1); craftTime = 100; size = 2; hasLiquids = true; hasPower = true; hasItems = true; craftEffect = Fx.none; envRequired |= Env.spores; attribute = Attribute.spores; legacyReadWarmup = true; drawer = new DrawMulti( new DrawRegion("-bottom"), new DrawLiquidTile(Liquids.water), new DrawDefault(), new DrawCultivator(), new DrawRegion("-top") ); maxBoost = 2f; 
consumePower(80f / 60f); consumeLiquid(Liquids.water, 18f / 60f); }}; oilExtractor = new Fracker("oil-extractor"){{ requirements(Category.production, with(Items.copper, 150, Items.graphite, 175, Items.lead, 115, Items.thorium, 115, Items.silicon, 75)); result = Liquids.oil; updateEffect = Fx.pulverize; updateEffectChance = 0.05f; pumpAmount = 0.25f; size = 3; liquidCapacity = 30f; attribute = Attribute.oil; baseEfficiency = 0f; itemUseTime = 60f; consumeItem(Items.sand); consumePower(3f); consumeLiquid(Liquids.water, 0.15f); }}; ventCondenser = new AttributeCrafter("vent-condenser"){{ requirements(Category.production, with(Items.graphite, 20, Items.beryllium, 60)); attribute = Attribute.steam; group = BlockGroup.liquids; minEfficiency = 9f - 0.0001f; baseEfficiency = 0f; displayEfficiency = false; craftEffect = Fx.turbinegenerate; drawer = new DrawMulti(new DrawRegion("-bottom"), new DrawBlurSpin("-rotator", 6f), new DrawRegion("-mid"), new DrawLiquidTile(Liquids.water, 38f / 4f), new DrawDefault()); craftTime = 120f; size = 3; ambientSound = Sounds.hum; ambientSoundVolume = 0.06f; hasLiquids = true; boostScale = 1f / 9f; itemCapacity = 0; outputLiquid = new LiquidStack(Liquids.water, 30f / 60f); consumePower(0.5f); liquidCapacity = 60f; }}; cliffCrusher = new WallCrafter("cliff-crusher"){{ requirements(Category.production, with(Items.graphite, 25, Items.beryllium, 20)); consumePower(11 / 60f); drillTime = 110f; size = 2; attribute = Attribute.sand; output = Items.sand; fogRadius = 2; researchCost = with(Items.beryllium, 100, Items.graphite, 40); ambientSound = Sounds.drill; ambientSoundVolume = 0.04f; }}; plasmaBore = new BeamDrill("plasma-bore"){{ requirements(Category.production, with(Items.beryllium, 40)); consumePower(0.15f); drillTime = 160f; tier = 3; size = 2; range = 5; fogRadius = 3; researchCost = with(Items.beryllium, 10); consumeLiquid(Liquids.hydrogen, 0.25f / 60f).boost(); }}; //TODO awful name largePlasmaBore = new BeamDrill("large-plasma-bore"){{ 
requirements(Category.production, with(Items.silicon, 100, Items.oxide, 25, Items.beryllium, 100, Items.tungsten, 70)); consumePower(0.8f); drillTime = 100f; tier = 5; size = 3; range = 6; fogRadius = 4; laserWidth = 0.7f; itemCapacity = 20; consumeLiquid(Liquids.hydrogen, 0.5f / 60f); consumeLiquid(Liquids.nitrogen, 3f / 60f).boost(); researchCost = with(Items.silicon, 1500, Items.oxide, 200, Items.beryllium, 3000, Items.tungsten, 1200); }}; impactDrill = new BurstDrill("impact-drill"){{ requirements(Category.production, with(Items.silicon, 70, Items.beryllium, 90, Items.graphite, 60)); drillTime = 60f * 12f; size = 4; hasPower = true; tier = 6; drillEffect = new MultiEffect(Fx.mineImpact, Fx.drillSteam, Fx.mineImpactWave.wrap(Pal.redLight, 40f)); shake = 4f; itemCapacity = 40; //can't mine thorium for balance reasons, needs better drill blockedItem = Items.thorium; researchCostMultiplier = 0.5f; drillMultipliers.put(Items.beryllium, 2.5f); fogRadius = 4; consumePower(160f / 60f); consumeLiquid(Liquids.water, 0.2f); }}; eruptionDrill = new BurstDrill("eruption-drill"){{ requirements(Category.production, with(Items.silicon, 200, Items.oxide, 20, Items.tungsten, 200, Items.thorium, 120)); drillTime = 60f * 6f; size = 5; hasPower = true; tier = 7; //TODO better effect drillEffect = new MultiEffect( Fx.mineImpact, Fx.drillSteam, Fx.dynamicSpikes.wrap(Liquids.hydrogen.color, 30f), Fx.mineImpactWave.wrap(Liquids.hydrogen.color, 45f) ); shake = 4f; itemCapacity = 50; arrowOffset = 2f; arrowSpacing = 5f; arrows = 2; glowColor.a = 0.6f; fogRadius = 5; drillMultipliers.put(Items.beryllium, 2.5f); //TODO different requirements consumePower(6f); consumeLiquids(LiquidStack.with(Liquids.hydrogen, 4f / 60f)); }}; //endregion //region storage coreShard = new CoreBlock("core-shard"){{ requirements(Category.effect, BuildVisibility.editorOnly, with(Items.copper, 1000, Items.lead, 800)); alwaysUnlocked = true; isFirstTier = true; unitType = UnitTypes.alpha; health = 1100; 
itemCapacity = 4000; size = 3; unitCapModifier = 8; }}; coreFoundation = new CoreBlock("core-foundation"){{ requirements(Category.effect, with(Items.copper, 3000, Items.lead, 3000, Items.silicon, 2000)); unitType = UnitTypes.beta; health = 3500; itemCapacity = 9000; size = 4; thrusterLength = 34/4f; unitCapModifier = 16; researchCostMultiplier = 0.07f; }}; coreNucleus = new CoreBlock("core-nucleus"){{ requirements(Category.effect, with(Items.copper, 8000, Items.lead, 8000, Items.silicon, 5000, Items.thorium, 4000)); unitType = UnitTypes.gamma; health = 6000; itemCapacity = 13000; size = 5; thrusterLength = 40/4f; unitCapModifier = 24; researchCostMultiplier = 0.11f; }}; coreBastion = new CoreBlock("core-bastion"){{ //TODO cost requirements(Category.effect, with(Items.graphite, 1000, Items.silicon, 1000, Items.beryllium, 800)); isFirstTier = true; unitType = UnitTypes.evoke; health = 4500; itemCapacity = 2000; size = 4; thrusterLength = 34/4f; armor = 5f; alwaysUnlocked = true; incinerateNonBuildable = true; requiresCoreZone = true; //TODO should this be higher? 
buildCostMultiplier = 0.7f; unitCapModifier = 15; researchCostMultiplier = 0.07f; }}; coreCitadel = new CoreBlock("core-citadel"){{ requirements(Category.effect, with(Items.silicon, 4000, Items.beryllium, 4000, Items.tungsten, 3000, Items.oxide, 1000)); unitType = UnitTypes.incite; health = 16000; itemCapacity = 3000; size = 5; thrusterLength = 40/4f; armor = 10f; incinerateNonBuildable = true; buildCostMultiplier = 0.7f; requiresCoreZone = true; unitCapModifier = 15; researchCostMultipliers.put(Items.silicon, 0.5f); researchCostMultiplier = 0.17f; }}; coreAcropolis = new CoreBlock("core-acropolis"){{ requirements(Category.effect, with(Items.beryllium, 6000, Items.silicon, 5000, Items.tungsten, 5000, Items.carbide, 3000, Items.oxide, 3000)); unitType = UnitTypes.emanate; health = 30000; itemCapacity = 4000; size = 6; thrusterLength = 48/4f; armor = 15f; incinerateNonBuildable = true; buildCostMultiplier = 0.7f; requiresCoreZone = true; unitCapModifier = 15; researchCostMultipliers.put(Items.silicon, 0.4f); researchCostMultiplier = 0.1f; }}; container = new StorageBlock("container"){{ requirements(Category.effect, with(Items.titanium, 100)); size = 2; itemCapacity = 300; scaledHealth = 55; }}; vault = new StorageBlock("vault"){{ requirements(Category.effect, with(Items.titanium, 250, Items.thorium, 125)); size = 3; itemCapacity = 1000; scaledHealth = 55; }}; //TODO move tabs? 
unloader = new Unloader("unloader"){{ requirements(Category.effect, with(Items.titanium, 25, Items.silicon, 30)); speed = 60f / 11f; group = BlockGroup.transportation; }}; reinforcedContainer = new StorageBlock("reinforced-container"){{ requirements(Category.effect, with(Items.tungsten, 30, Items.graphite, 40)); size = 2; itemCapacity = 80; scaledHealth = 120; coreMerge = false; }}; reinforcedVault = new StorageBlock("reinforced-vault"){{ requirements(Category.effect, with(Items.tungsten, 125, Items.thorium, 70, Items.beryllium, 100)); size = 3; itemCapacity = 900; scaledHealth = 120; coreMerge = false; }}; //endregion //region turrets duo = new ItemTurret("duo"){{ requirements(Category.turret, with(Items.copper, 35)); ammo( Items.copper, new BasicBulletType(2.5f, 9){{ width = 7f; height = 9f; lifetime = 60f; ammoMultiplier = 2; }}, Items.graphite, new BasicBulletType(3.5f, 18){{ width = 9f; height = 12f; reloadMultiplier = 0.6f; ammoMultiplier = 4; lifetime = 60f; }}, Items.silicon, new BasicBulletType(3f, 12){{ width = 7f; height = 9f; homingPower = 0.1f; reloadMultiplier = 1.5f; ammoMultiplier = 5; lifetime = 60f; }} ); shoot = new ShootAlternate(3.5f); recoils = 2; drawer = new DrawTurret(){{ for(int i = 0; i < 2; i ++){ int f = i; parts.add(new RegionPart("-barrel-" + (i == 0 ? 
"l" : "r")){{ progress = PartProgress.recoil; recoilIndex = f; under = true; moveY = -1.5f; }}); } }}; recoil = 0.5f; shootY = 3f; reload = 20f; range = 110; shootCone = 15f; ammoUseEffect = Fx.casing1; health = 250; inaccuracy = 2f; rotateSpeed = 10f; coolant = consumeCoolant(0.1f); researchCostMultiplier = 0.05f; limitRange(); }}; scatter = new ItemTurret("scatter"){{ requirements(Category.turret, with(Items.copper, 85, Items.lead, 45)); ammo( Items.scrap, new FlakBulletType(4f, 3){{ lifetime = 60f; ammoMultiplier = 5f; shootEffect = Fx.shootSmall; reloadMultiplier = 0.5f; width = 6f; height = 8f; hitEffect = Fx.flakExplosion; splashDamage = 22f * 1.5f; splashDamageRadius = 24f; }}, Items.lead, new FlakBulletType(4.2f, 3){{ lifetime = 60f; ammoMultiplier = 4f; shootEffect = Fx.shootSmall; width = 6f; height = 8f; hitEffect = Fx.flakExplosion; splashDamage = 27f * 1.5f; splashDamageRadius = 15f; }}, Items.metaglass, new FlakBulletType(4f, 3){{ lifetime = 60f; ammoMultiplier = 5f; shootEffect = Fx.shootSmall; reloadMultiplier = 0.8f; width = 6f; height = 8f; hitEffect = Fx.flakExplosion; splashDamage = 30f * 1.5f; splashDamageRadius = 20f; fragBullets = 6; fragBullet = new BasicBulletType(3f, 5){{ width = 5f; height = 12f; shrinkY = 1f; lifetime = 20f; backColor = Pal.gray; frontColor = Color.white; despawnEffect = Fx.none; collidesGround = false; }}; }} ); drawer = new DrawTurret(){{ parts.add(new RegionPart("-mid"){{ progress = PartProgress.recoil; under = false; moveY = -1.25f; }}); }}; reload = 18f; range = 220f; size = 2; targetGround = false; shoot.shotDelay = 5f; shoot.shots = 2; recoil = 1f; rotateSpeed = 15f; inaccuracy = 17f; shootCone = 35f; scaledHealth = 200; shootSound = Sounds.shootSnap; coolant = consumeCoolant(0.2f); researchCostMultiplier = 0.05f; limitRange(2); }}; scorch = new ItemTurret("scorch"){{ requirements(Category.turret, with(Items.copper, 25, Items.graphite, 22)); ammo( Items.coal, new BulletType(3.35f, 17f){{ ammoMultiplier = 3f; 
hitSize = 7f; lifetime = 18f; pierce = true; collidesAir = false; statusDuration = 60f * 4; shootEffect = Fx.shootSmallFlame; hitEffect = Fx.hitFlameSmall; despawnEffect = Fx.none; status = StatusEffects.burning; keepVelocity = false; hittable = false; }}, Items.pyratite, new BulletType(4f, 60f){{ ammoMultiplier = 6f; hitSize = 7f; lifetime = 18f; pierce = true; collidesAir = false; statusDuration = 60f * 10; shootEffect = Fx.shootPyraFlame; hitEffect = Fx.hitFlameSmall; despawnEffect = Fx.none; status = StatusEffects.burning; hittable = false; }} ); recoil = 0f; reload = 6f; coolantMultiplier = 1.5f; range = 60f; shootY = 3; shootCone = 50f; targetAir = false; ammoUseEffect = Fx.none; health = 400; shootSound = Sounds.flame; coolant = consumeCoolant(0.1f); }}; hail = new ItemTurret("hail"){{ requirements(Category.turret, with(Items.copper, 40, Items.graphite, 17)); ammo( Items.graphite, new ArtilleryBulletType(3f, 20){{ knockback = 0.8f; lifetime = 80f; width = height = 11f; collidesTiles = false; splashDamageRadius = 25f * 0.75f; splashDamage = 33f; }}, Items.silicon, new ArtilleryBulletType(3f, 20){{ knockback = 0.8f; lifetime = 80f; width = height = 11f; collidesTiles = false; splashDamageRadius = 25f * 0.75f; splashDamage = 33f; reloadMultiplier = 1.2f; ammoMultiplier = 3f; homingPower = 0.08f; homingRange = 50f; }}, Items.pyratite, new ArtilleryBulletType(3f, 25){{ hitEffect = Fx.blastExplosion; knockback = 0.8f; lifetime = 80f; width = height = 13f; collidesTiles = false; splashDamageRadius = 25f * 0.75f; splashDamage = 45f; status = StatusEffects.burning; statusDuration = 60f * 12f; frontColor = Pal.lightishOrange; backColor = Pal.lightOrange; makeFire = true; trailEffect = Fx.incendTrail; ammoMultiplier = 4f; }} ); targetAir = false; reload = 60f; recoil = 2f; range = 235f; inaccuracy = 1f; shootCone = 10f; health = 260; shootSound = Sounds.bang; coolant = consumeCoolant(0.1f); limitRange(0f); }}; wave = new LiquidTurret("wave"){{ 
requirements(Category.turret, with(Items.metaglass, 45, Items.lead, 75, Items.copper, 25)); ammo( Liquids.water,new LiquidBulletType(Liquids.water){{ knockback = 0.7f; drag = 0.01f; layer = Layer.bullet - 2f; }}, Liquids.slag, new LiquidBulletType(Liquids.slag){{ damage = 4; drag = 0.01f; }}, Liquids.cryofluid, new LiquidBulletType(Liquids.cryofluid){{ drag = 0.01f; }}, Liquids.oil, new LiquidBulletType(Liquids.oil){{ drag = 0.01f; layer = Layer.bullet - 2f; }} ); size = 2; recoil = 0f; reload = 3f; inaccuracy = 5f; shootCone = 50f; liquidCapacity = 10f; shootEffect = Fx.shootLiquid; range = 110f; scaledHealth = 250; flags = EnumSet.of(BlockFlag.turret, BlockFlag.extinguisher); }}; //TODO these may work in space, but what's the point? lancer = new PowerTurret("lancer"){{ requirements(Category.turret, with(Items.copper, 60, Items.lead, 70, Items.silicon, 60, Items.titanium, 30)); range = 165f; shoot.firstShotDelay = 40f; recoil = 2f; reload = 80f; shake = 2f; shootEffect = Fx.lancerLaserShoot; smokeEffect = Fx.none; heatColor = Color.red; size = 2; scaledHealth = 280; targetAir = false; moveWhileCharging = false; accurateDelay = false; shootSound = Sounds.laser; coolant = consumeCoolant(0.2f); consumePower(6f); shootType = new LaserBulletType(140){{ colors = new Color[]{Pal.lancerLaser.cpy().a(0.4f), Pal.lancerLaser, Color.white}; //TODO merge chargeEffect = new MultiEffect(Fx.lancerLaserCharge, Fx.lancerLaserChargeBegin); buildingDamageMultiplier = 0.25f; hitEffect = Fx.hitLancer; hitSize = 4; lifetime = 16f; drawSize = 400f; collidesAir = false; length = 173f; ammoMultiplier = 1f; pierceCap = 4; }}; }}; arc = new PowerTurret("arc"){{ requirements(Category.turret, with(Items.copper, 50, Items.lead, 50)); shootType = new LightningBulletType(){{ damage = 20; lightningLength = 25; collidesAir = false; ammoMultiplier = 1f; //for visual stats only. 
buildingDamageMultiplier = 0.25f; lightningType = new BulletType(0.0001f, 0f){{ lifetime = Fx.lightning.lifetime; hitEffect = Fx.hitLancer; despawnEffect = Fx.none; status = StatusEffects.shocked; statusDuration = 10f; hittable = false; lightColor = Color.white; collidesAir = false; buildingDamageMultiplier = 0.25f; }}; }}; reload = 35f; shootCone = 40f; rotateSpeed = 8f; targetAir = false; range = 90f; shootEffect = Fx.lightningShoot; heatColor = Color.red; recoil = 1f; size = 1; health = 260; shootSound = Sounds.spark; consumePower(3.3f); coolant = consumeCoolant(0.1f); }}; parallax = new TractorBeamTurret("parallax"){{ requirements(Category.turret, with(Items.silicon, 120, Items.titanium, 90, Items.graphite, 30)); hasPower = true; size = 2; force = 12f; scaledForce = 6f; range = 240f; damage = 0.3f; scaledHealth = 160; rotateSpeed = 10; consumePower(3f); }}; swarmer = new ItemTurret("swarmer"){{ requirements(Category.turret, with(Items.graphite, 35, Items.titanium, 35, Items.plastanium, 45, Items.silicon, 30)); ammo( Items.blastCompound, new MissileBulletType(3.7f, 10){{ width = 8f; height = 8f; shrinkY = 0f; splashDamageRadius = 30f; splashDamage = 30f * 1.5f; ammoMultiplier = 5f; hitEffect = Fx.blastExplosion; despawnEffect = Fx.blastExplosion; status = StatusEffects.blasted; statusDuration = 60f; }}, Items.pyratite, new MissileBulletType(3.7f, 12){{ frontColor = Pal.lightishOrange; backColor = Pal.lightOrange; width = 7f; height = 8f; shrinkY = 0f; homingPower = 0.08f; splashDamageRadius = 20f; splashDamage = 30f * 1.5f; makeFire = true; ammoMultiplier = 5f; hitEffect = Fx.blastExplosion; status = StatusEffects.burning; }}, Items.surgeAlloy, new MissileBulletType(3.7f, 18){{ width = 8f; height = 8f; shrinkY = 0f; splashDamageRadius = 25f; splashDamage = 25f * 1.4f; hitEffect = Fx.blastExplosion; despawnEffect = Fx.blastExplosion; ammoMultiplier = 4f; lightningDamage = 10; lightning = 2; lightningLength = 10; }} ); shoot = new ShootBarrel(){{ barrels = new 
float[]{ -4, -1.25f, 0, 0, 0, 0, 4, -1.25f, 0 }; shots = 4; shotDelay = 5f; }}; shootY = 4.5f; reload = 30f; inaccuracy = 10f; range = 240f; consumeAmmoOnce = false; size = 2; scaledHealth = 300; shootSound = Sounds.missile; envEnabled |= Env.space; limitRange(5f); coolant = consumeCoolant(0.3f); }}; salvo = new ItemTurret("salvo"){{ requirements(Category.turret, with(Items.copper, 100, Items.graphite, 80, Items.titanium, 50)); ammo( Items.copper, new BasicBulletType(2.5f, 11){{ width = 7f; height = 9f; lifetime = 60f; ammoMultiplier = 2; }}, Items.graphite, new BasicBulletType(3.5f, 20){{ width = 9f; height = 12f; reloadMultiplier = 0.6f; ammoMultiplier = 4; lifetime = 60f; }}, Items.pyratite, new BasicBulletType(3.2f, 18){{ width = 10f; height = 12f; frontColor = Pal.lightishOrange; backColor = Pal.lightOrange; status = StatusEffects.burning; hitEffect = new MultiEffect(Fx.hitBulletSmall, Fx.fireHit); ammoMultiplier = 5; splashDamage = 12f; splashDamageRadius = 22f; makeFire = true; lifetime = 60f; }}, Items.silicon, new BasicBulletType(3f, 15, "bullet"){{ width = 7f; height = 9f; homingPower = 0.1f; reloadMultiplier = 1.5f; ammoMultiplier = 5; lifetime = 60f; }}, Items.thorium, new BasicBulletType(4f, 29, "bullet"){{ width = 10f; height = 13f; shootEffect = Fx.shootBig; smokeEffect = Fx.shootBigSmoke; ammoMultiplier = 4; lifetime = 60f; }} ); drawer = new DrawTurret(){{ parts.add(new RegionPart("-side"){{ progress = PartProgress.warmup; moveX = 0.6f; moveRot = -15f; mirror = true; layerOffset = 0.001f; moves.add(new PartMove(PartProgress.recoil, 0.5f, -0.5f, -8f)); }}, new RegionPart("-barrel"){{ progress = PartProgress.recoil; moveY = -2.5f; }}); }}; size = 2; range = 190f; reload = 31f; consumeAmmoOnce = false; ammoEjectBack = 3f; recoil = 0f; shake = 1f; shoot.shots = 4; shoot.shotDelay = 3f; ammoUseEffect = Fx.casing2; scaledHealth = 240; shootSound = Sounds.shootBig; limitRange(); coolant = consumeCoolant(0.2f); }}; segment = new 
PointDefenseTurret("segment"){{ requirements(Category.turret, with(Items.silicon, 130, Items.thorium, 80, Items.phaseFabric, 40, Items.titanium, 40)); scaledHealth = 250; range = 180f; hasPower = true; consumePower(8f); size = 2; shootLength = 5f; bulletDamage = 30f; reload = 8f; envEnabled |= Env.space; }}; tsunami = new LiquidTurret("tsunami"){{ requirements(Category.turret, with(Items.metaglass, 100, Items.lead, 400, Items.titanium, 250, Items.thorium, 100)); ammo( Liquids.water, new LiquidBulletType(Liquids.water){{ lifetime = 49f; speed = 4f; knockback = 1.7f; puddleSize = 8f; orbSize = 4f; drag = 0.001f; ammoMultiplier = 0.4f; statusDuration = 60f * 4f; damage = 0.2f; layer = Layer.bullet - 2f; }}, Liquids.slag, new LiquidBulletType(Liquids.slag){{ lifetime = 49f; speed = 4f; knockback = 1.3f; puddleSize = 8f; orbSize = 4f; damage = 4.75f; drag = 0.001f; ammoMultiplier = 0.4f; statusDuration = 60f * 4f; }}, Liquids.cryofluid, new LiquidBulletType(Liquids.cryofluid){{ lifetime = 49f; speed = 4f; knockback = 1.3f; puddleSize = 8f; orbSize = 4f; drag = 0.001f; ammoMultiplier = 0.4f; statusDuration = 60f * 4f; damage = 0.2f; }}, Liquids.oil, new LiquidBulletType(Liquids.oil){{ lifetime = 49f; speed = 4f; knockback = 1.3f; puddleSize = 8f; orbSize = 4f; drag = 0.001f; ammoMultiplier = 0.4f; statusDuration = 60f * 4f; damage = 0.2f; layer = Layer.bullet - 2f; }} ); size = 3; reload = 3f; shoot.shots = 2; velocityRnd = 0.1f; inaccuracy = 4f; recoil = 1f; shootCone = 45f; liquidCapacity = 40f; shootEffect = Fx.shootLiquid; range = 190f; scaledHealth = 250; flags = EnumSet.of(BlockFlag.turret, BlockFlag.extinguisher); }}; fuse = new ItemTurret("fuse"){{ requirements(Category.turret, with(Items.copper, 225, Items.graphite, 225, Items.thorium, 100)); reload = 35f; shake = 4f; range = 90f; recoil = 5f; shoot = new ShootSpread(3, 20f); shootCone = 30; size = 3; envEnabled |= Env.space; scaledHealth = 220; shootSound = Sounds.shotgun; coolant = consumeCoolant(0.3f); float 
brange = range + 10f; ammo( Items.titanium, new ShrapnelBulletType(){{ length = brange; damage = 66f; ammoMultiplier = 4f; width = 17f; reloadMultiplier = 1.3f; }}, Items.thorium, new ShrapnelBulletType(){{ length = brange; damage = 105f; ammoMultiplier = 5f; toColor = Pal.thoriumPink; shootEffect = smokeEffect = Fx.thoriumShoot; }} ); }}; ripple = new ItemTurret("ripple"){{ requirements(Category.turret, with(Items.copper, 150, Items.graphite, 135, Items.titanium, 60)); ammo( Items.graphite, new ArtilleryBulletType(3f, 20){{ knockback = 0.8f; lifetime = 80f; width = height = 11f; collidesTiles = false; splashDamageRadius = 25f * 0.75f; splashDamage = 33f; }}, Items.silicon, new ArtilleryBulletType(3f, 20){{ knockback = 0.8f; lifetime = 80f; width = height = 11f; collidesTiles = false; splashDamageRadius = 25f * 0.75f; splashDamage = 33f; reloadMultiplier = 1.2f; ammoMultiplier = 3f; homingPower = 0.08f; homingRange = 50f; }}, Items.pyratite, new ArtilleryBulletType(3f, 24){{ hitEffect = Fx.blastExplosion; knockback = 0.8f; lifetime = 80f; width = height = 13f; collidesTiles = false; splashDamageRadius = 25f * 0.75f; splashDamage = 45f; status = StatusEffects.burning; statusDuration = 60f * 12f; frontColor = Pal.lightishOrange; backColor = Pal.lightOrange; makeFire = true; trailEffect = Fx.incendTrail; ammoMultiplier = 4f; }}, Items.blastCompound, new ArtilleryBulletType(2f, 20, "shell"){{ hitEffect = Fx.blastExplosion; knockback = 0.8f; lifetime = 80f; width = height = 14f; collidesTiles = false; ammoMultiplier = 4f; splashDamageRadius = 45f * 0.75f; splashDamage = 55f; backColor = Pal.missileYellowBack; frontColor = Pal.missileYellow; status = StatusEffects.blasted; }}, Items.plastanium, new ArtilleryBulletType(3.4f, 20, "shell"){{ hitEffect = Fx.plasticExplosion; knockback = 1f; lifetime = 80f; width = height = 13f; collidesTiles = false; splashDamageRadius = 35f * 0.75f; splashDamage = 45f; fragBullet = new BasicBulletType(2.5f, 10, "bullet"){{ width = 10f; 
// NOTE(review): interior of a large content-definition routine; the statements
// below finish a turret definition whose opening lines are above this chunk.
// Tail of the previous turret: remaining frag-bullet fields, then turret stats.
height = 12f; shrinkY = 1f; lifetime = 15f;
backColor = Pal.plastaniumBack; frontColor = Pal.plastaniumFront;
despawnEffect = Fx.none; collidesAir = false;
}};
fragBullets = 10;
backColor = Pal.plastaniumBack; frontColor = Pal.plastaniumFront;
}}
);
targetAir = false; size = 3;
shoot.shots = 4; inaccuracy = 12f; reload = 60f;
ammoEjectBack = 5f; ammoUseEffect = Fx.casing3Double; ammoPerShot = 2;
velocityRnd = 0.2f; recoil = 6f; shake = 2f;
range = 290f; minRange = 50f;
coolant = consumeCoolant(0.3f);
scaledHealth = 130;
shootSound = Sounds.artillery;
}};

// Cyclone: 3x3 flak turret with metaglass/blast-compound/plastanium/surge ammo.
cyclone = new ItemTurret("cyclone"){{
    requirements(Category.turret, with(Items.copper, 200, Items.titanium, 125, Items.plastanium, 80));
    ammo(
        // Metaglass: light flak shell that bursts into glass fragments.
        Items.metaglass, new FlakBulletType(4f, 6){{
            ammoMultiplier = 2f; shootEffect = Fx.shootSmall; reloadMultiplier = 0.8f;
            width = 6f; height = 8f;
            hitEffect = Fx.flakExplosion;
            splashDamage = 45f; splashDamageRadius = 25f;
            fragBullet = new BasicBulletType(3f, 12, "bullet"){{
                width = 5f; height = 12f; shrinkY = 1f; lifetime = 20f;
                backColor = Pal.gray; frontColor = Color.white;
                despawnEffect = Fx.none;
            }};
            fragBullets = 4;
            explodeRange = 20f;
            collidesGround = true;
        }},
        // Blast compound: pure splash damage with the "blasted" status.
        Items.blastCompound, new FlakBulletType(4f, 8){{
            shootEffect = Fx.shootBig;
            ammoMultiplier = 5f;
            splashDamage = 45f; splashDamageRadius = 60f;
            collidesGround = true;
            status = StatusEffects.blasted;
            statusDuration = 60f;
        }},
        // Plastanium: splash plus six plastanium fragments.
        Items.plastanium, new FlakBulletType(4f, 8){{
            ammoMultiplier = 4f;
            splashDamageRadius = 40f; splashDamage = 37.5f;
            fragBullet = new BasicBulletType(2.5f, 12, "bullet"){{
                width = 10f; height = 12f; shrinkY = 1f; lifetime = 15f;
                backColor = Pal.plastaniumBack; frontColor = Pal.plastaniumFront;
                despawnEffect = Fx.none;
            }};
            fragBullets = 6;
            hitEffect = Fx.plasticExplosion;
            frontColor = Pal.plastaniumFront; backColor = Pal.plastaniumBack;
            shootEffect = Fx.shootBig;
            collidesGround = true;
            explodeRange = 20f;
        }},
        // Surge alloy: boosted splash damage plus lightning on hit.
        Items.surgeAlloy, new FlakBulletType(4.5f, 13){{
            ammoMultiplier = 5f;
            splashDamage = 50f * 1.5f;
            splashDamageRadius = 38f;
            lightning = 2; lightningLength = 7;
            shootEffect = Fx.shootBig;
            collidesGround = true;
            explodeRange = 20f;
        }}
    );
    shootY = 10f;
    // Three barrels firing in sequence (x, y, rotation triplets).
    shoot = new ShootBarrel(){{
        barrels = new float[]{
            0f, 1f, 0f,
            3f, 0f, 0f,
            -3f, 0f, 0f,
        };
    }};
    recoils = 3;
    drawer = new DrawTurret(){{
        // One independently recoiling sprite per barrel.
        for(int i = 3; i > 0; i--){
            int f = i;
            parts.add(new RegionPart("-barrel-" + i){{
                progress = PartProgress.recoil;
                recoilIndex = f - 1;
                under = true;
                moveY = -2f;
            }});
        }
    }};
    reload = 8f; range = 200f; size = 3;
    recoil = 1.5f; recoilTime = 10;
    rotateSpeed = 10f; inaccuracy = 10f; shootCone = 30f;
    shootSound = Sounds.shootSnap;
    coolant = consumeCoolant(0.3f);
    scaledHealth = 145;
    limitRange();
}};

// Foreshadow: 4x4 long-range railgun sniper; surge-alloy ammo only.
foreshadow = new ItemTurret("foreshadow"){{
    // Capture the range so the rail bullet's length matches it exactly.
    float brange = range = 500f;
    requirements(Category.turret, with(Items.copper, 1000, Items.metaglass, 600, Items.surgeAlloy, 300, Items.plastanium, 200, Items.silicon, 600));
    ammo(
        Items.surgeAlloy, new RailBulletType(){{
            shootEffect = Fx.instShoot;
            hitEffect = Fx.instHit;
            pierceEffect = Fx.railHit;
            smokeEffect = Fx.smokeCloud;
            pointEffect = Fx.instTrail;
            despawnEffect = Fx.instBomb;
            pointEffectSpace = 20f;
            damage = 1350;
            buildingDamageMultiplier = 0.2f;
            maxDamageFraction = 0.6f;
            pierceDamageFactor = 1f;
            length = brange;
            hitShake = 6f;
            ammoMultiplier = 1f;
        }}
    );
    maxAmmo = 40; ammoPerShot = 5;
    rotateSpeed = 2f; reload = 200f;
    ammoUseEffect = Fx.casing3Double;
    recoil = 5f;
    cooldownTime = reload;
    shake = 4f;
    size = 4; shootCone = 2f;
    shootSound = Sounds.railgun;
    unitSort = UnitSorts.strongest;
    envEnabled |= Env.space;
    coolantMultiplier = 0.4f;
    scaledHealth = 150;
    coolant = consumeCoolant(1f);
    consumePower(10f);
}};

// Spectre: 4x4 heavy alternating-barrel gun; graphite/thorium/pyratite ammo.
spectre = new ItemTurret("spectre"){{
    requirements(Category.turret, with(Items.copper, 900, Items.graphite, 300, Items.surgeAlloy, 250, Items.plastanium, 175, Items.thorium, 250));
    ammo(
        Items.graphite, new BasicBulletType(7.5f, 50){{
            hitSize = 4.8f;
            width = 15f; height = 21f;
            shootEffect = Fx.shootBig;
            ammoMultiplier = 4;
            reloadMultiplier = 1.7f;
            knockback = 0.3f;
        }},
        Items.thorium, new BasicBulletType(8f, 80){{
            hitSize = 5;
            width = 16f; height = 23f;
            shootEffect = Fx.shootBig;
            pierceCap = 2; pierceBuilding = true;
            knockback = 0.7f;
        }},
        // Pyratite: incendiary rounds with splash.
        Items.pyratite, new BasicBulletType(7f, 70){{
            hitSize = 5;
            width = 16f; height = 21f;
            frontColor = Pal.lightishOrange; backColor = Pal.lightOrange;
            status = StatusEffects.burning;
            hitEffect = new MultiEffect(Fx.hitBulletSmall, Fx.fireHit);
            shootEffect = Fx.shootBig;
            makeFire = true;
            pierceCap = 2; pierceBuilding = true;
            knockback = 0.6f;
            ammoMultiplier = 3;
            splashDamage = 20f; splashDamageRadius = 25f;
        }}
    );
    reload = 7f;
    recoilTime = reload * 2f;
    coolantMultiplier = 0.5f;
    ammoUseEffect = Fx.casing3;
    range = 260f;
    inaccuracy = 3f; recoil = 3f;
    shoot = new ShootAlternate(8f);
    shake = 2f;
    size = 4; shootCone = 24f;
    shootSound = Sounds.shootBig;
    scaledHealth = 160;
    coolant = consumeCoolant(1f);
    limitRange();
}};

// Meltdown: 4x4 sustained-beam laser turret; powered and coolant-fed.
meltdown = new LaserTurret("meltdown"){{
    requirements(Category.turret, with(Items.copper, 1200, Items.lead, 350, Items.graphite, 300, Items.surgeAlloy, 325, Items.silicon, 325));
    shootEffect = Fx.shootBigSmoke2;
    shootCone = 40f;
    recoil = 4f;
    size = 4;
    shake = 2f;
    range = 195f;
    reload = 90f;
    firingMoveFract = 0.5f;
    shootDuration = 230f;
    shootSound = Sounds.laserbig;
    loopSound = Sounds.beam;
    loopSoundVolume = 2f;
    envEnabled |= Env.space;
    shootType = new ContinuousLaserBulletType(78){{
        length = 200f;
        hitEffect = Fx.hitMeltdown;
        hitColor = Pal.meltdownHit;
        status = StatusEffects.melting;
        drawSize = 420f;
        timescaleDamage = true;
        incendChance = 0.4f; incendSpread = 5f; incendAmount = 1;
        ammoMultiplier = 1f;
    }};
    scaledHealth = 200;
    coolant = consumeCoolant(0.5f);
    consumePower(17f);
}};

// Breach: 3x3 piercing turret; beryllium/tungsten ammo.
breach = new ItemTurret("breach"){{
    requirements(Category.turret, with(Items.beryllium, 150, Items.silicon, 150, Items.graphite, 250));
    // Shoot effect shared by both ammo types below.
    Effect sfe = new MultiEffect(Fx.shootBigColor, Fx.colorSparkBig);
    ammo(
        Items.beryllium, new BasicBulletType(7.5f, 85){{
            width = 12f;
            hitSize = 7f;
            height = 20f;
            shootEffect = sfe;
            smokeEffect = Fx.shootBigSmoke;
            ammoMultiplier = 1;
            pierceCap = 2; pierce = true; pierceBuilding = true;
            hitColor = backColor = trailColor = Pal.berylShot;
            frontColor = Color.white;
            trailWidth = 2.1f; trailLength = 10;
            hitEffect = despawnEffect = Fx.hitBulletColor;
            buildingDamageMultiplier = 0.3f;
        }},
        Items.tungsten, new BasicBulletType(8f, 95){{
            width = 13f; height = 19f;
            hitSize = 7f;
            shootEffect = sfe;
            smokeEffect = Fx.shootBigSmoke;
            ammoMultiplier = 1;
            reloadMultiplier = 1f;
            pierceCap = 3; pierce = true; pierceBuilding = true;
            hitColor = backColor = trailColor = Pal.tungstenShot;
            frontColor = Color.white;
            trailWidth = 2.2f; trailLength = 11;
            hitEffect = despawnEffect = Fx.hitBulletColor;
            rangeChange = 40f;
            buildingDamageMultiplier = 0.3f;
        }}
    );
    coolantMultiplier = 6f;
    shootSound = Sounds.shootAlt;
    targetUnderBlocks = false;
    shake = 1f;
    ammoPerShot = 2;
    drawer = new DrawTurret("reinforced-");
    shootY = -2;
    outlineColor = Pal.darkOutline;
    size = 3;
    envEnabled |= Env.space;
    reload = 40f;
    recoil = 2f;
    range = 190;
    shootCone = 3f;
    scaledHealth = 180;
    rotateSpeed = 1.5f;
    researchCostMultiplier = 0.05f;
    coolant = consume(new ConsumeLiquid(Liquids.water, 15f / 60f));
    limitRange(12f);
}};

// Diffuse: 3x3 shotgun-style turret; graphite ammo fired in a 15-shot spread.
diffuse = new ItemTurret("diffuse"){{
    requirements(Category.turret, with(Items.beryllium, 150, Items.silicon, 200, Items.graphite, 200, Items.tungsten, 50));
    ammo(
        Items.graphite, new BasicBulletType(8f, 41){{
            knockback = 4f;
            width = 25f;
            hitSize = 7f;
            height = 20f;
            shootEffect = Fx.shootBigColor;
            smokeEffect = Fx.shootSmokeSquareSparse;
            ammoMultiplier = 1;
            hitColor = backColor = trailColor = Color.valueOf("ea8878");
            frontColor = Pal.redLight;
            trailWidth = 6f; trailLength = 3;
            hitEffect = despawnEffect = Fx.hitSquaresColor;
            buildingDamageMultiplier = 0.2f;
        }}
    );
    shoot = new ShootSpread(15, 4f);
    coolantMultiplier = 6f;
    inaccuracy = 0.2f;
    velocityRnd = 0.17f;
    shake = 1f;
    ammoPerShot = 3;
    maxAmmo = 30;
    consumeAmmoOnce = true;
    targetUnderBlocks = false;
    shootSound = Sounds.shootAltLong;
    drawer = new DrawTurret("reinforced-"){{
        parts.add(new RegionPart("-front"){{
            progress = PartProgress.warmup;
            moveRot = -10f;
            mirror = true;
            moves.add(new PartMove(PartProgress.recoil, 0f, -3f, -5f));
            heatColor = Color.red;
        }});
    }};
    shootY = 5f;
    outlineColor = Pal.darkOutline;
    size = 3;
    envEnabled |= Env.space;
    reload = 30f;
    recoil = 2f;
    range = 125;
    shootCone = 40f;
    scaledHealth = 210;
    rotateSpeed = 3f;
    coolant = consume(new ConsumeLiquid(Liquids.water, 15f / 60f));
    limitRange(25f);
}};

// Sublimate: 3x3 continuous flamethrower that consumes liquids as ammo
// (ammo entries follow past this point).
sublimate = new ContinuousLiquidTurret("sublimate"){{
    requirements(Category.turret, with(Items.tungsten, 150, Items.silicon, 200, Items.oxide, 40, Items.beryllium, 400));
    drawer = new DrawTurret("reinforced-"){{
        Color heatc = Color.valueOf("fa2859");
        heatColor = heatc;
        parts.addAll(
            new RegionPart("-back"){{
                progress = PartProgress.warmup;
                mirror = true;
                moveRot = 40f;
                x = 22 / 4f; y = -1f / 4f;
                moveY = 6f / 4f;
                under = true;
                heatColor = heatc;
            }},
            new RegionPart("-front"){{
                progress = PartProgress.warmup;
                mirror = true;
                moveRot = 40f;
                x = 20 / 4f; y = 17f / 4f;
                moveX = 1f; moveY = 1f;
                under = true;
                heatColor = heatc;
            }},
            new RegionPart("-nozzle"){{
                progress = PartProgress.warmup;
                mirror = true;
                moveX = 8f / 4f;
                heatColor = Color.valueOf("f03b0e");
            }});
    }};
    outlineColor = Pal.darkOutline;
    liquidConsumed = 10f / 60f;
    targetInterval = 5f;
    targetUnderBlocks = false;
    // Capture range so the flame bullets below can be sized to it.
    float r = range = 130f;
    loopSound = Sounds.torch;
    shootSound = Sounds.none;
    loopSoundVolume = 1f;
    //TODO balance, set up, where is liquid/sec displayed? status effects maybe?
// Liquid "ammo" for the sublimate flamethrower defined above: ozone and cyanogen.
    ammo(
        Liquids.ozone, new ContinuousFlameBulletType(){{
            damage = 60f;
            length = r;
            knockback = 1f;
            pierceCap = 2;
            buildingDamageMultiplier = 0.3f;
            colors = new Color[]{Color.valueOf("eb7abe").a(0.55f), Color.valueOf("e189f5").a(0.7f), Color.valueOf("907ef7").a(0.8f), Color.valueOf("91a4ff"), Color.white};
        }},
        // Cyanogen: stronger flame with extra reach.
        Liquids.cyanogen, new ContinuousFlameBulletType(){{
            damage = 130f;
            rangeChange = 70f;
            length = r + rangeChange;
            knockback = 2f;
            pierceCap = 3;
            buildingDamageMultiplier = 0.3f;
            colors = new Color[]{Color.valueOf("465ab8").a(0.55f), Color.valueOf("66a6d2").a(0.7f), Color.valueOf("89e8b6").a(0.8f), Color.valueOf("cafcbe"), Color.white};
            flareColor = Color.valueOf("89e8b6");
            lightColor = hitColor = flareColor;
        }}
    );
    scaledHealth = 210;
    shootY = 7f;
    size = 3;
    researchCost = with(Items.tungsten, 400, Items.silicon, 400, Items.oxide, 80, Items.beryllium, 800);
}};

// Titan: 4x4 artillery turret; thorium shells, ground targets only.
titan = new ItemTurret("titan"){{
    requirements(Category.turret, with(Items.tungsten, 250, Items.silicon, 300, Items.thorium, 400));
    ammo(
        //TODO 1 more ammo type, decide on base type
        Items.thorium, new ArtilleryBulletType(2.5f, 350, "shell"){{
            hitEffect = new MultiEffect(Fx.titanExplosion, Fx.titanSmoke);
            despawnEffect = Fx.none;
            knockback = 2f;
            lifetime = 140f;
            height = 19f; width = 17f;
            splashDamageRadius = 65f;
            splashDamage = 350f;
            scaledSplashDamage = true;
            backColor = hitColor = trailColor = Color.valueOf("ea8878").lerp(Pal.redLight, 0.5f);
            frontColor = Color.white;
            ammoMultiplier = 1f;
            hitSound = Sounds.titanExplosion;
            status = StatusEffects.blasted;
            trailLength = 32; trailWidth = 3.35f;
            trailSinScl = 2.5f; trailSinMag = 0.5f;
            trailEffect = Fx.none;
            despawnShake = 7f;
            shootEffect = Fx.shootTitan;
            smokeEffect = Fx.shootSmokeTitan;
            trailInterp = v -> Math.max(Mathf.slope(v), 0.8f);
            shrinkX = 0.2f; shrinkY = 0.1f;
            buildingDamageMultiplier = 0.3f;
        }}
    );
    shootSound = Sounds.mediumCannon;
    ammoPerShot = 4;
    maxAmmo = ammoPerShot * 3;
    targetAir = false;
    shake = 4f;
    recoil = 1f;
    reload = 60f * 2.3f;
    shootY = 7f;
    rotateSpeed = 1.4f;
    minWarmup = 0.85f;
    shootWarmupSpeed = 0.07f;
    coolant = consume(new ConsumeLiquid(Liquids.water, 30f / 60f));
    coolantMultiplier = 1.5f;
    drawer = new DrawTurret("reinforced-"){{
        parts.addAll(
            new RegionPart("-barrel"){{
                progress = PartProgress.recoil.curve(Interp.pow2In);
                moveY = -5f * 4f / 3f;
                heatColor = Color.valueOf("f03b0e");
                mirror = false;
            }},
            new RegionPart("-side"){{
                heatProgress = PartProgress.warmup;
                progress = PartProgress.warmup;
                mirror = true;
                moveX = 2f * 4f / 3f;
                moveY = -0.5f;
                moveRot = -40f;
                under = true;
                heatColor = Color.red.cpy();
            }});
    }};
    // NOTE(review): shootWarmupSpeed is assigned twice (0.07f above, 0.08f here);
    // the later assignment wins.
    shootWarmupSpeed = 0.08f;
    outlineColor = Pal.darkOutline;
    consumeLiquid(Liquids.hydrogen, 5f / 60f);
    scaledHealth = 250;
    range = 390f;
    size = 4;
}};

// Disperse: 4x4 anti-air turret; tungsten ammo, cannot target ground.
disperse = new ItemTurret("disperse"){{
    requirements(Category.turret, with(Items.thorium, 50, Items.oxide, 150, Items.silicon, 200, Items.beryllium, 350));
    ammo(Items.tungsten, new BasicBulletType(){{
        damage = 65;
        speed = 8.5f;
        width = height = 16;
        shrinkY = 0.3f;
        backSprite = "large-bomb-back";
        sprite = "mine-bullet";
        velocityRnd = 0.11f;
        collidesGround = false;
        collidesTiles = false;
        shootEffect = Fx.shootBig2;
        smokeEffect = Fx.shootSmokeDisperse;
        frontColor = Color.white;
        backColor = trailColor = hitColor = Color.sky;
        trailChance = 0.44f;
        ammoMultiplier = 3f;
        lifetime = 34f;
        rotationOffset = 90f;
        trailRotation = true;
        trailEffect = Fx.disperseTrail;
        hitEffect = despawnEffect = Fx.hitBulletColor;
    }});
    reload = 9f;
    shootY = 15f;
    rotateSpeed = 5f;
    shootCone = 30f;
    consumeAmmoOnce = true;
    shootSound = Sounds.shootBig;
    drawer = new DrawTurret("reinforced-"){{
        parts.add(new RegionPart("-side"){{
            mirror = true;
            under = true;
            moveX = 1.75f;
            moveY = -0.5f;
        }},
        new RegionPart("-mid"){{
            under = true;
            moveY = -1.5f;
            progress = PartProgress.recoil;
            heatProgress = PartProgress.recoil.add(0.25f).min(PartProgress.warmup);
            heatColor = Color.sky.cpy().a(0.9f);
        }},
        new RegionPart("-blade"){{
            heatProgress = PartProgress.warmup;
            heatColor = Color.sky.cpy().a(0.9f);
            mirror = true;
            under = true;
            moveY = 1f;
            moveX = 1.5f;
            moveRot = 8;
        }});
    }};
    shoot = new ShootAlternate(){{
        spread = 4.7f;
        shots = 4;
        barrels = 4;
    }};
    targetGround = false;
    inaccuracy = 8f;
    shootWarmupSpeed = 0.08f;
    outlineColor = Pal.darkOutline;
    scaledHealth = 280;
    range = 310f;
    size = 4;
    coolant = consume(new ConsumeLiquid(Liquids.water, 20f / 60f));
    coolantMultiplier = 2.5f;
    limitRange(5f);
}};

// Afflict: 4x4 power turret firing surge orbs that spawn interval/frag bullets.
afflict = new PowerTurret("afflict"){{
    requirements(Category.turret, with(Items.surgeAlloy, 100, Items.silicon, 200, Items.graphite, 250, Items.oxide, 40));
    shootType = new BasicBulletType(){{
        shootEffect = new MultiEffect(Fx.shootTitan, new WaveEffect(){{
            colorTo = Pal.surge;
            sizeTo = 26f;
            lifetime = 14f;
            strokeFrom = 4f;
        }});
        smokeEffect = Fx.shootSmokeTitan;
        hitColor = Pal.surge;
        sprite = "large-orb";
        trailEffect = Fx.missileTrail;
        trailInterval = 3f;
        trailParam = 4f;
        pierceCap = 2;
        fragOnHit = false;
        speed = 5f;
        damage = 180f;
        lifetime = 80f;
        width = height = 16f;
        backColor = Pal.surge;
        frontColor = Color.white;
        shrinkX = shrinkY = 0f;
        trailColor = Pal.surge;
        trailLength = 12;
        trailWidth = 2.2f;
        despawnEffect = hitEffect = new ExplosionEffect(){{
            waveColor = Pal.surge;
            smokeColor = Color.gray;
            sparkColor = Pal.sap;
            waveStroke = 4f;
            waveRad = 40f;
        }};
        despawnSound = Sounds.dullExplosion;
        //TODO shoot sound
        shootSound = Sounds.cannon;
        // The same bullet serves both as frag-on-despawn and as interval spawn.
        fragBullet = intervalBullet = new BasicBulletType(3f, 35){{
            width = 9f;
            hitSize = 5f;
            height = 15f;
            pierce = true;
            lifetime = 35f;
            pierceBuilding = true;
            hitColor = backColor = trailColor = Pal.surge;
            frontColor = Color.white;
            trailWidth = 2.1f;
            trailLength = 5;
            hitEffect = despawnEffect = new WaveEffect(){{
                colorFrom = colorTo = Pal.surge;
                sizeTo = 4f;
                strokeFrom = 4f;
                lifetime = 10f;
            }};
            buildingDamageMultiplier = 0.3f;
            homingPower = 0.2f;
        }};
        bulletInterval = 3f;
        intervalRandomSpread = 20f;
        intervalBullets = 2;
        intervalAngle = 180f;
        intervalSpread = 300f;
        fragBullets = 20;
        fragVelocityMin = 0.5f;
        fragVelocityMax = 1.5f;
        fragLifeMin = 0.5f;
    }};
    drawer = new DrawTurret("reinforced-"){{
        parts.add(new RegionPart("-blade"){{
            progress = PartProgress.recoil;
            heatColor = Color.valueOf("ff6214");
            mirror = true;
            under = true;
            moveX = 2f;
            moveY = -1f;
            moveRot = -7f;
        }},
        new RegionPart("-blade-glow"){{
            progress = PartProgress.recoil;
            heatProgress = PartProgress.warmup;
            heatColor = Color.valueOf("ff6214");
            drawRegion = false;
            mirror = true;
            under = true;
            moveX = 2f;
            moveY = -1f;
            moveRot = -7f;
        }});
    }};
    consumePower(5f);
    heatRequirement = 10f;
    maxHeatEfficiency = 2f;
    inaccuracy = 1f;
    shake = 2f;
    shootY = 4;
    outlineColor = Pal.darkOutline;
    size = 4;
    envEnabled |= Env.space;
    reload = 100f;
    cooldownTime = reload;
    recoil = 3f;
    range = 350;
    shootCone = 20f;
    scaledHealth = 220;
    rotateSpeed = 1.5f;
    researchCostMultiplier = 0.04f;
    limitRange(9f);
}};

// Lustre: 4x4 continuous point-laser turret; consumes nitrogen.
lustre = new ContinuousTurret("lustre"){{
    requirements(Category.turret, with(Items.silicon, 250, Items.graphite, 200, Items.oxide, 50, Items.carbide, 90));
    shootType = new PointLaserBulletType(){{
        damage = 200f;
        buildingDamageMultiplier = 0.3f;
        hitColor = Color.valueOf("fda981");
    }};
    drawer = new DrawTurret("reinforced-"){{
        // Pulsing heat progress shared by the inner/mid parts.
        var heatp = PartProgress.warmup.blend(p -> Mathf.absin(2f, 1f) * p.warmup, 0.2f);
        parts.add(new RegionPart("-blade"){{
            progress = PartProgress.warmup;
            heatProgress = PartProgress.warmup;
            heatColor = Color.valueOf("ff6214");
            mirror = true;
            under = true;
            moveX = 2f;
            moveRot = -7f;
            moves.add(new PartMove(PartProgress.warmup, 0f, -2f, 3f));
        }},
        new RegionPart("-inner"){{
            heatProgress = heatp;
            progress = PartProgress.warmup;
            heatColor = Color.valueOf("ff6214");
            mirror = true;
            under = false;
            moveX = 2f;
            moveY = -8f;
        }},
        new RegionPart("-mid"){{
            heatProgress = heatp;
            progress = PartProgress.warmup;
            heatColor = Color.valueOf("ff6214");
            moveY = -8f;
            mirror = false;
            under = true;
        }});
    }};
    shootSound = Sounds.none;
    loopSoundVolume = 1f;
    loopSound = Sounds.laserbeam;
    shootWarmupSpeed = 0.08f;
    shootCone = 360f;
    aimChangeSpeed = 0.9f;
    rotateSpeed = 0.9f;
    shootY = 0.5f;
    outlineColor = Pal.darkOutline;
    size = 4;
    envEnabled |= Env.space;
    range = 250f;
    scaledHealth = 210;
    //TODO is this a good idea to begin with?
    unitSort = UnitSorts.strongest;
    consumeLiquid(Liquids.nitrogen, 6f / 60f);
}};

// Scathe: 4x4 long-range missile turret; the carbide "bullet" spawns a guided
// missile unit that carries the actual warhead (definition continues below).
scathe = new ItemTurret("scathe"){{
    requirements(Category.turret, with(Items.silicon, 450, Items.graphite, 400, Items.tungsten, 500, Items.oxide, 100, Items.carbide, 200));
    ammo(
        Items.carbide, new BasicBulletType(0f, 1){{
            shootEffect = Fx.shootBig;
            smokeEffect = Fx.shootSmokeMissile;
            ammoMultiplier = 1f;
            spawnUnit = new MissileUnitType("scathe-missile"){{
                speed = 4.6f;
                maxRange = 6f;
                lifetime = 60f * 5.5f;
                outlineColor = Pal.darkOutline;
                engineColor = trailColor = Pal.redLight;
                engineLayer = Layer.effect;
                engineSize = 3.1f;
                engineOffset = 10f;
                rotateSpeed = 0.25f;
                trailLength = 18;
                missileAccelTime = 50f;
                lowAltitude = true;
                loopSound = Sounds.missileTrail;
                loopSoundVolume = 0.6f;
                deathSound = Sounds.largeExplosion;
                targetAir = false;
                targetUnderBlocks = false;
                fogRadius = 6f;
                health = 210;
                // The missile's "weapon" fires its own death explosion.
                weapons.add(new Weapon(){{
                    shootCone = 360f;
                    mirror = false;
                    reload = 1f;
                    deathExplosionEffect = Fx.massiveExplosion;
                    shootOnDeath = true;
                    shake = 10f;
                    bullet = new ExplosionBulletType(1500f, 65f){{
                        hitColor = Pal.redLight;
                        shootEffect = new MultiEffect(Fx.massiveExplosion, Fx.scatheExplosion, Fx.scatheLight, new WaveEffect(){{
                            lifetime = 10f;
                            strokeFrom = 4f;
                            sizeTo = 130f;
                        }});
                        collidesAir = false;
                        buildingDamageMultiplier = 0.25f;
                        ammoMultiplier = 1f;
                        fragLifeMin = 0.1f;
                        fragBullets = 7;
                        fragBullet = new ArtilleryBulletType(3.4f, 32){{
                            buildingDamageMultiplier = 0.3f;
                            drag = 0.02f;
                            hitEffect = Fx.massiveExplosion;
                            despawnEffect = Fx.scatheSlash;
                            knockback = 0.8f;
                            lifetime = 23f;
                            width = height = 18f;
                            collidesTiles = false;
                            splashDamageRadius = 40f;
                            splashDamage = 160f;
                            backColor = trailColor = hitColor = Pal.redLight;
                            frontColor = Color.white;
                            smokeEffect = Fx.shootBigSmoke2;
                            despawnShake = 7f;
                            lightRadius = 30f;
// (continuation) remaining fields of scathe's missile frag bullet, then the
// rest of the scathe turret, the smite turret, and the start of malign.
                            lightColor = Pal.redLight;
                            lightOpacity = 0.5f;
                            trailLength = 20;
                            trailWidth = 3.5f;
                            trailEffect = Fx.none;
                        }};
                    }};
                }});
                // Smoke trail behind the missile while it flies.
                abilities.add(new MoveEffectAbility(){{
                    effect = Fx.missileTrailSmoke;
                    rotation = 180f;
                    y = -9f;
                    color = Color.grays(0.6f).lerp(Pal.redLight, 0.5f).a(0.4f);
                    interval = 7f;
                }});
            }};
        }}
    );
    drawer = new DrawTurret("reinforced-"){{
        parts.add(new RegionPart("-blade"){{
            progress = PartProgress.warmup;
            heatProgress = PartProgress.warmup;
            heatColor = Color.red;
            moveRot = -22f;
            moveX = 0f;
            moveY = -5f;
            mirror = true;
            children.add(new RegionPart("-side"){{
                progress = PartProgress.warmup.delay(0.6f);
                heatProgress = PartProgress.recoil;
                heatColor = Color.red;
                mirror = true;
                under = false;
                moveY = -4f;
                moveX = 1f;
                moves.add(new PartMove(PartProgress.recoil, 1f, 6f, -40f));
            }});
        }},
        new RegionPart("-mid"){{
            progress = PartProgress.recoil;
            heatProgress = PartProgress.warmup.add(-0.2f).add(p -> Mathf.sin(9f, 0.2f) * p.warmup);
            mirror = false;
            under = true;
            moveY = -5f;
        }},
        // Missile sprite fades out as the reload progresses.
        new RegionPart("-missile"){{
            progress = PartProgress.reload.curve(Interp.pow2In);
            colorTo = new Color(1f, 1f, 1f, 0f);
            color = Color.white;
            mixColorTo = Pal.accent;
            mixColor = new Color(1f, 1f, 1f, 0f);
            outline = false;
            under = true;
            layerOffset = -0.01f;
            moves.add(new PartMove(PartProgress.warmup.inv(), 0f, -4f, 0f));
        }});
    }};
    recoil = 0.5f;
    fogRadiusMultiplier = 0.4f;
    coolantMultiplier = 6f;
    shootSound = Sounds.missileLaunch;
    minWarmup = 0.94f;
    shootWarmupSpeed = 0.03f;
    targetAir = false;
    targetUnderBlocks = false;
    shake = 6f;
    ammoPerShot = 15;
    maxAmmo = 30;
    shootY = -1;
    outlineColor = Pal.darkOutline;
    size = 4;
    envEnabled |= Env.space;
    reload = 600f;
    range = 1350;
    shootCone = 1f;
    scaledHealth = 220;
    rotateSpeed = 0.9f;
    coolant = consume(new ConsumeLiquid(Liquids.water, 15f / 60f));
    limitRange();
}};

// Smite: 5x5 turret; surge-alloy piercing orbs that emit interval lightning bullets.
smite = new ItemTurret("smite"){{
    requirements(Category.turret, with(Items.oxide, 200, Items.surgeAlloy, 400, Items.silicon, 800, Items.carbide, 500, Items.phaseFabric, 300));
    ammo(
        //this is really lazy
        Items.surgeAlloy, new BasicBulletType(7f, 250){{
            sprite = "large-orb";
            width = 17f;
            height = 21f;
            hitSize = 8f;
            shootEffect = new MultiEffect(Fx.shootTitan, Fx.colorSparkBig, new WaveEffect(){{
                colorFrom = colorTo = Pal.accent;
                lifetime = 12f;
                sizeTo = 20f;
                strokeFrom = 3f;
                strokeTo = 0.3f;
            }});
            smokeEffect = Fx.shootSmokeSmite;
            ammoMultiplier = 1;
            pierceCap = 4;
            pierce = true;
            pierceBuilding = true;
            hitColor = backColor = trailColor = Pal.accent;
            frontColor = Color.white;
            trailWidth = 2.8f;
            trailLength = 9;
            hitEffect = Fx.hitBulletColor;
            buildingDamageMultiplier = 0.3f;
            despawnEffect = new MultiEffect(Fx.hitBulletColor, new WaveEffect(){{
                sizeTo = 30f;
                colorFrom = colorTo = Pal.accent;
                lifetime = 12f;
            }});
            trailRotation = true;
            trailEffect = Fx.disperseTrail;
            trailInterval = 3f;
            intervalBullet = new LightningBulletType(){{
                damage = 30;
                collidesAir = false;
                ammoMultiplier = 1f;
                lightningColor = Pal.accent;
                lightningLength = 5;
                lightningLengthRand = 10;
                //for visual stats only.
                buildingDamageMultiplier = 0.25f;
                lightningType = new BulletType(0.0001f, 0f){{
                    lifetime = Fx.lightning.lifetime;
                    hitEffect = Fx.hitLancer;
                    despawnEffect = Fx.none;
                    status = StatusEffects.shocked;
                    statusDuration = 10f;
                    hittable = false;
                    lightColor = Color.white;
                    buildingDamageMultiplier = 0.25f;
                }};
            }};
            bulletInterval = 3f;
        }}
    );
    // Five alternating barrels combined with a helix motion pattern.
    shoot = new ShootMulti(new ShootAlternate(){{
        spread = 3.3f * 1.9f;
        shots = barrels = 5;
    }}, new ShootHelix(){{
        scl = 4f;
        mag = 3f;
    }});
    shootSound = Sounds.shootSmite;
    minWarmup = 0.99f;
    coolantMultiplier = 6f;
    // Shared animation parameters for the halo decoration parts below.
    var haloProgress = PartProgress.warmup.delay(0.5f);
    float haloY = -15f, haloRotSpeed = 1f;
    shake = 2f;
    ammoPerShot = 2;
    drawer = new DrawTurret("reinforced-"){{
        parts.addAll(
            new RegionPart("-mid"){{
                heatProgress = PartProgress.heat.blend(PartProgress.warmup, 0.5f);
                mirror = false;
            }},
            new RegionPart("-blade"){{
                progress = PartProgress.warmup;
                heatProgress = PartProgress.warmup;
                mirror = true;
                moveX = 5.5f;
                moves.add(new PartMove(PartProgress.recoil, 0f, -3f, 0f));
            }},
            new RegionPart("-front"){{
                progress = PartProgress.warmup;
                heatProgress = PartProgress.recoil;
                mirror = true;
                under = true;
                moveY = 4f;
                moveX = 6.5f;
                moves.add(new PartMove(PartProgress.recoil, 0f, -5.5f, 0f));
            }},
            new RegionPart("-back"){{
                progress = PartProgress.warmup;
                heatProgress = PartProgress.warmup;
                mirror = true;
                under = true;
                moveX = 5.5f;
            }},
            new ShapePart(){{
                progress = PartProgress.warmup.delay(0.2f);
                color = Pal.accent;
                circle = true;
                hollow = true;
                stroke = 0f;
                strokeTo = 2f;
                radius = 10f;
                layer = Layer.effect;
                y = haloY;
                rotateSpeed = haloRotSpeed;
            }},
            new ShapePart(){{
                progress = PartProgress.warmup.delay(0.2f);
                color = Pal.accent;
                circle = true;
                hollow = true;
                stroke = 0f;
                strokeTo = 1.6f;
                radius = 4f;
                layer = Layer.effect;
                y = haloY;
                rotateSpeed = haloRotSpeed;
            }},
            new HaloPart(){{
                progress = haloProgress;
                color = Pal.accent;
                layer = Layer.effect;
                y = haloY;
                haloRotation = 90f;
                shapes = 2;
                triLength = 0f;
                triLengthTo = 20f;
                haloRadius = 16f;
                tri = true;
                radius = 4f;
            }},
            new HaloPart(){{
                progress = haloProgress;
                color = Pal.accent;
                layer = Layer.effect;
                y = haloY;
                haloRotation = 90f;
                shapes = 2;
                triLength = 0f;
                triLengthTo = 5f;
                haloRadius = 16f;
                tri = true;
                radius = 4f;
                shapeRotation = 180f;
            }},
            new HaloPart(){{
                progress = haloProgress;
                color = Pal.accent;
                layer = Layer.effect;
                y = haloY;
                haloRotateSpeed = -haloRotSpeed;
                shapes = 4;
                triLength = 0f;
                triLengthTo = 5f;
                haloRotation = 45f;
                haloRadius = 16f;
                tri = true;
                radius = 8f;
            }},
            new HaloPart(){{
                progress = haloProgress;
                color = Pal.accent;
                layer = Layer.effect;
                y = haloY;
                haloRotateSpeed = -haloRotSpeed;
                shapes = 4;
                shapeRotation = 180f;
                triLength = 0f;
                triLengthTo = 2f;
                haloRotation = 45f;
                haloRadius = 16f;
                tri = true;
                radius = 8f;
            }},
            new HaloPart(){{
                progress = haloProgress;
                color = Pal.accent;
                layer = Layer.effect;
                y = haloY;
                haloRotateSpeed = haloRotSpeed;
                shapes = 4;
                triLength = 0f;
                triLengthTo = 3f;
                haloRotation = 45f;
                haloRadius = 10f;
                tri = true;
                radius = 6f;
            }}
        );
        // Three mirrored blade bars spaced vertically.
        for(int i = 0; i < 3; i++){
            int fi = i;
            parts.add(new RegionPart("-blade-bar"){{
                progress = PartProgress.warmup;
                heatProgress = PartProgress.warmup;
                mirror = true;
                under = true;
                outline = false;
                layerOffset = -0.3f;
                turretHeatLayer = Layer.turret - 0.2f;
                y = 44f / 4f - fi * 38f / 4f;
                moveX = 2f;
                color = Pal.accent;
            }});
        }
        // Four mirrored spines that fan out with staggered warmup delays.
        for(int i = 0; i < 4; i++){
            int fi = i;
            parts.add(new RegionPart("-spine"){{
                progress = PartProgress.warmup.delay(fi / 5f);
                heatProgress = PartProgress.warmup;
                mirror = true;
                under = true;
                layerOffset = -0.3f;
                turretHeatLayer = Layer.turret - 0.2f;
                moveY = -22f / 4f - fi * 3f;
                moveX = 52f / 4f - fi * 1f + 2f;
                moveRot = -fi * 30f;
                color = Pal.accent;
                moves.add(new PartMove(PartProgress.recoil.delay(fi / 5f), 0f, 0f, 35f));
            }});
        }
    }};
    shootWarmupSpeed = 0.04f;
    shootY = 15f;
    outlineColor = Pal.darkOutline;
    size = 5;
    envEnabled |= Env.space;
    warmupMaintainTime = 30f;
    reload = 100f;
    recoil = 2f;
    range = 300;
    shootCone = 30f;
    scaledHealth = 350;
    rotateSpeed = 1.5f;
    coolant = consume(new ConsumeLiquid(Liquids.water, 15f / 60f));
    limitRange();
    loopSound = Sounds.glow;
    loopSoundVolume = 0.8f;
}};

// Malign: 5x5 power turret; homing flak missiles whose bursts spawn laser frag
// bullets (drawer parts continue past this point).
malign = new PowerTurret("malign"){{
    requirements(Category.turret, with(Items.carbide, 400, Items.beryllium, 2000, Items.silicon, 800, Items.graphite, 800, Items.phaseFabric, 300));
    // Shared colors/positions for the "summoning circle" and halo drawer parts.
    var haloProgress = PartProgress.warmup;
    Color haloColor = Color.valueOf("d370d3"), heatCol = Color.purple;
    float haloY = -15f, haloRotSpeed = 1.5f;
    var circleProgress = PartProgress.warmup.delay(0.9f);
    var circleColor = haloColor;
    float circleY = 25f, circleRad = 11f, circleRotSpeed = 3.5f, circleStroke = 1.6f;
    shootSound = Sounds.malignShoot;
    loopSound = Sounds.spellLoop;
    loopSoundVolume = 1.3f;
    shootType = new FlakBulletType(8f, 70f){{
        sprite = "missile-large";
        lifetime = 45f;
        width = 12f;
        height = 22f;
        hitSize = 7f;
        shootEffect = Fx.shootSmokeSquareBig;
        smokeEffect = Fx.shootSmokeDisperse;
        ammoMultiplier = 1;
        hitColor = backColor = trailColor = lightningColor = circleColor;
        frontColor = Color.white;
        trailWidth = 3f;
        trailLength = 12;
        hitEffect = despawnEffect = Fx.hitBulletColor;
        buildingDamageMultiplier = 0.3f;
        trailEffect = Fx.colorSpark;
        trailRotation = true;
        trailInterval = 3f;
        lightning = 1;
        lightningCone = 15f;
        lightningLength = 20;
        lightningLengthRand = 30;
        lightningDamage = 20f;
        homingPower = 0.17f;
        homingDelay = 19f;
        homingRange = 160f;
        explodeRange = 160f;
        explodeDelay = 0f;
        flakInterval = 20f;
        despawnShake = 3f;
        fragBullet = new LaserBulletType(65f){{
            colors = new Color[]{haloColor.cpy().a(0.4f), haloColor, Color.white};
            buildingDamageMultiplier = 0.25f;
            width = 19f;
            hitEffect = Fx.hitLancer;
            sideAngle = 175f;
            sideWidth = 1f;
            sideLength = 40f;
            lifetime = 22f;
            drawSize = 400f;
            length = 180f;
            pierceCap = 2;
        }};
        fragSpread = fragRandomSpread = 0f;
        splashDamage = 0f;
        hitEffect = Fx.hitSquaresColor;
        collidesGround = true;
    }};
    size = 5;
    drawer = new DrawTurret("reinforced-"){{
        parts.addAll(
            //summoning circle
            new ShapePart(){{
                progress = circleProgress;
// (continuation) interior of malign's DrawTurret drawer: summoning-circle shapes,
// halo decorations, and turret sprite parts; then malign's stats and the start
// of the unit-factory region.
                color = circleColor;
                circle = true;
                hollow = true;
                stroke = 0f;
                strokeTo = circleStroke;
                radius = circleRad;
                layer = Layer.effect;
                y = circleY;
            }},
            new ShapePart(){{
                progress = circleProgress;
                rotateSpeed = -circleRotSpeed;
                color = circleColor;
                sides = 4;
                hollow = true;
                stroke = 0f;
                strokeTo = circleStroke;
                radius = circleRad - 1f;
                layer = Layer.effect;
                y = circleY;
            }},
            //outer squares
            new ShapePart(){{
                progress = circleProgress;
                rotateSpeed = -circleRotSpeed;
                color = circleColor;
                sides = 4;
                hollow = true;
                stroke = 0f;
                strokeTo = circleStroke;
                radius = circleRad - 1f;
                layer = Layer.effect;
                y = circleY;
            }},
            //inner square
            new ShapePart(){{
                progress = circleProgress;
                rotateSpeed = -circleRotSpeed/2f;
                color = circleColor;
                sides = 4;
                hollow = true;
                stroke = 0f;
                strokeTo = 2f;
                radius = 3f;
                layer = Layer.effect;
                y = circleY;
            }},
            //spikes on circle
            new HaloPart(){{
                progress = circleProgress;
                color = circleColor;
                tri = true;
                shapes = 3;
                triLength = 0f;
                triLengthTo = 5f;
                radius = 6f;
                haloRadius = circleRad;
                haloRotateSpeed = haloRotSpeed / 2f;
                shapeRotation = 180f;
                haloRotation = 180f;
                layer = Layer.effect;
                y = circleY;
            }},
            //actual turret
            new RegionPart("-mouth"){{
                heatColor = heatCol;
                heatProgress = PartProgress.warmup;
                moveY = -8f;
            }},
            new RegionPart("-end"){{
                moveY = 0f;
            }},
            new RegionPart("-front"){{
                heatColor = heatCol;
                heatProgress = PartProgress.warmup;
                mirror = true;
                moveRot = 33f;
                moveY = -4f;
                moveX = 10f;
            }},
            new RegionPart("-back"){{
                heatColor = heatCol;
                heatProgress = PartProgress.warmup;
                mirror = true;
                moveRot = 10f;
                moveX = 2f;
                moveY = 5f;
            }},
            new RegionPart("-mid"){{
                heatColor = heatCol;
                heatProgress = PartProgress.recoil;
                moveY = -9.5f;
            }},
            new ShapePart(){{
                progress = haloProgress;
                color = haloColor;
                circle = true;
                hollow = true;
                stroke = 0f;
                strokeTo = 2f;
                radius = 10f;
                layer = Layer.effect;
                y = haloY;
            }},
            new ShapePart(){{
                progress = haloProgress;
                color = haloColor;
                sides = 3;
                rotation = 90f;
                hollow = true;
                stroke = 0f;
                strokeTo = 2f;
                radius = 4f;
                layer = Layer.effect;
                y = haloY;
            }},
            new HaloPart(){{
                progress = haloProgress;
                color = haloColor;
                sides = 3;
                shapes = 3;
                hollow = true;
                stroke = 0f;
                strokeTo = 2f;
                radius = 3f;
                haloRadius = 10f + radius/2f;
                haloRotateSpeed = haloRotSpeed;
                layer = Layer.effect;
                y = haloY;
            }},
            new HaloPart(){{
                progress = haloProgress;
                color = haloColor;
                tri = true;
                shapes = 3;
                triLength = 0f;
                triLengthTo = 10f;
                radius = 6f;
                haloRadius = 16f;
                haloRotation = 180f;
                layer = Layer.effect;
                y = haloY;
            }},
            new HaloPart(){{
                progress = haloProgress;
                color = haloColor;
                tri = true;
                shapes = 3;
                triLength = 0f;
                triLengthTo = 3f;
                radius = 6f;
                haloRadius = 16f;
                shapeRotation = 180f;
                haloRotation = 180f;
                layer = Layer.effect;
                y = haloY;
            }},
            new HaloPart(){{
                progress = haloProgress;
                color = haloColor;
                sides = 3;
                tri = true;
                shapes = 3;
                triLength = 0f;
                triLengthTo = 10f;
                shapeRotation = 180f;
                radius = 6f;
                haloRadius = 16f;
                haloRotateSpeed = -haloRotSpeed;
                haloRotation = 180f / 3f;
                layer = Layer.effect;
                y = haloY;
            }},
            new HaloPart(){{
                progress = haloProgress;
                color = haloColor;
                sides = 3;
                tri = true;
                shapes = 3;
                triLength = 0f;
                triLengthTo = 4f;
                radius = 6f;
                haloRadius = 16f;
                haloRotateSpeed = -haloRotSpeed;
                haloRotation = 180f / 3f;
                layer = Layer.effect;
                y = haloY;
            }}
        );
        // Slightly brightened heat color for the spine parts.
        Color heatCol2 = heatCol.cpy().add(0.1f, 0.1f, 0.1f).mul(1.2f);
        for(int i = 1; i < 4; i++){
            int fi = i;
            parts.add(new RegionPart("-spine"){{
                outline = false;
                progress = PartProgress.warmup.delay(fi / 5f);
                heatProgress = PartProgress.warmup.add(p -> (Mathf.absin(3f, 0.2f) - 0.2f) * p.warmup);
                mirror = true;
                under = true;
                layerOffset = -0.3f;
                turretHeatLayer = Layer.turret - 0.2f;
                moveY = 9f;
                moveX = 1f + fi * 4f;
                moveRot = fi * 60f - 130f;
                color = Color.valueOf("bb68c3");
                heatColor = heatCol2;
                moves.add(new PartMove(PartProgress.recoil.delay(fi / 5f), 1f, 0f, 3f));
            }});
        }
    }};
    velocityRnd = 0.15f;
    heatRequirement = 90f;
    maxHeatEfficiency = 2f;
    warmupMaintainTime = 30f;
    consumePower(10f);
    shoot = new ShootSummon(0f, 0f, circleRad, 48f);
    minWarmup = 0.96f;
    shootWarmupSpeed = 0.03f;
    shootY = circleY - 5f;
    outlineColor = Pal.darkOutline;
    envEnabled |= Env.space;
    reload = 9f;
    range = 370;
    shootCone = 100f;
    scaledHealth = 370;
    rotateSpeed = 2f;
    recoil = 0.5f;
    recoilTime = 30f;
    shake = 3f;
}};

//endregion
//region units

// Ground factory: builds dagger/crawler/nova.
groundFactory = new UnitFactory("ground-factory"){{
    requirements(Category.units, with(Items.copper, 50, Items.lead, 120, Items.silicon, 80));
    plans = Seq.with(
        new UnitPlan(UnitTypes.dagger, 60f * 15, with(Items.silicon, 10, Items.lead, 10)),
        new UnitPlan(UnitTypes.crawler, 60f * 10, with(Items.silicon, 8, Items.coal, 10)),
        new UnitPlan(UnitTypes.nova, 60f * 40, with(Items.silicon, 30, Items.lead, 20, Items.titanium, 20))
    );
    size = 3;
    consumePower(1.2f);
}};

// Air factory: builds flare/mono.
airFactory = new UnitFactory("air-factory"){{
    requirements(Category.units, with(Items.copper, 60, Items.lead, 70));
    plans = Seq.with(
        new UnitPlan(UnitTypes.flare, 60f * 15, with(Items.silicon, 15)),
        new UnitPlan(UnitTypes.mono, 60f * 35, with(Items.silicon, 30, Items.lead, 15))
    );
    size = 3;
    consumePower(1.2f);
}};

// Naval factory: builds risso/retusa; floats on liquids.
navalFactory = new UnitFactory("naval-factory"){{
    requirements(Category.units, with(Items.copper, 150, Items.lead, 130, Items.metaglass, 120));
    plans = Seq.with(
        new UnitPlan(UnitTypes.risso, 60f * 45f, with(Items.silicon, 20, Items.metaglass, 35)),
        new UnitPlan(UnitTypes.retusa, 60f * 50f, with(Items.silicon, 15, Items.metaglass, 25, Items.titanium, 20))
    );
    size = 3;
    consumePower(1.2f);
    floating = true;
}};

// Reconstructor: each upgrades array maps a base unit to its upgraded form.
additiveReconstructor = new Reconstructor("additive-reconstructor"){{
    requirements(Category.units, with(Items.copper, 200, Items.lead, 120, Items.silicon, 90));
    size = 3;
    consumePower(3f);
    consumeItems(with(Items.silicon, 40, Items.graphite, 40));
    constructTime = 60f * 10f;
    upgrades.addAll(
        new UnitType[]{UnitTypes.nova, UnitTypes.pulsar},
        new UnitType[]{UnitTypes.dagger, UnitTypes.mace},
        new UnitType[]{UnitTypes.crawler, UnitTypes.atrax},
        new UnitType[]{UnitTypes.flare, UnitTypes.horizon},
        new UnitType[]{UnitTypes.mono, UnitTypes.poly},
        new UnitType[]{UnitTypes.risso, UnitTypes.minke},
        new UnitType[]{UnitTypes.retusa, UnitTypes.oxynoe}
    );
}};

multiplicativeReconstructor = new Reconstructor("multiplicative-reconstructor"){{
    requirements(Category.units, with(Items.lead, 650, Items.silicon, 450, Items.titanium, 350, Items.thorium, 650));
    size = 5;
    consumePower(6f);
    consumeItems(with(Items.silicon, 130, Items.titanium, 80, Items.metaglass, 40));
    constructTime = 60f * 30f;
    upgrades.addAll(
        new UnitType[]{UnitTypes.horizon, UnitTypes.zenith},
        new UnitType[]{UnitTypes.mace, UnitTypes.fortress},
        new UnitType[]{UnitTypes.poly, UnitTypes.mega},
        new UnitType[]{UnitTypes.minke, UnitTypes.bryde},
        new UnitType[]{UnitTypes.pulsar, UnitTypes.quasar},
        new UnitType[]{UnitTypes.atrax, UnitTypes.spiroct},
        new UnitType[]{UnitTypes.oxynoe, UnitTypes.cyerce}
    );
}};

// Also consumes cryofluid while reconstructing.
exponentialReconstructor = new Reconstructor("exponential-reconstructor"){{
    requirements(Category.units, with(Items.lead, 2000, Items.silicon, 1000, Items.titanium, 2000, Items.thorium, 750, Items.plastanium, 450, Items.phaseFabric, 600));
    size = 7;
    consumePower(13f);
    consumeItems(with(Items.silicon, 850, Items.titanium, 750, Items.plastanium, 650));
    consumeLiquid(Liquids.cryofluid, 1f);
    constructTime = 60f * 60f * 1.5f;
    liquidCapacity = 60f;
    upgrades.addAll(
        new UnitType[]{UnitTypes.zenith, UnitTypes.antumbra},
        new UnitType[]{UnitTypes.spiroct, UnitTypes.arkyid},
        new UnitType[]{UnitTypes.fortress, UnitTypes.scepter},
        new UnitType[]{UnitTypes.bryde, UnitTypes.sei},
        new UnitType[]{UnitTypes.mega, UnitTypes.quad},
        new UnitType[]{UnitTypes.quasar, UnitTypes.vela},
        new UnitType[]{UnitTypes.cyerce, UnitTypes.aegires}
    );
}};

tetrativeReconstructor = new Reconstructor("tetrative-reconstructor"){{
    requirements(Category.units, with(Items.lead, 4000, Items.silicon, 3000, Items.thorium, 1000, Items.plastanium, 600, Items.phaseFabric, 600, Items.surgeAlloy, 800));
    size = 9;
    consumePower(25f);
    consumeItems(with(Items.silicon, 1000, Items.plastanium, 600, Items.surgeAlloy, 500, Items.phaseFabric, 350));
    consumeLiquid(Liquids.cryofluid, 3f);
    constructTime = 60f * 60f * 4;
    liquidCapacity = 180f;
    upgrades.addAll(
        new UnitType[]{UnitTypes.antumbra, UnitTypes.eclipse},
        new UnitType[]{UnitTypes.arkyid, UnitTypes.toxopid},
        new UnitType[]{UnitTypes.scepter, UnitTypes.reign},
        new UnitType[]{UnitTypes.sei, UnitTypes.omura},
        new UnitType[]{UnitTypes.quad, UnitTypes.oct},
        new UnitType[]{UnitTypes.vela, UnitTypes.corvus},
        new UnitType[]{UnitTypes.aegires, UnitTypes.navanax}
    );
}};

// Small repair-beam turret.
repairPoint = new RepairTurret("repair-point"){{
    requirements(Category.units, with(Items.lead, 30, Items.copper, 30, Items.silicon, 20));
    repairSpeed = 0.45f;
    repairRadius = 60f;
    beamWidth = 0.73f;
    powerUse = 1f;
    pulseRadius = 5f;
}};

// Larger repair turret; accepts coolant for a speed boost.
repairTurret = new RepairTurret("repair-turret"){{
    requirements(Category.units, with(Items.silicon, 90, Items.thorium, 80, Items.plastanium, 60));
    size = 2;
    length = 6f;
    repairSpeed = 3f;
    repairRadius = 145f;
    powerUse = 5f;
    beamWidth = 1.1f;
    pulseRadius = 6.1f;
    coolantUse = 0.16f;
    coolantMultiplier = 1.6f;
    acceptCoolant = true;
}};

//endregion
//region units - erekir

// Tank fabricator: single fixed plan (stell); not configurable.
tankFabricator = new UnitFactory("tank-fabricator"){{
    requirements(Category.units, with(Items.silicon, 200, Items.beryllium, 150));
    size = 3;
    configurable = false;
    plans.add(new UnitPlan(UnitTypes.stell, 60f * 35f, with(Items.beryllium, 40, Items.silicon, 50)));
    researchCost = with(Items.beryllium, 200, Items.graphite, 80, Items.silicon, 80);
    regionSuffix = "-dark";
    fogRadius = 3;
    consumePower(2f);
}};

// Ship fabricator: single fixed plan (elude).
shipFabricator = new UnitFactory("ship-fabricator"){{
    requirements(Category.units, with(Items.silicon, 250, Items.beryllium, 200));
    size = 3;
    configurable = false;
    plans.add(new UnitPlan(UnitTypes.elude, 60f * 40f, with(Items.graphite, 50, Items.silicon, 70)));
    regionSuffix = "-dark";
    fogRadius = 3;
    researchCostMultiplier = 0.5f;
    consumePower(2f);
}};

// Mech fabricator (definition continues past this chunk).
mechFabricator = new UnitFactory("mech-fabricator"){{
    requirements(Category.units,
with(Items.silicon, 200, Items.graphite, 300, Items.tungsten, 60)); size = 3; configurable = false; plans.add(new UnitPlan(UnitTypes.merui, 60f * 40f, with(Items.beryllium, 50, Items.silicon, 70))); regionSuffix = "-dark"; fogRadius = 3; researchCostMultiplier = 0.65f; consumePower(2f); }}; tankRefabricator = new Reconstructor("tank-refabricator"){{ requirements(Category.units, with(Items.beryllium, 200, Items.tungsten, 80, Items.silicon, 100)); regionSuffix = "-dark"; size = 3; consumePower(3f); consumeLiquid(Liquids.hydrogen, 3f / 60f); consumeItems(with(Items.silicon, 40, Items.tungsten, 30)); constructTime = 60f * 30f; researchCostMultiplier = 0.75f; upgrades.addAll( new UnitType[]{UnitTypes.stell, UnitTypes.locus} ); }}; shipRefabricator = new Reconstructor("ship-refabricator"){{ requirements(Category.units, with(Items.beryllium, 200, Items.tungsten, 100, Items.silicon, 150, Items.oxide, 40)); regionSuffix = "-dark"; size = 3; consumePower(2.5f); consumeLiquid(Liquids.hydrogen, 3f / 60f); consumeItems(with(Items.silicon, 60, Items.tungsten, 40)); constructTime = 60f * 50f; upgrades.addAll( new UnitType[]{UnitTypes.elude, UnitTypes.avert} ); researchCost = with(Items.beryllium, 500, Items.tungsten, 200, Items.silicon, 300, Items.oxide, 80); }}; mechRefabricator = new Reconstructor("mech-refabricator"){{ requirements(Category.units, with(Items.beryllium, 250, Items.tungsten, 120, Items.silicon, 150)); regionSuffix = "-dark"; size = 3; consumePower(2.5f); consumeLiquid(Liquids.hydrogen, 3f / 60f); consumeItems(with(Items.silicon, 50, Items.tungsten, 40)); constructTime = 60f * 45f; researchCostMultiplier = 0.75f; upgrades.addAll( new UnitType[]{UnitTypes.merui, UnitTypes.cleroi} ); }}; //yes very silly name primeRefabricator = new Reconstructor("prime-refabricator"){{ requirements(Category.units, with(Items.thorium, 250, Items.oxide, 200, Items.tungsten, 200, Items.silicon, 400)); regionSuffix = "-dark"; researchCostMultipliers.put(Items.thorium, 0.2f); size = 5; 
consumePower(5f); consumeLiquid(Liquids.nitrogen, 10f / 60f); consumeItems(with(Items.thorium, 80, Items.silicon, 100)); constructTime = 60f * 60f; upgrades.addAll( new UnitType[]{UnitTypes.locus, UnitTypes.precept}, new UnitType[]{UnitTypes.cleroi, UnitTypes.anthicus}, new UnitType[]{UnitTypes.avert, UnitTypes.obviate} ); }}; tankAssembler = new UnitAssembler("tank-assembler"){{ requirements(Category.units, with(Items.thorium, 500, Items.oxide, 150, Items.carbide, 80, Items.silicon, 500)); regionSuffix = "-dark"; size = 5; plans.add( new AssemblerUnitPlan(UnitTypes.vanquish, 60f * 50f, PayloadStack.list(UnitTypes.stell, 4, Blocks.tungstenWallLarge, 10)), new AssemblerUnitPlan(UnitTypes.conquer, 60f * 60f * 3f, PayloadStack.list(UnitTypes.locus, 6, Blocks.carbideWallLarge, 20)) ); areaSize = 13; researchCostMultiplier = 0.4f; consumePower(3f); consumeLiquid(Liquids.cyanogen, 9f / 60f); }}; shipAssembler = new UnitAssembler("ship-assembler"){{ requirements(Category.units, with(Items.carbide, 100, Items.oxide, 200, Items.tungsten, 500, Items.silicon, 800, Items.thorium, 400)); regionSuffix = "-dark"; size = 5; plans.add( new AssemblerUnitPlan(UnitTypes.quell, 60f * 60f, PayloadStack.list(UnitTypes.elude, 4, Blocks.berylliumWallLarge, 12)), new AssemblerUnitPlan(UnitTypes.disrupt, 60f * 60f * 3f, PayloadStack.list(UnitTypes.avert, 6, Blocks.carbideWallLarge, 20)) ); areaSize = 13; consumePower(3f); consumeLiquid(Liquids.cyanogen, 12f / 60f); }}; mechAssembler = new UnitAssembler("mech-assembler"){{ requirements(Category.units, with(Items.carbide, 200, Items.thorium, 600, Items.oxide, 200, Items.tungsten, 500, Items.silicon, 900)); regionSuffix = "-dark"; size = 5; //TODO different reqs plans.add( new AssemblerUnitPlan(UnitTypes.tecta, 60f * 70f, PayloadStack.list(UnitTypes.merui, 5, Blocks.tungstenWallLarge, 12)), new AssemblerUnitPlan(UnitTypes.collaris, 60f * 60f * 3f, PayloadStack.list(UnitTypes.cleroi, 6, Blocks.carbideWallLarge, 20)) ); areaSize = 13; 
consumePower(3.5f); consumeLiquid(Liquids.cyanogen, 12f / 60f); }}; //TODO requirements / only accept inputs basicAssemblerModule = new UnitAssemblerModule("basic-assembler-module"){{ requirements(Category.units, with(Items.carbide, 300, Items.thorium, 500, Items.oxide, 200, Items.phaseFabric, 400)); consumePower(4f); regionSuffix = "-dark"; researchCostMultiplier = 0.75f; size = 5; }}; unitRepairTower = new RepairTower("unit-repair-tower"){{ requirements(Category.units, with(Items.graphite, 90, Items.silicon, 90, Items.tungsten, 80)); size = 2; range = 100f; healAmount = 1.5f; consumePower(1f); consumeLiquid(Liquids.ozone, 3f / 60f); }}; //endregion //region payloads payloadConveyor = new PayloadConveyor("payload-conveyor"){{ requirements(Category.units, with(Items.graphite, 10, Items.copper, 10)); canOverdrive = false; }}; payloadRouter = new PayloadRouter("payload-router"){{ requirements(Category.units, with(Items.graphite, 15, Items.copper, 10)); canOverdrive = false; }}; reinforcedPayloadConveyor = new PayloadConveyor("reinforced-payload-conveyor"){{ requirements(Category.units, with(Items.tungsten, 10)); moveTime = 35f; canOverdrive = false; health = 800; researchCostMultiplier = 4f; underBullets = true; }}; reinforcedPayloadRouter = new PayloadRouter("reinforced-payload-router"){{ requirements(Category.units, with(Items.tungsten, 15)); moveTime = 35f; health = 800; canOverdrive = false; researchCostMultiplier = 4f; underBullets = true; }}; payloadMassDriver = new PayloadMassDriver("payload-mass-driver"){{ requirements(Category.units, with(Items.tungsten, 120, Items.silicon, 120, Items.graphite, 50)); regionSuffix = "-dark"; size = 3; reload = 130f; chargeTime = 90f; range = 700f; maxPayloadSize = 2.5f; fogRadius = 5; consumePower(0.5f); }}; largePayloadMassDriver = new PayloadMassDriver("large-payload-mass-driver"){{ requirements(Category.units, with(Items.thorium, 200, Items.tungsten, 200, Items.silicon, 200, Items.graphite, 100, Items.oxide, 30)); 
regionSuffix = "-dark"; size = 5; reload = 130f; chargeTime = 100f; range = 1100f; maxPayloadSize = 3.5f; consumePower(3f); }}; smallDeconstructor = new PayloadDeconstructor("small-deconstructor"){{ requirements(Category.units, with(Items.beryllium, 100, Items.silicon, 100, Items.oxide, 40, Items.graphite, 80)); regionSuffix = "-dark"; itemCapacity = 100; consumePower(1f); size = 3; deconstructSpeed = 1f; }}; deconstructor = new PayloadDeconstructor("deconstructor"){{ requirements(Category.units, with(Items.beryllium, 250, Items.oxide, 100, Items.silicon, 250, Items.carbide, 250)); regionSuffix = "-dark"; itemCapacity = 250; consumePower(3f); size = 5; deconstructSpeed = 2f; }}; constructor = new Constructor("constructor"){{ requirements(Category.units, with(Items.silicon, 100, Items.beryllium, 150, Items.tungsten, 80)); regionSuffix = "-dark"; hasPower = true; buildSpeed = 0.6f; consumePower(2f); size = 3; //TODO expand this list filter = Seq.with(Blocks.tungstenWallLarge, Blocks.berylliumWallLarge, Blocks.carbideWallLarge, Blocks.reinforcedSurgeWallLarge, Blocks.reinforcedLiquidContainer, Blocks.reinforcedContainer, Blocks.beamNode); }}; //yes this block is pretty much useless largeConstructor = new Constructor("large-constructor"){{ requirements(Category.units, with(Items.silicon, 150, Items.oxide, 150, Items.tungsten, 200, Items.phaseFabric, 40)); regionSuffix = "-dark"; hasPower = true; buildSpeed = 0.75f; maxBlockSize = 4; minBlockSize = 3; size = 5; consumePower(2f); }}; payloadLoader = new PayloadLoader("payload-loader"){{ requirements(Category.units, with(Items.graphite, 50, Items.silicon, 50, Items.tungsten, 80)); regionSuffix = "-dark"; hasPower = true; consumePower(2f); size = 3; fogRadius = 5; }}; payloadUnloader = new PayloadUnloader("payload-unloader"){{ requirements(Category.units, with(Items.graphite, 50, Items.silicon, 50, Items.tungsten, 30)); regionSuffix = "-dark"; hasPower = true; consumePower(2f); size = 3; fogRadius = 5; }}; //endregion 
//region sandbox powerSource = new PowerSource("power-source"){{ requirements(Category.power, BuildVisibility.sandboxOnly, with()); powerProduction = 1000000f / 60f; alwaysUnlocked = true; }}; powerVoid = new PowerVoid("power-void"){{ requirements(Category.power, BuildVisibility.sandboxOnly, with()); alwaysUnlocked = true; }}; itemSource = new ItemSource("item-source"){{ requirements(Category.distribution, BuildVisibility.sandboxOnly, with()); alwaysUnlocked = true; }}; itemVoid = new ItemVoid("item-void"){{ requirements(Category.distribution, BuildVisibility.sandboxOnly, with()); alwaysUnlocked = true; }}; liquidSource = new LiquidSource("liquid-source"){{ requirements(Category.liquid, BuildVisibility.sandboxOnly, with()); alwaysUnlocked = true; }}; liquidVoid = new LiquidVoid("liquid-void"){{ requirements(Category.liquid, BuildVisibility.sandboxOnly, with()); alwaysUnlocked = true; }}; payloadSource = new PayloadSource("payload-source"){{ requirements(Category.units, BuildVisibility.sandboxOnly, with()); size = 5; alwaysUnlocked = true; }}; payloadVoid = new PayloadVoid("payload-void"){{ requirements(Category.units, BuildVisibility.sandboxOnly, with()); size = 5; alwaysUnlocked = true; }}; heatSource = new HeatProducer("heat-source"){{ requirements(Category.crafting, BuildVisibility.sandboxOnly, with()); drawer = new DrawMulti(new DrawDefault(), new DrawHeatOutput()); rotateDraw = false; size = 1; heatOutput = 1000f; warmupRate = 1000f; regionRotated1 = 1; itemCapacity = 0; alwaysUnlocked = true; ambientSound = Sounds.none; }}; //TODO move illuminator = new LightBlock("illuminator"){{ requirements(Category.effect, BuildVisibility.lightingOnly, with(Items.graphite, 12, Items.silicon, 8, Items.lead, 8)); brightness = 0.75f; radius = 140f; consumePower(0.05f); }}; //endregion //region legacy //looked up by name, no ref needed new LegacyMechPad("legacy-mech-pad"); new LegacyUnitFactory("legacy-unit-factory"); new LegacyUnitFactory("legacy-unit-factory-air"){{ 
replacement = Blocks.airFactory; }}; new LegacyUnitFactory("legacy-unit-factory-ground"){{ replacement = Blocks.groundFactory; }}; new LegacyCommandCenter("command-center"){{ size = 2; }}; //endregion //region campaign launchPad = new LaunchPad("launch-pad"){{ requirements(Category.effect, BuildVisibility.campaignOnly, with(Items.copper, 350, Items.silicon, 140, Items.lead, 200, Items.titanium, 150)); size = 3; itemCapacity = 100; launchTime = 60f * 20; hasPower = true; consumePower(4f); }}; interplanetaryAccelerator = new Accelerator("interplanetary-accelerator"){{ requirements(Category.effect, BuildVisibility.hidden, with(Items.copper, 16000, Items.silicon, 11000, Items.thorium, 13000, Items.titanium, 12000, Items.surgeAlloy, 6000, Items.phaseFabric, 5000)); researchCostMultiplier = 0.1f; size = 7; hasPower = true; consumePower(10f); buildCostMultiplier = 0.5f; scaledHealth = 80; }}; //endregion campaign //region logic message = new MessageBlock("message"){{ requirements(Category.logic, with(Items.graphite, 5, Items.copper, 5)); }}; switchBlock = new SwitchBlock("switch"){{ requirements(Category.logic, with(Items.graphite, 5, Items.copper, 5)); }}; microProcessor = new LogicBlock("micro-processor"){{ requirements(Category.logic, with(Items.copper, 90, Items.lead, 50, Items.silicon, 50)); instructionsPerTick = 2; size = 1; }}; logicProcessor = new LogicBlock("logic-processor"){{ requirements(Category.logic, with(Items.lead, 320, Items.silicon, 80, Items.graphite, 60, Items.thorium, 50)); instructionsPerTick = 8; range = 8 * 22; size = 2; }}; hyperProcessor = new LogicBlock("hyper-processor"){{ requirements(Category.logic, with(Items.lead, 450, Items.silicon, 150, Items.thorium, 75, Items.surgeAlloy, 50)); consumeLiquid(Liquids.cryofluid, 0.08f); hasLiquids = true; instructionsPerTick = 25; range = 8 * 42; size = 3; }}; memoryCell = new MemoryBlock("memory-cell"){{ requirements(Category.logic, with(Items.graphite, 30, Items.silicon, 30, Items.copper, 30)); 
memoryCapacity = 64; }}; memoryBank = new MemoryBlock("memory-bank"){{ requirements(Category.logic, with(Items.graphite, 80, Items.silicon, 80, Items.phaseFabric, 30, Items.copper, 30)); memoryCapacity = 512; size = 2; }}; logicDisplay = new LogicDisplay("logic-display"){{ requirements(Category.logic, with(Items.lead, 100, Items.silicon, 50, Items.metaglass, 50)); displaySize = 80; size = 3; }}; largeLogicDisplay = new LogicDisplay("large-logic-display"){{ requirements(Category.logic, with(Items.lead, 200, Items.silicon, 150, Items.metaglass, 100, Items.phaseFabric, 75)); displaySize = 176; size = 6; }}; canvas = new CanvasBlock("canvas"){{ requirements(Category.logic, BuildVisibility.shown, with(Items.silicon, 10, Items.beryllium, 10)); canvasSize = 12; padding = 7f / 4f * 2f; size = 2; }}; reinforcedMessage = new MessageBlock("reinforced-message"){{ requirements(Category.logic, with(Items.graphite, 10, Items.beryllium, 5)); health = 100; }}; worldProcessor = new LogicBlock("world-processor"){{ requirements(Category.logic, BuildVisibility.editorOnly, with()); canOverdrive = false; targetable = false; instructionsPerTick = 8; forceDark = true; privileged = true; size = 1; maxInstructionsPerTick = 1000; range = Float.MAX_VALUE; }}; worldCell = new MemoryBlock("world-cell"){{ requirements(Category.logic, BuildVisibility.editorOnly, with()); targetable = false; privileged = true; memoryCapacity = 128; forceDark = true; }}; worldMessage = new MessageBlock("world-message"){{ requirements(Category.logic, BuildVisibility.editorOnly, with()); targetable = false; privileged = true; }}; worldSwitch = new SwitchBlock("world-switch"){{ requirements(Category.logic, BuildVisibility.editorOnly, with()); targetable = false; privileged = true; }}; //endregion } }
Anuken/Mindustry
core/src/mindustry/content/Blocks.java
959
/*
 * This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework
 * licensed under LGPL (see lgpl-3.0.txt).
 *
 * The MIT License
 * Copyright © 2014-2022 Ilkka Seppälä
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.iluwatar.transactionscript;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Optional;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.Consumer;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import javax.sql.DataSource;
import lombok.extern.slf4j.Slf4j;

/**
 * Implementation of database operations for Hotel class.
 *
 * <p>Not thread-safe beyond what the underlying {@link DataSource} provides; each call obtains its
 * own connection.
 */
@Slf4j
public class HotelDaoImpl implements HotelDao {

  private final DataSource dataSource;

  /**
   * Creates a DAO backed by the given data source.
   *
   * @param dataSource source of JDBC connections; must not be null
   */
  public HotelDaoImpl(DataSource dataSource) {
    this.dataSource = dataSource;
  }

  /**
   * Streams every room in the ROOMS table.
   *
   * <p>The returned stream holds a live connection, statement and result set; the caller MUST close
   * the stream (e.g. via try-with-resources) to release them. If setup fails before the stream is
   * handed out, the partially-opened resources are closed here so nothing leaks.
   *
   * @return lazy stream of all rooms; close it when done
   * @throws Exception if the query cannot be prepared or executed
   */
  @Override
  public Stream<Room> getAll() throws Exception {
    Connection connection = null;
    PreparedStatement statement = null;
    ResultSet resultSet = null;
    try {
      connection = getConnection();
      statement = connection.prepareStatement("SELECT * FROM ROOMS"); // NOSONAR
      resultSet = statement.executeQuery(); // NOSONAR
      // Effectively-final aliases so the anonymous spliterator and onClose lambda may capture them.
      final var conn = connection;
      final var stmt = statement;
      final var results = resultSet;
      return StreamSupport.stream(new Spliterators.AbstractSpliterator<Room>(Long.MAX_VALUE,
          Spliterator.ORDERED) {
        @Override
        public boolean tryAdvance(Consumer<? super Room> action) {
          try {
            if (!results.next()) {
              return false;
            }
            action.accept(createRoom(results));
            return true;
          } catch (Exception e) {
            // Spliterator contract does not allow checked exceptions; wrap and rethrow.
            throw new RuntimeException(e); // NOSONAR
          }
        }
      }, false).onClose(() -> {
        try {
          mutedClose(conn, stmt, results);
        } catch (Exception e) {
          LOGGER.error(e.getMessage());
        }
      });
    } catch (Exception e) {
      // Setup failed before the caller got the stream: close whatever was already opened,
      // otherwise the connection/statement would leak (the onClose hook never runs).
      closeQuietly(resultSet);
      closeQuietly(statement);
      closeQuietly(connection);
      throw new Exception(e.getMessage(), e);
    }
  }

  /**
   * Looks up a single room by its primary key.
   *
   * @param id room id to search for
   * @return the room if present, otherwise {@link Optional#empty()}
   * @throws Exception on any database failure
   */
  @Override
  public Optional<Room> getById(int id) throws Exception {
    ResultSet resultSet = null;
    try (var connection = getConnection();
         var statement = connection.prepareStatement("SELECT * FROM ROOMS WHERE ID = ?")) {
      statement.setInt(1, id);
      resultSet = statement.executeQuery();
      if (resultSet.next()) {
        return Optional.of(createRoom(resultSet));
      } else {
        return Optional.empty();
      }
    } catch (Exception e) {
      throw new Exception(e.getMessage(), e);
    } finally {
      // The result set is not covered by try-with-resources above; close it explicitly.
      if (resultSet != null) {
        resultSet.close();
      }
    }
  }

  /**
   * Inserts a room unless a room with the same id already exists.
   *
   * @param room room to persist
   * @return true if inserted, false if the id was already taken
   * @throws Exception on any database failure
   */
  @Override
  public Boolean add(Room room) throws Exception {
    if (getById(room.getId()).isPresent()) {
      return false;
    }
    try (var connection = getConnection();
         var statement = connection.prepareStatement("INSERT INTO ROOMS VALUES (?,?,?,?)")) {
      statement.setInt(1, room.getId());
      statement.setString(2, room.getRoomType());
      statement.setInt(3, room.getPrice());
      statement.setBoolean(4, room.isBooked());
      statement.execute();
      return true;
    } catch (Exception e) {
      throw new Exception(e.getMessage(), e);
    }
  }

  /**
   * Updates the mutable columns of an existing room, matched by id.
   *
   * @param room room carrying the new values
   * @return true if a row was updated, false if no row matched the id
   * @throws Exception on any database failure
   */
  @Override
  public Boolean update(Room room) throws Exception {
    try (var connection = getConnection();
         var statement = connection
             .prepareStatement("UPDATE ROOMS SET ROOM_TYPE = ?, PRICE = ?, BOOKED = ?"
                 + " WHERE ID = ?")) {
      statement.setString(1, room.getRoomType());
      statement.setInt(2, room.getPrice());
      statement.setBoolean(3, room.isBooked());
      statement.setInt(4, room.getId());
      return statement.executeUpdate() > 0;
    } catch (Exception e) {
      throw new Exception(e.getMessage(), e);
    }
  }

  /**
   * Deletes the room with the given room's id.
   *
   * @param room room whose id identifies the row to delete
   * @return true if a row was deleted, false otherwise
   * @throws Exception on any database failure
   */
  @Override
  public Boolean delete(Room room) throws Exception {
    try (var connection = getConnection();
         var statement = connection.prepareStatement("DELETE FROM ROOMS WHERE ID = ?")) {
      statement.setInt(1, room.getId());
      return statement.executeUpdate() > 0;
    } catch (Exception e) {
      throw new Exception(e.getMessage(), e);
    }
  }

  /** Obtains a fresh connection from the configured data source. */
  private Connection getConnection() throws Exception {
    return dataSource.getConnection();
  }

  /**
   * Closes the three stream-backing resources, wrapping any failure in a checked exception.
   * Used by the {@code onClose} hook of {@link #getAll()}.
   */
  private void mutedClose(Connection connection, PreparedStatement statement, ResultSet resultSet)
      throws Exception {
    try {
      resultSet.close();
      statement.close();
      connection.close();
    } catch (Exception e) {
      throw new Exception(e.getMessage(), e);
    }
  }

  /**
   * Best-effort close for the error path: null-safe, never throws, only logs.
   * Kept separate from {@link #mutedClose} so the original close semantics are untouched.
   */
  private void closeQuietly(AutoCloseable closeable) {
    if (closeable != null) {
      try {
        closeable.close();
      } catch (Exception e) {
        LOGGER.error(e.getMessage());
      }
    }
  }

  /** Maps the current row of the result set onto a {@link Room} value object. */
  private Room createRoom(ResultSet resultSet) throws Exception {
    return new Room(resultSet.getInt("ID"),
        resultSet.getString("ROOM_TYPE"),
        resultSet.getInt("PRICE"),
        resultSet.getBoolean("BOOKED"));
  }
}
tyrellbaker-blip/java-design-patterns
transaction-script/src/main/java/com/iluwatar/transactionscript/HotelDaoImpl.java
960
/* * The MIT License * * Copyright (c) 2004-2011, Sun Microsystems, Inc., Kohsuke Kawaguchi, Tom Huybrechts, * Yahoo!, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package hudson.model; import static javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST; import com.thoughtworks.xstream.converters.ConversionException; import com.thoughtworks.xstream.io.StreamException; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.DescriptorExtensionList; import hudson.Extension; import hudson.ExtensionPoint; import hudson.Functions; import hudson.Indenter; import hudson.Util; import hudson.init.InitMilestone; import hudson.init.Initializer; import hudson.model.Descriptor.FormException; import hudson.model.listeners.ItemListener; import hudson.search.CollectionSearchIndex; import hudson.search.SearchIndexBuilder; import hudson.security.ACL; import hudson.security.AccessControlled; import hudson.security.Permission; import hudson.security.PermissionGroup; import hudson.security.PermissionScope; import hudson.util.AlternativeUiTextProvider; import hudson.util.AlternativeUiTextProvider.Message; import hudson.util.DescribableList; import hudson.util.DescriptorList; import hudson.util.FormApply; import hudson.util.FormValidation; import hudson.util.RunList; import hudson.util.XStream2; import hudson.views.ListViewColumn; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Serializable; import java.io.StringWriter; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import javax.servlet.ServletException; import 
javax.servlet.http.HttpServletResponse; import javax.xml.transform.Source; import javax.xml.transform.TransformerException; import javax.xml.transform.sax.SAXSource; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import jenkins.model.Jenkins; import jenkins.model.ModelObjectWithChildren; import jenkins.model.ModelObjectWithContextMenu; import jenkins.model.item_category.Categories; import jenkins.model.item_category.Category; import jenkins.model.item_category.ItemCategory; import jenkins.util.xml.XMLUtils; import jenkins.widgets.HasWidgets; import net.sf.json.JSONObject; import org.jenkins.ui.icon.Icon; import org.jenkins.ui.icon.IconSet; import org.jenkins.ui.symbol.Symbol; import org.jenkins.ui.symbol.SymbolRequest; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.DoNotUse; import org.kohsuke.stapler.DataBoundSetter; import org.kohsuke.stapler.HttpResponse; import org.kohsuke.stapler.HttpResponses; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.Stapler; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import org.kohsuke.stapler.WebMethod; import org.kohsuke.stapler.export.Exported; import org.kohsuke.stapler.export.ExportedBean; import org.kohsuke.stapler.interceptor.RequirePOST; import org.kohsuke.stapler.verb.POST; import org.xml.sax.SAXException; /** * Encapsulates the rendering of the list of {@link TopLevelItem}s * that {@link Jenkins} owns. * * <p> * This is an extension point in Hudson, allowing different kind of * rendering to be added as plugins. * * <h2>Note for implementers</h2> * <ul> * <li> * {@link View} subtypes need the {@code newViewDetail.jelly} page, * which is included in the "new view" page. This page should have some * description of what the view is about. 
* </ul> * * @author Kohsuke Kawaguchi * @see ViewDescriptor * @see ViewGroup */ @ExportedBean public abstract class View extends AbstractModelObject implements AccessControlled, Describable<View>, ExtensionPoint, Saveable, ModelObjectWithChildren, DescriptorByNameOwner, HasWidgets { /** * Container of this view. Set right after the construction * and never change thereafter. */ protected /*final*/ ViewGroup owner; /** * Name of this view. */ protected String name; /** * Message displayed in the view page. */ protected String description; /** * If true, only show relevant executors */ protected boolean filterExecutors; /** * If true, only show relevant queue items */ protected boolean filterQueue; /** * List of {@link ViewProperty}s configured for this view. * @since 1.406 */ private volatile DescribableList<ViewProperty, ViewPropertyDescriptor> properties = new PropertyList(this); protected View(String name) { this.name = name; } protected View(String name, ViewGroup owner) { this.name = name; this.owner = owner; } /** * Gets all the items in this collection in a read-only view. */ @NonNull @Exported(name = "jobs") public abstract Collection<TopLevelItem> getItems(); /** * Gets all the items recursively contained in this collection in a read-only view. * <p> * The default implementation recursively adds the items of all contained Views * in case this view implements {@link ViewGroup}, which should be enough for most cases. * * @since 1.520 */ public Collection<TopLevelItem> getAllItems() { if (this instanceof ViewGroup) { final Collection<TopLevelItem> items = new LinkedHashSet<>(getItems()); for (View view : ((ViewGroup) this).getViews()) { items.addAll(view.getAllItems()); } return Collections.unmodifiableCollection(items); } else { return getItems(); } } /** * Gets the {@link TopLevelItem} of the given name. */ public TopLevelItem getItem(String name) { return getOwner().getItemGroup().getItem(name); } /** * Alias for {@link #getItem(String)}. 
This is the one used in the URL binding. */ public final TopLevelItem getJob(String name) { return getItem(name); } /** * Checks if the job is in this collection. */ public abstract boolean contains(TopLevelItem item); /** * Gets the name of all this collection. * * @see #rename(String) */ @Exported(visibility = 2, name = "name") @NonNull public String getViewName() { return name; } /** * Renames this view. */ public void rename(String newName) throws Failure, FormException { if (name.equals(newName)) return; // noop Jenkins.checkGoodName(newName); if (owner.getView(newName) != null) throw new FormException(Messages.Hudson_ViewAlreadyExists(newName), "name"); String oldName = name; name = newName; owner.onViewRenamed(this, oldName, newName); } /** * Gets the {@link ViewGroup} that this view belongs to. */ public ViewGroup getOwner() { return owner; } /** @deprecated call {@link ViewGroup#getItemGroup} directly */ @Deprecated public ItemGroup<? extends TopLevelItem> getOwnerItemGroup() { return owner.getItemGroup(); } /** @deprecated call {@link ViewGroup#getPrimaryView} directly */ @Deprecated public View getOwnerPrimaryView() { return owner.getPrimaryView(); } /** @deprecated call {@link ViewGroup#getViewActions} directly */ @Deprecated public List<Action> getOwnerViewActions() { return owner.getViewActions(); } /** * Message displayed in the top page. Can be null. Includes HTML. */ @Exported public synchronized String getDescription() { return description; } @DataBoundSetter public synchronized void setDescription(String description) { this.description = Util.nullify(description); } /** * Gets the view properties configured for this view. 
* @since 1.406 */ public DescribableList<ViewProperty, ViewPropertyDescriptor> getProperties() { // readResolve was the best place to do this, but for compatibility reasons, // this class can no longer have readResolve() (the mechanism itself isn't suitable for class hierarchy) // see JENKINS-9431 // // until we have that, putting this logic here. synchronized (PropertyList.class) { if (properties == null) { properties = new PropertyList(this); } else { properties.setOwner(this); } return properties; } } /** * Returns all the {@link ViewPropertyDescriptor}s that can be potentially configured * on this view. Returns both {@link ViewPropertyDescriptor}s visible and invisible for user, see * {@link View#getVisiblePropertyDescriptors} to filter invisible one. */ public List<ViewPropertyDescriptor> getApplicablePropertyDescriptors() { List<ViewPropertyDescriptor> r = new ArrayList<>(); for (ViewPropertyDescriptor pd : ViewProperty.all()) { if (pd.isEnabledFor(this)) r.add(pd); } return r; } /** * @return all the {@link ViewPropertyDescriptor}s that can be potentially configured on this View and are visible * for the user. Use {@link DescriptorVisibilityFilter} to make a View property invisible for users. * @since 2.214 */ public List<ViewPropertyDescriptor> getVisiblePropertyDescriptors() { return DescriptorVisibilityFilter.apply(this, getApplicablePropertyDescriptors()); } @Override public void save() throws IOException { // persistence is a part of the owner // due to initialization timing issue, it can be null when this method is called if (owner != null) { owner.save(); } } /** * List of all {@link ViewProperty}s exposed primarily for the remoting API. 
* @since 1.406 */ @Exported(name = "property", inline = true) public List<ViewProperty> getAllProperties() { return getProperties().toList(); } @Override public ViewDescriptor getDescriptor() { return (ViewDescriptor) Jenkins.get().getDescriptorOrDie(getClass()); } @Override public String getDisplayName() { return getViewName(); } public String getNewPronoun() { return AlternativeUiTextProvider.get(NEW_PRONOUN, this, Messages.AbstractItem_Pronoun()); } /** * By default, return true to render the "Edit view" link on the page. * This method is really just for the default "All" view to hide the edit link * so that the default Hudson top page remains the same as before 1.316. * * @since 1.316 */ public boolean isEditable() { return true; } /** * Used to enable or disable automatic refreshes of the view. * * @since 1.557 * * @deprecated Auto-refresh has been removed */ @Deprecated public boolean isAutomaticRefreshEnabled() { return false; } /** * If true, only show relevant executors */ public boolean isFilterExecutors() { return filterExecutors; } /** * @since 2.426 */ @DataBoundSetter public void setFilterExecutors(boolean filterExecutors) { this.filterExecutors = filterExecutors; } /** * If true, only show relevant queue items */ public boolean isFilterQueue() { return filterQueue; } /** * @since 2.426 */ @DataBoundSetter public void setFilterQueue(boolean filterQueue) { this.filterQueue = filterQueue; } /** * If this view uses {@code <t:projectView>} for rendering, this method returns columns to be displayed. */ public Iterable<? extends ListViewColumn> getColumns() { return ListViewColumn.createDefaultInitialColumnList(this); } /** * If this view uses {@code t:projectView} for rendering, this method returns the indenter used * to indent each row. */ public Indenter getIndenter() { return null; } /** * If true, this is a view that renders the top page of Hudson. 
*/ public boolean isDefault() { return getOwner().getPrimaryView() == this; } public List<Computer> getComputers() { Computer[] computers = Jenkins.get().getComputers(); if (!isFilterExecutors()) { return Arrays.asList(computers); } List<Computer> result = new ArrayList<>(); HashSet<Label> labels = new HashSet<>(); for (Item item : getItems()) { if (item instanceof AbstractProject<?, ?>) { labels.addAll(((AbstractProject<?, ?>) item).getRelevantLabels()); } } for (Computer c : computers) { if (isRelevant(labels, c)) result.add(c); } return result; } private boolean isRelevant(Collection<Label> labels, Computer computer) { Node node = computer.getNode(); if (node == null) return false; if (labels.contains(null) && node.getMode() == Node.Mode.NORMAL) return true; for (Label l : labels) if (l != null && l.contains(node)) return true; return false; } private static final int FILTER_LOOP_MAX_COUNT = 10; private List<Queue.Item> filterQueue(List<Queue.Item> base) { if (!isFilterQueue()) { return base; } Collection<TopLevelItem> items = getItems(); return base.stream().filter(qi -> filterQueueItemTest(qi, items)) .collect(Collectors.toList()); } private boolean filterQueueItemTest(Queue.Item item, Collection<TopLevelItem> viewItems) { // Check if the task of parent tasks are in the list of viewItems. // Pipeline jobs and other jobs which allow parts require us to // check owner tasks as well. Queue.Task currentTask = item.task; for (int count = 1;; count++) { if (viewItems.contains(currentTask)) { return true; } Queue.Task next = currentTask.getOwnerTask(); if (next == currentTask) { break; } else { currentTask = next; } if (count == FILTER_LOOP_MAX_COUNT) { LOGGER.warning(String.format( "Failed to find root task for queue item '%s' for " + "view '%s' in under %d iterations, aborting!", item.getDisplayName(), getDisplayName(), FILTER_LOOP_MAX_COUNT)); break; } } // Check root project for sub-job projects (e.g. matrix jobs). 
if (item.task instanceof AbstractProject<?, ?>) { AbstractProject<?, ?> project = (AbstractProject<?, ?>) item.task; return viewItems.contains(project.getRootProject()); } return false; } public List<Queue.Item> getQueueItems() { return filterQueue(Arrays.asList(Jenkins.get().getQueue().getItems())); } /** * @return The items in the queue. * @deprecated Use {@link #getQueueItems()}. As of 1.607 the approximation is no longer needed. */ @Deprecated public List<Queue.Item> getApproximateQueueItemsQuickly() { return filterQueue(Jenkins.get().getQueue().getApproximateItemsQuickly()); } /** * Returns the path relative to the context root. * * Doesn't start with '/' but ends with '/' (except returns * empty string when this is the default view). */ public String getUrl() { return isDefault() ? (owner != null ? owner.getUrl() : "") : getViewUrl(); } /** * Same as {@link #getUrl()} except this returns a view/{name} path * even for the default view. */ public String getViewUrl() { return (owner != null ? owner.getUrl() : "") + "view/" + Util.rawEncode(getViewName()) + '/'; } @Override public String toString() { return super.toString() + "[" + getViewUrl() + "]"; } @Override public String getSearchUrl() { return getUrl(); } /** * Returns the transient {@link Action}s associated with the top page. * * <p> * If views don't want to show top-level actions, this method * can be overridden to return different objects. * * @see Jenkins#getActions() */ public List<Action> getActions() { List<Action> result = new ArrayList<>(); result.addAll(getOwner().getViewActions()); result.addAll(TransientViewActionFactory.createAllFor(this)); return result; } /** * No-op. Included to maintain backwards compatibility. 
* @deprecated This method does nothing and should not be used */ @Restricted(DoNotUse.class) @Deprecated public void updateTransientActions() {} public Object getDynamic(String token) { for (Action a : getActions()) { String url = a.getUrlName(); if (url == null) continue; if (url.equals(token)) return a; } return null; } /** * Gets the absolute URL of this view. */ @Exported(visibility = 2, name = "url") public String getAbsoluteUrl() { return Jenkins.get().getRootUrl() + getUrl(); } public Api getApi() { return new Api(this); } /** * Returns the page to redirect the user to, after the view is created. * * The returned string is appended to "/view/foobar/", so for example * to direct the user to the top page of the view, return "", etc. */ public String getPostConstructLandingPage() { return "configure"; } /** * Returns the {@link ACL} for this object. */ @NonNull @Override public ACL getACL() { return Jenkins.get().getAuthorizationStrategy().getACL(this); } /** @deprecated Does not work properly with moved jobs. Use {@link ItemListener#onLocationChanged} instead. 
*/ @Deprecated public void onJobRenamed(Item item, String oldName, String newName) {} void addDisplayNamesToSearchIndex(SearchIndexBuilder sib, Collection<TopLevelItem> items) { for (TopLevelItem item : items) { if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(String.format("Adding url=%s,displayName=%s", item.getSearchUrl(), item.getDisplayName())); } sib.add(item.getSearchUrl(), item.getDisplayName()); } } /** * Add a simple CollectionSearchIndex object to sib * * @param sib the SearchIndexBuilder * @since 2.200 */ protected void makeSearchIndex(SearchIndexBuilder sib) { sib.add(new CollectionSearchIndex<TopLevelItem>() { // for jobs in the view @Override protected TopLevelItem get(String key) { return getItem(key); } @Override protected Collection<TopLevelItem> all() { return getItems(); } @Override protected String getName(TopLevelItem o) { // return the name instead of the display for suggestion searching return o.getName(); } }); } @Override public SearchIndexBuilder makeSearchIndex() { SearchIndexBuilder sib = super.makeSearchIndex(); makeSearchIndex(sib); // add the display name for each item in the search index addDisplayNamesToSearchIndex(sib, getItems()); return sib; } /** * Accepts the new description. */ @RequirePOST public synchronized void doSubmitDescription(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { checkPermission(CONFIGURE); description = req.getParameter("description"); save(); rsp.sendRedirect("."); // go to the top page } /** * Accepts submission from the configuration page. * * Subtypes should override the {@link #submit(StaplerRequest)} method. 
*/ @POST public final synchronized void doConfigSubmit(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException, FormException { checkPermission(CONFIGURE); submit(req); var json = req.getSubmittedForm(); setDescription(json.optString("description")); setFilterExecutors(json.optBoolean("filterExecutors")); setFilterQueue(json.optBoolean("filterQueue")); rename(req.getParameter("name")); getProperties().rebuild(req, json, getApplicablePropertyDescriptors()); save(); FormApply.success("../" + Util.rawEncode(name)).generateResponse(req, rsp, this); } /** * Handles the configuration submission. * * Load view-specific properties here. */ protected abstract void submit(StaplerRequest req) throws IOException, ServletException, FormException; /** * Deletes this view. */ @RequirePOST public synchronized void doDoDelete(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { checkPermission(DELETE); owner.deleteView(this); rsp.sendRedirect2(req.getContextPath() + "/" + owner.getUrl()); } /** * Creates a new {@link Item} in this collection. * * <p> * This method should call {@link ModifiableItemGroup#doCreateItem(StaplerRequest, StaplerResponse)} * and then add the newly created item to this view. * * @return * null if fails. */ public abstract Item doCreateItem(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException; /** * Makes sure that the given name is good as a job name. * For use from {@code newJob}. */ @Restricted(DoNotUse.class) // called from newJob view public FormValidation doCheckJobName(@QueryParameter String value) { // this method can be used to check if a file exists anywhere in the file system, // so it should be protected. getOwner().checkPermission(Item.CREATE); if (Util.fixEmpty(value) == null) { return FormValidation.ok(); } try { Jenkins.checkGoodName(value); value = value.trim(); // why trim *after* checkGoodName? 
not sure, but ItemGroupMixIn.createTopLevelItem does the same ItemGroup<?> parent = getOwner().getItemGroup(); Jenkins.get().getProjectNamingStrategy().checkName(parent.getFullName(), value); } catch (Failure e) { return FormValidation.error(e.getMessage()); } if (getOwner().getItemGroup().getItem(value) != null) { return FormValidation.error(Messages.Hudson_JobAlreadyExists(value)); } // looks good return FormValidation.ok(); } /** * An API REST method to get the allowed {$link TopLevelItem}s and its categories. * * @return A {@link Categories} entity that is shown as JSON file. */ @Restricted(DoNotUse.class) public Categories doItemCategories(StaplerRequest req, StaplerResponse rsp, @QueryParameter String iconStyle) throws IOException, ServletException { getOwner().checkPermission(Item.CREATE); rsp.addHeader("Cache-Control", "no-cache, no-store, must-revalidate"); rsp.addHeader("Pragma", "no-cache"); rsp.addHeader("Expires", "0"); Categories categories = new Categories(); int order = 0; String resUrl; if (iconStyle != null && !iconStyle.isBlank()) { resUrl = req.getContextPath() + Jenkins.RESOURCE_PATH; } else { resUrl = null; } for (TopLevelItemDescriptor descriptor : DescriptorVisibilityFilter.apply(getOwner().getItemGroup(), Items.all2(Jenkins.getAuthentication2(), getOwner().getItemGroup()))) { ItemCategory ic = ItemCategory.getCategory(descriptor); Map<String, Serializable> metadata = new HashMap<>(); // Information about Item. 
metadata.put("class", descriptor.getId()); metadata.put("order", ++order); metadata.put("displayName", descriptor.getDisplayName()); metadata.put("description", descriptor.getDescription()); metadata.put("iconFilePathPattern", descriptor.getIconFilePathPattern()); String iconClassName = descriptor.getIconClassName(); if (iconClassName != null && !iconClassName.isBlank()) { metadata.put("iconClassName", iconClassName); if (iconClassName.startsWith("symbol-")) { String iconXml = Symbol.get(new SymbolRequest.Builder() .withName(iconClassName.split(" ")[0].substring(7)) .withPluginName(Functions.extractPluginNameFromIconSrc(iconClassName)) .withClasses("icon-xlg") .build()); metadata.put("iconXml", iconXml); } else { if (resUrl != null) { Icon icon = IconSet.icons .getIconByClassSpec(String.join(" ", iconClassName, iconStyle)); if (icon != null) { metadata.put("iconQualifiedUrl", icon.getQualifiedUrl(resUrl)); } } } } Category category = categories.getItem(ic.getId()); if (category != null) { category.getItems().add(metadata); } else { List<Map<String, Serializable>> temp = new ArrayList<>(); temp.add(metadata); category = new Category(ic.getId(), ic.getDisplayName(), ic.getDescription(), ic.getOrder(), ic.getMinToShow(), temp); categories.getItems().add(category); } } return categories; } public void doRssAll(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { RSS.rss(req, rsp, "Jenkins:" + getDisplayName() + " (all builds)", getUrl(), getBuilds().newBuilds()); } public void doRssFailed(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { RSS.rss(req, rsp, "Jenkins:" + getDisplayName() + " (failed builds)", getUrl(), getBuilds().failureOnly().newBuilds()); } public RunList getBuilds() { return new RunList(this); } @Deprecated @Restricted(DoNotUse.class) public BuildTimelineWidget getTimeline() { return new BuildTimelineWidget(getBuilds()); } public void doRssLatest(StaplerRequest req, StaplerResponse rsp) throws 
IOException, ServletException { List<Run> lastBuilds = new ArrayList<>(); for (TopLevelItem item : getItems()) { if (item instanceof Job) { Job job = (Job) item; Run lb = job.getLastBuild(); if (lb != null) lastBuilds.add(lb); } } RSS.rss(req, rsp, "Jenkins:" + getDisplayName() + " (latest builds)", getUrl(), RunList.fromRuns(lastBuilds), Run.FEED_ADAPTER_LATEST); } /** * Accepts {@code config.xml} submission, as well as serve it. */ @WebMethod(name = "config.xml") public HttpResponse doConfigDotXml(StaplerRequest req) throws IOException { if (req.getMethod().equals("GET")) { // read checkPermission(READ); return new HttpResponse() { @Override public void generateResponse(StaplerRequest req, StaplerResponse rsp, Object node) throws IOException, ServletException { rsp.setContentType("application/xml"); View.this.writeXml(rsp.getOutputStream()); } }; } if (req.getMethod().equals("POST")) { // submission updateByXml(new StreamSource(req.getReader())); return HttpResponses.ok(); } // huh? return HttpResponses.error(SC_BAD_REQUEST, "Unexpected request method " + req.getMethod()); } /** * @since 1.538 */ public void writeXml(OutputStream out) throws IOException { // pity we don't have a handy way to clone Jenkins.XSTREAM to temp add the omit Field XStream2 xStream2 = new XStream2(); xStream2.omitField(View.class, "owner"); xStream2.toXMLUTF8(View.this, out); } /** * Updates the View with the new XML definition. * @param source source of the Item's new definition. * The source should be either a {@link StreamSource} or {@link SAXSource}, other sources * may not be handled. 
*/ public void updateByXml(Source source) throws IOException { checkPermission(CONFIGURE); StringWriter out = new StringWriter(); try { // this allows us to use UTF-8 for storing data, // plus it checks any well-formedness issue in the submitted // data XMLUtils.safeTransform(source, new StreamResult(out)); out.close(); } catch (TransformerException | SAXException e) { throw new IOException("Failed to persist configuration.xml", e); } // try to reflect the changes by reloading try (InputStream in = new BufferedInputStream(new ByteArrayInputStream(out.toString().getBytes(StandardCharsets.UTF_8)))) { // Do not allow overwriting view name as it might collide with another // view in same ViewGroup and might not satisfy Jenkins.checkGoodName. String oldname = name; ViewGroup oldOwner = owner; // oddly, this field is not transient Object o = Jenkins.XSTREAM2.unmarshal(XStream2.getDefaultDriver().createReader(in), this, null, true); if (!o.getClass().equals(getClass())) { // ensure that we've got the same view type. extending this code to support updating // to different view type requires destroying & creating a new view type throw new IOException("Expecting view type: " + this.getClass() + " but got: " + o.getClass() + " instead." + "\nShould you needed to change to a new view type, you must first delete and then re-create " + "the view with the new view type."); } name = oldname; owner = oldOwner; } catch (StreamException | ConversionException | Error e) { // mostly reflection errors throw new IOException("Unable to read", e); } save(); } @Override public ModelObjectWithContextMenu.ContextMenu doChildrenContextMenu(StaplerRequest request, StaplerResponse response) throws Exception { ModelObjectWithContextMenu.ContextMenu m = new ModelObjectWithContextMenu.ContextMenu(); for (TopLevelItem i : getItems()) m.add(Functions.getRelativeLinkTo(i), Functions.getRelativeDisplayNameFrom(i, getOwner().getItemGroup())); return m; } /** * A list of available view types. 
* @deprecated as of 1.286 * Use {@link #all()} for read access, and use {@link Extension} for registration. */ @Deprecated public static final DescriptorList<View> LIST = new DescriptorList<>(View.class); /** * Returns all the registered {@link ViewDescriptor}s. */ public static DescriptorExtensionList<View, ViewDescriptor> all() { return Jenkins.get().getDescriptorList(View.class); } /** * Returns the {@link ViewDescriptor} instances that can be instantiated for the {@link ViewGroup} in the current * {@link StaplerRequest}. * <p> * <strong>NOTE: Historically this method is only ever called from a {@link StaplerRequest}</strong> * @return the list of instantiable {@link ViewDescriptor} instances for the current {@link StaplerRequest} */ @NonNull public static List<ViewDescriptor> allInstantiable() { List<ViewDescriptor> r = new ArrayList<>(); StaplerRequest request = Stapler.getCurrentRequest(); if (request == null) { throw new IllegalStateException("This method can only be invoked from a stapler request"); } ViewGroup owner = request.findAncestorObject(ViewGroup.class); if (owner == null) { throw new IllegalStateException("This method can only be invoked from a request with a ViewGroup ancestor"); } for (ViewDescriptor d : DescriptorVisibilityFilter.apply(owner, all())) { if (d.isApplicableIn(owner) && d.isInstantiable() && owner.getACL().hasCreatePermission2(Jenkins.getAuthentication2(), owner, d)) { r.add(d); } } return r; } public static final Comparator<View> SORTER = Comparator.comparing(View::getViewName); public static final PermissionGroup PERMISSIONS = new PermissionGroup(View.class, Messages._View_Permissions_Title()); /** * Permission to create new views. 
*/ public static final Permission CREATE = new Permission(PERMISSIONS, "Create", Messages._View_CreatePermission_Description(), Permission.CREATE, PermissionScope.ITEM_GROUP); public static final Permission DELETE = new Permission(PERMISSIONS, "Delete", Messages._View_DeletePermission_Description(), Permission.DELETE, PermissionScope.ITEM_GROUP); public static final Permission CONFIGURE = new Permission(PERMISSIONS, "Configure", Messages._View_ConfigurePermission_Description(), Permission.CONFIGURE, PermissionScope.ITEM_GROUP); public static final Permission READ = new Permission(PERMISSIONS, "Read", Messages._View_ReadPermission_Description(), Permission.READ, PermissionScope.ITEM_GROUP); @SuppressFBWarnings(value = "RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT", justification = "to guard against potential future compiler optimizations") @Initializer(before = InitMilestone.SYSTEM_CONFIG_LOADED) @Restricted(DoNotUse.class) public static void registerPermissions() { // Pending JENKINS-17200, ensure that the above permissions have been registered prior to // allowing plugins to adapt the system configuration, which may depend on these permissions // having been registered. Since this method is static and since it follows the above // construction of static permission objects (and therefore their calls to // PermissionGroup#register), there is nothing further to do in this method. We call // Objects.hash() to guard against potential future compiler optimizations. 
Objects.hash(PERMISSIONS, CREATE, DELETE, CONFIGURE, READ); } // to simplify access from Jelly public static Permission getItemCreatePermission() { return Item.CREATE; } public static View create(StaplerRequest req, StaplerResponse rsp, ViewGroup owner) throws FormException, IOException, ServletException { String mode = req.getParameter("mode"); String requestContentType = req.getContentType(); if (requestContentType == null && !(mode != null && mode.equals("copy"))) throw new Failure("No Content-Type header set"); boolean isXmlSubmission = requestContentType != null && (requestContentType.startsWith("application/xml") || requestContentType.startsWith("text/xml")); String name = req.getParameter("name"); Jenkins.checkGoodName(name); if (owner.getView(name) != null) throw new Failure(Messages.Hudson_ViewAlreadyExists(name)); if (mode == null || mode.isEmpty()) { if (isXmlSubmission) { View v = createViewFromXML(name, req.getInputStream()); owner.getACL().checkCreatePermission(owner, v.getDescriptor()); v.owner = owner; rsp.setStatus(HttpServletResponse.SC_OK); return v; } else throw new Failure(Messages.View_MissingMode()); } View v; if ("copy".equals(mode)) { v = copy(req, owner, name); } else { ViewDescriptor descriptor = all().findByName(mode); if (descriptor == null) { throw new Failure("No view type ‘" + mode + "’ is known"); } // create a view JSONObject submittedForm = req.getSubmittedForm(); submittedForm.put("name", name); v = descriptor.newInstance(req, submittedForm); } owner.getACL().checkCreatePermission(owner, v.getDescriptor()); v.owner = owner; // redirect to the config screen rsp.sendRedirect2(req.getContextPath() + '/' + v.getUrl() + v.getPostConstructLandingPage()); return v; } private static View copy(StaplerRequest req, ViewGroup owner, String name) throws IOException { View v; String from = req.getParameter("from"); View src = owner.getView(from); if (src == null) { if (Util.fixEmpty(from) == null) throw new Failure("Specify which view to 
copy"); else throw new Failure("No such view: " + from); } String xml = Jenkins.XSTREAM.toXML(src); v = createViewFromXML(name, new ByteArrayInputStream(xml.getBytes(Charset.defaultCharset()))); return v; } /** * Instantiate View subtype from XML stream. * * @param name Alternative name to use or {@code null} to keep the one in xml. */ public static View createViewFromXML(String name, InputStream xml) throws IOException { try (InputStream in = new BufferedInputStream(xml)) { View v = (View) Jenkins.XSTREAM.fromXML(in); if (name != null) v.name = name; Jenkins.checkGoodName(v.name); return v; } catch (StreamException | ConversionException | Error e) { // mostly reflection errors throw new IOException("Unable to read", e); } } public static class PropertyList extends DescribableList<ViewProperty, ViewPropertyDescriptor> { private PropertyList(View owner) { super(owner); } public PropertyList() {// needed for XStream deserialization } public View getOwner() { return (View) owner; } @Override protected void onModified() throws IOException { for (ViewProperty p : this) p.setView(getOwner()); } } /** * "Job" in "New Job". When a view is used in a context that restricts the child type, * It might be useful to override this. */ public static final Message<View> NEW_PRONOUN = new Message<>(); private static final Logger LOGGER = Logger.getLogger(View.class.getName()); }
MarkEWaite/jenkins
core/src/main/java/hudson/model/View.java
961
404: Not Found
apache/dubbo
dubbo-common/src/main/java/org/apache/dubbo/rpc/model/FrameworkModel.java
962
/* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */ /* Part of the Processing project - http://processing.org Copyright (c) 2008 Ben Fry and Casey Reas This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package processing.app; import cc.arduino.packages.BoardPort; import processing.app.debug.TargetBoard; import processing.app.debug.TargetPackage; import processing.app.debug.TargetPlatform; import processing.app.legacy.PConstants; import javax.swing.*; import java.io.File; import java.io.IOException; import java.util.*; import static processing.app.I18n.tr; /** * Used by Base for platform-specific tweaking, for instance finding the * sketchbook location using the Windows registry, or OS X event handling. * <p/> * The methods in this implementation are used by default, and can be * overridden by a subclass, if loaded by Base.main(). * <p/> * These methods throw vanilla-flavored Exceptions, so that error handling * occurs inside Base. * <p/> * There is currently no mechanism for adding new platforms, as the setup is * not automated. We could use getProperty("os.arch") perhaps, but that's * debatable (could be upper/lowercase, have spaces, etc.. basically we don't * know if name is proper Java package syntax.) */ public class Platform { /** * Set the default L & F. 
While I enjoy the bounty of the sixteen possible * exception types that this UIManager method might throw, I feel that in * just this one particular case, I'm being spoiled by those engineers * at Sun, those Masters of the Abstractionverse. It leaves me feeling sad * and overweight. So instead, I'll pretend that I'm not offered eleven dozen * ways to report to the user exactly what went wrong, and I'll bundle them * all into a single catch-all "Exception". Because in the end, all I really * care about is whether things worked or not. And even then, I don't care. * * @throws Exception Just like I said. */ public void setLookAndFeel() throws Exception { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } public void init() throws Exception { } public File getSettingsFolder() throws Exception { // otherwise make a .processing directory int the user's home dir File home = new File(System.getProperty("user.home")); File dataFolder = new File(home, ".arduino15"); return dataFolder; /* try { Class clazz = Class.forName("processing.app.macosx.ThinkDifferent"); Method m = clazz.getMethod("getLibraryFolder", new Class[] { }); String libraryPath = (String) m.invoke(null, new Object[] { }); //String libraryPath = BaseMacOS.getLibraryFolder(); File libraryFolder = new File(libraryPath); dataFolder = new File(libraryFolder, "Processing"); } catch (Exception e) { showError("Problem getting data folder", "Error getting the Processing data folder.", e); } */ } /** * @return null if not overridden, which will cause a prompt to show instead. 
* @throws Exception */
public File getDefaultSketchbookFolder() throws Exception {
  return null;
}

/**
 * Opens a URL, rewriting relative "file://./" URLs against the given folder first.
 *
 * <p>Note: this intentionally uses {@link String#replace(CharSequence, CharSequence)}
 * (literal substitution). The previous {@code replaceAll} call interpreted
 * "file://./" as a regex — the '.' matched any character, and '$' or '\' in the
 * folder URI would have been treated as group references in the replacement.</p>
 */
public void openURL(File folder, String url) throws Exception {
  if (!url.startsWith("file://./")) {
    openURL(url);
    return;
  }
  url = url.replace("file://./", folder.getCanonicalFile().toURI().toASCIIString());
  openURL(url);
}

/**
 * Opens a URL with the user-configured "launcher" program from preferences.txt,
 * or shows a warning if no launcher is configured.
 */
public void openURL(String url) throws Exception {
  String launcher = PreferencesData.get("launcher");
  if (launcher != null) {
    Runtime.getRuntime().exec(new String[]{launcher, url});
  } else {
    showLauncherWarning();
  }
}

/** Returns true if a "launcher" program is configured in the preferences. */
public boolean openFolderAvailable() {
  return PreferencesData.get("launcher") != null;
}

/** Opens a folder with the configured launcher, or warns if none is set. */
public void openFolder(File file) throws Exception {
  String launcher = PreferencesData.get("launcher");
  if (launcher != null) {
    String folder = file.getAbsolutePath();
    Runtime.getRuntime().exec(new String[]{launcher, folder});
  } else {
    showLauncherWarning();
  }
}

// Load the native serial-port enumeration library as soon as this class is used.
static {
  loadLib(new File(BaseNoGui.getContentFile("lib"), System.mapLibraryName("listSerialsj")));
}

/**
 * Loads a native library, terminating the process with an explanatory message
 * if it cannot be linked (the application cannot function without it).
 */
private static void loadLib(File lib) {
  try {
    System.load(lib.getAbsolutePath());
  } catch (UnsatisfiedLinkError e) {
    e.printStackTrace();
    System.out.println(e.getMessage());
    System.out.println("Cannot load native library " + lib.getAbsolutePath());
    System.out.println("The program has terminated!");
    System.exit(1);
  }
}

// Returns a "VID_PID_iSerial..." style string for the device on the given port.
private native String resolveDeviceAttachedToNative(String serial);

// Returns all serial ports as "name_VID_PID..." style strings.
private native String[] listSerialsNative();

public String preListAllCandidateDevices() {
  return null;
}

/** Returns the raw native port descriptors as a mutable list. */
public List<String> listSerials() {
  return new ArrayList<>(Arrays.asList(listSerialsNative()));
}

/** Returns just the port names (the part before the first '_') for each serial port. */
public List<String> listSerialsNames() {
  List<String> list = new LinkedList<>();
  for (String port : listSerialsNative()) {
    list.add(port.split("_")[0]);
  }
  return list;
}

/**
 * Resolves the board attached to the given serial port by matching its VID/PID
 * (and optional USB descriptor strings) against every installed board definition.
 *
 * @return a map with keys "board", "vid", "pid", "iserial", or null if no board matches.
 */
public synchronized Map<String, Object> resolveDeviceByVendorIdProductId(String serial, Map<String, TargetPackage> packages) {
  String vid_pid_iSerial = resolveDeviceAttachedToNative(serial);
  for (TargetPackage targetPackage : packages.values()) {
    for (TargetPlatform targetPlatform : targetPackage.getPlatforms().values()) {
      for (TargetBoard board : targetPlatform.getBoards().values()) {
        List<String> vids = new LinkedList<>(board.getPreferences().subTree("vid", 1).values());
        if (!vids.isEmpty()) {
          List<String> pids = new LinkedList<>(board.getPreferences().subTree("pid", 1).values());
          List<String> descriptors = new LinkedList<>(board.getPreferences().subTree("descriptor", 1).values());
          // assumes vids and pids are parallel lists of equal length — TODO confirm in boards.txt format
          for (int i = 0; i < vids.size(); i++) {
            String vidPid = vids.get(i) + "_" + pids.get(i);
            if (vid_pid_iSerial.toUpperCase().contains(vidPid.toUpperCase())) {
              // If the board declares descriptor strings, at least one must also match.
              if (!descriptors.isEmpty()) {
                boolean matched = false;
                for (int j = 0; j < descriptors.size(); j++) {
                  if (vid_pid_iSerial.toUpperCase().contains(descriptors.get(j).toUpperCase())) {
                    matched = true;
                    break;
                  }
                }
                if (matched == false) {
                  continue;
                }
              }
              Map<String, Object> boardData = new HashMap<>();
              boardData.put("board", board);
              // remove 0x from VID / PID to keep them as reported by liblistserial
              boardData.put("vid", vids.get(i).replaceAll("0x", ""));
              boardData.put("pid", pids.get(i).replaceAll("0x", ""));
              String extrafields = vid_pid_iSerial.substring(vidPid.length() + 1);
              String[] parts = extrafields.split("_");
              boardData.put("iserial", parts[0]);
              return boardData;
            }
          }
        }
      }
    }
  }
  return null;
}

/**
 * Returns the display name of the board with the given id, searching every
 * installed package/platform, or null if no board has that id.
 */
public String resolveDeviceByBoardID(Map<String, TargetPackage> packages, String boardId) {
  assert packages != null;
  assert boardId != null;
  for (TargetPackage targetPackage : packages.values()) {
    for (TargetPlatform targetPlatform : targetPackage.getPlatforms().values()) {
      for (TargetBoard board : targetPlatform.getBoards().values()) {
        if (boardId.equals(board.getId())) {
          return board.getName();
        }
      }
    }
  }
  return null;
}

// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .

public String getName() {
  return PConstants.platformNames[PConstants.OTHER];
}

// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .

/** Warns the user that no "launcher" program is configured in preferences.txt. */
protected void showLauncherWarning() {
  BaseNoGui.showWarning(tr("No launcher available"), tr("Unspecified platform, no launcher available.\nTo enable opening URLs or folders, add a \n\"launcher=/path/to/app\" line to preferences.txt"), null);
}

/** Default port filtering: returns a copy of the ports, no filtering applied. */
public List<BoardPort> filterPorts(List<BoardPort> ports, boolean aBoolean) {
  return new LinkedList<>(ports);
}

/** Restricts the preferences file to owner read/write (chmod 600). */
public void fixPrefsFilePermissions(File prefsFile) throws IOException, InterruptedException {
  Process process = Runtime.getRuntime().exec(new String[]{"chmod", "600", prefsFile.getAbsolutePath()}, null, null);
  process.waitFor();
}

/** Scripts to run (if present) after installing a tool/core into {@code folder}. */
public List<File> postInstallScripts(File folder) {
  List<File> scripts = new LinkedList<>();
  scripts.add(new File(folder, "install_script.sh"));
  scripts.add(new File(folder, "post_install.sh"));
  return scripts;
}

/** Scripts to run (if present) before uninstalling a tool/core from {@code folder}. */
public List<File> preUninstallScripts(File folder) {
  List<File> scripts = new LinkedList<>();
  scripts.add(new File(folder, "pre_uninstall.sh"));
  return scripts;
}

public String getOsName() {
  return System.getProperty("os.name");
}

public String getOsArch() {
  return System.getProperty("os.arch");
}

/** Creates a symbolic link via "ln -s", executed in the parent of the target. */
public void symlink(String something, File somewhere) throws IOException, InterruptedException {
  Process process = Runtime.getRuntime().exec(new String[]{"ln", "-s", something, somewhere.getAbsolutePath()}, null, somewhere.getParentFile());
  process.waitFor();
}

/** Creates a hard link via "ln". */
public void link(File something, File somewhere) throws IOException, InterruptedException {
  Process process = Runtime.getRuntime().exec(new String[]{"ln", something.getAbsolutePath(), somewhere.getAbsolutePath()}, null, null);
  process.waitFor();
}

/** Changes a file's permission bits; {@code mode} is an int rendered as octal. */
public void chmod(File file, int mode) throws IOException, InterruptedException {
  Process process = Runtime.getRuntime().exec(new String[]{"chmod", Integer.toOctalString(mode), file.getAbsolutePath()}, null, null);
  process.waitFor();
}

public void fixSettingsLocation() throws Exception {
  //noop
}

public int getSystemDPI() {
  return 96;
}
}
PaulStoffregen/Arduino-1.8.19-Teensyduino
arduino-core/src/processing/app/Platform.java
963
/*
 * This project is licensed under the MIT license. Module model-view-viewmodel is using ZK framework licensed under LGPL (see lgpl-3.0.txt).
 *
 * The MIT License
 * Copyright © 2014-2022 Ilkka Seppälä
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.iluwatar.layers;

import dto.CakeInfo;
import dto.CakeLayerInfo;
import dto.CakeToppingInfo;
import exception.CakeBakingException;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
import service.CakeBakingService;
import view.CakeViewImpl;

/**
 * Application entry point, executed by Spring Boot once the application context
 * is ready (via {@link CommandLineRunner}).
 *
 * <p>On startup it seeds the {@link CakeBakingService} with example layers and
 * toppings, bakes two sample cakes (logging any {@link CakeBakingException}
 * rather than propagating it), and finally renders the cakes through a
 * {@link CakeViewImpl}.</p>
 */
@Component
@Slf4j
public class Runner implements CommandLineRunner {

  private final CakeBakingService cakeBakingService;
  public static final String STRAWBERRY = "strawberry";

  @Autowired
  public Runner(CakeBakingService cakeBakingService) {
    this.cakeBakingService = cakeBakingService;
  }

  @Override
  public void run(String... args) {
    // Seed the service with example data, then show the result.
    initializeData();
    var view = new CakeViewImpl(cakeBakingService);
    view.render();
  }

  /**
   * Seeds the service with sample layers and toppings, then bakes two cakes.
   * Two strawberry layers are saved because each cake consumes one.
   */
  private void initializeData() {
    saveLayers();
    saveToppings();

    var candyCake = new CakeInfo(new CakeToppingInfo("candies", 0),
        List.of(new CakeLayerInfo("chocolate", 0),
            new CakeLayerInfo("banana", 0),
            new CakeLayerInfo(STRAWBERRY, 0)));
    bakeCakeSafely(candyCake);

    var cherryCake = new CakeInfo(new CakeToppingInfo("cherry", 0),
        List.of(new CakeLayerInfo("vanilla", 0),
            new CakeLayerInfo("lemon", 0),
            new CakeLayerInfo(STRAWBERRY, 0)));
    bakeCakeSafely(cherryCake);
  }

  /** Persists every sample layer, in a fixed order. */
  private void saveLayers() {
    cakeBakingService.saveNewLayer(new CakeLayerInfo("chocolate", 1200));
    cakeBakingService.saveNewLayer(new CakeLayerInfo("banana", 900));
    cakeBakingService.saveNewLayer(new CakeLayerInfo(STRAWBERRY, 950));
    cakeBakingService.saveNewLayer(new CakeLayerInfo("lemon", 950));
    cakeBakingService.saveNewLayer(new CakeLayerInfo("vanilla", 950));
    cakeBakingService.saveNewLayer(new CakeLayerInfo(STRAWBERRY, 950));
  }

  /** Persists every sample topping. */
  private void saveToppings() {
    cakeBakingService.saveNewTopping(new CakeToppingInfo("candies", 350));
    cakeBakingService.saveNewTopping(new CakeToppingInfo("cherry", 350));
  }

  /** Bakes one cake, logging (not rethrowing) any baking failure. */
  private void bakeCakeSafely(CakeInfo cake) {
    try {
      cakeBakingService.bakeNewCake(cake);
    } catch (CakeBakingException e) {
      LOGGER.error("Cake baking exception", e);
    }
  }
}
rajprins/java-design-patterns
layers/src/main/java/com/iluwatar/layers/Runner.java
964
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.unsafe.map;

import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.LinkedList;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.io.Closeables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.spark.SparkEnv;
import org.apache.spark.executor.ShuffleWriteMetrics;
import org.apache.spark.memory.MemoryConsumer;
import org.apache.spark.memory.SparkOutOfMemoryError;
import org.apache.spark.memory.TaskMemoryManager;
import org.apache.spark.serializer.SerializerManager;
import org.apache.spark.storage.BlockManager;
import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.UnsafeAlignedOffset;
import org.apache.spark.unsafe.array.ByteArrayMethods;
import org.apache.spark.unsafe.array.LongArray;
import org.apache.spark.unsafe.hash.Murmur3_x86_32;
import org.apache.spark.unsafe.memory.MemoryBlock;
import org.apache.spark.util.collection.unsafe.sort.UnsafeSorterSpillReader;
import org.apache.spark.util.collection.unsafe.sort.UnsafeSorterSpillWriter;

/**
 * An append-only hash map where keys and values are contiguous regions of bytes.
 *
 * This is backed by a power-of-2-sized hash table, using quadratic probing with triangular numbers,
 * which is guaranteed to exhaust the space.
 *
 * The map can support up to 2^29 keys. If the key cardinality is higher than this, you should
 * probably be using sorting instead of hashing for better cache locality.
 *
 * The key and values under the hood are stored together, in the following format:
 *   Bytes 0 to 4: len(k) (key length in bytes) + len(v) (value length in bytes) + 4
 *   Bytes 4 to 8: len(k)
 *   Bytes 8 to 8 + len(k): key data
 *   Bytes 8 + len(k) to 8 + len(k) + len(v): value data
 *   Bytes 8 + len(k) + len(v) to 8 + len(k) + len(v) + 8: pointer to next pair
 *
 * This means that the first four bytes store the entire record (key + value) length. This format
 * is compatible with {@link org.apache.spark.util.collection.unsafe.sort.UnsafeExternalSorter},
 * so we can pass records from this map directly into the sorter to sort records in place.
 */
public final class BytesToBytesMap extends MemoryConsumer {

  private static final Logger logger = LoggerFactory.getLogger(BytesToBytesMap.class);

  private static final HashMapGrowthStrategy growthStrategy = HashMapGrowthStrategy.DOUBLING;

  private final TaskMemoryManager taskMemoryManager;

  /**
   * A linked list for tracking all allocated data pages so that we can free all of our memory.
   */
  private final LinkedList<MemoryBlock> dataPages = new LinkedList<>();

  /**
   * The data page that will be used to store keys and values for new hashtable entries. When this
   * page becomes full, a new page will be allocated and this pointer will change to point to that
   * new page.
   */
  private MemoryBlock currentPage = null;

  /**
   * Offset into `currentPage` that points to the location where new data can be inserted into
   * the page. This does not incorporate the page's base offset.
   */
  private long pageCursor = 0;

  /**
   * The maximum number of keys that BytesToBytesMap supports. The hash table has to be
   * power-of-2-sized and its backing Java array can contain at most (1 &lt;&lt; 30) elements,
   * since that's the largest power-of-2 that's less than Integer.MAX_VALUE. We need two long array
   * entries per key, giving us a maximum capacity of (1 &lt;&lt; 29).
   */
  @VisibleForTesting
  static final int MAX_CAPACITY = (1 << 29);

  // This choice of page table size and page size means that we can address up to 500 gigabytes
  // of memory.

  /**
   * A single array to store the key and value.
   *
   * Position {@code 2 * i} in the array is used to track a pointer to the key at index {@code i},
   * while position {@code 2 * i + 1} in the array holds key's full 32-bit hashcode.
   */
  @Nullable private LongArray longArray;
  // TODO: we're wasting 32 bits of space here; we can probably store fewer bits of the hashcode
  // and exploit word-alignment to use fewer bits to hold the address.  This might let us store
  // only one long per map entry, increasing the chance that this array will fit in cache at the
  // expense of maybe performing more lookups if we have hash collisions.  Say that we stored only
  // 27 bits of the hashcode and 37 bits of the address.  37 bits is enough to address 1 terabyte
  // of RAM given word-alignment.  If we use 13 bits of this for our page table, that gives us a
  // maximum page size of 2^24 * 8 = ~134 megabytes per page. This change will require us to store
  // full base addresses in the page table for off-heap mode so that we can reconstruct the full
  // absolute memory addresses.

  /**
   * Whether or not the longArray can grow. We will not insert more elements if it's false.
   */
  private boolean canGrowArray = true;

  private final double loadFactor;

  /**
   * The size of the data pages that hold key and value data. Map entries cannot span multiple
   * pages, so this limits the maximum entry size.
   */
  private final long pageSizeBytes;

  /**
   * Number of keys defined in the map.
   */
  private int numKeys;

  /**
   * Number of values defined in the map. A key could have multiple values.
   */
  private int numValues;

  /**
   * The map will be expanded once the number of keys exceeds this threshold.
   */
  private int growthThreshold;

  /**
   * Mask for truncating hashcodes so that they do not exceed the long array's size.
   * This is a strength reduction optimization; we're essentially performing a modulus operation,
   * but doing so with a bitmask because this is a power-of-2-sized hash map.
   */
  private int mask;

  /**
   * Return value of {@link BytesToBytesMap#lookup(Object, long, int)}.
   */
  private final Location loc;

  // Probe/lookup counters, used to compute the average probe chain length.
  private long numProbes = 0L;
  private long numKeyLookups = 0L;

  private long peakMemoryUsedBytes = 0L;

  private final int initialCapacity;

  private final BlockManager blockManager;
  private final SerializerManager serializerManager;
  private volatile MapIterator destructiveIterator = null;
  // One spill writer per page that has been spilled to disk, in spill order.
  private LinkedList<UnsafeSorterSpillWriter> spillWriters = new LinkedList<>();

  public BytesToBytesMap(
      TaskMemoryManager taskMemoryManager,
      BlockManager blockManager,
      SerializerManager serializerManager,
      int initialCapacity,
      double loadFactor,
      long pageSizeBytes) {
    super(taskMemoryManager, pageSizeBytes, taskMemoryManager.getTungstenMemoryMode());
    this.taskMemoryManager = taskMemoryManager;
    this.blockManager = blockManager;
    this.serializerManager = serializerManager;
    this.loadFactor = loadFactor;
    this.loc = new Location();
    this.pageSizeBytes = pageSizeBytes;
    if (initialCapacity <= 0) {
      throw new IllegalArgumentException("Initial capacity must be greater than 0");
    }
    if (initialCapacity > MAX_CAPACITY) {
      throw new IllegalArgumentException(
        "Initial capacity " + initialCapacity + " exceeds maximum capacity of " + MAX_CAPACITY);
    }
    if (pageSizeBytes > TaskMemoryManager.MAXIMUM_PAGE_SIZE_BYTES) {
      throw new IllegalArgumentException("Page size " + pageSizeBytes + " cannot exceed " +
        TaskMemoryManager.MAXIMUM_PAGE_SIZE_BYTES);
    }
    this.initialCapacity = initialCapacity;
    allocate(initialCapacity);
  }

  public BytesToBytesMap(
      TaskMemoryManager taskMemoryManager,
      int initialCapacity,
      long pageSizeBytes) {
    this(
      taskMemoryManager,
      SparkEnv.get() != null ? SparkEnv.get().blockManager() : null,
      SparkEnv.get() != null ? SparkEnv.get().serializerManager() : null,
      initialCapacity,
      // In order to re-use the longArray for sorting, the load factor cannot be larger than 0.5.
      0.5,
      pageSizeBytes);
  }

  /**
   * Returns the number of keys defined in the map.
   */
  public int numKeys() { return numKeys; }

  /**
   * Returns the number of values defined in the map. A key could have multiple values.
   */
  public int numValues() { return numValues; }

  public final class MapIterator implements Iterator<Location> {

    private int numRecords;
    private final Location loc;

    private MemoryBlock currentPage = null;
    private int recordsInPage = 0;
    private Object pageBaseObject;
    private long offsetInPage;

    // If this iterator destructive or not. When it is true, it frees each page as it moves onto
    // next one.
    private boolean destructive = false;
    private UnsafeSorterSpillReader reader = null;

    private MapIterator(int numRecords, Location loc, boolean destructive) {
      this.numRecords = numRecords;
      this.loc = loc;
      this.destructive = destructive;
      if (destructive) {
        destructiveIterator = this;
        // longArray will not be used anymore if destructive is true, release it now.
        if (longArray != null) {
          freeArray(longArray);
          longArray = null;
        }
      }
    }

    private void advanceToNextPage() {
      // SPARK-26265: We will first lock this `MapIterator` and then `TaskMemoryManager` when going
      // to free a memory page by calling `freePage`. At the same time, it is possibly that another
      // memory consumer first locks `TaskMemoryManager` and then this `MapIterator` when it
      // acquires memory and causes spilling on this `MapIterator`. To avoid deadlock here, we keep
      // reference to the page to free and free it after releasing the lock of `MapIterator`.
      MemoryBlock pageToFree = null;

      try {
        synchronized (this) {
          int nextIdx = dataPages.indexOf(currentPage) + 1;
          if (destructive && currentPage != null) {
            dataPages.remove(currentPage);
            pageToFree = currentPage;
            nextIdx--;
          }
          if (dataPages.size() > nextIdx) {
            // Move to the next in-memory page.
            currentPage = dataPages.get(nextIdx);
            pageBaseObject = currentPage.getBaseObject();
            offsetInPage = currentPage.getBaseOffset();
            recordsInPage = UnsafeAlignedOffset.getSize(pageBaseObject, offsetInPage);
            offsetInPage += UnsafeAlignedOffset.getUaoSize();
          } else {
            // No more in-memory pages: fall back to reading spilled records from disk.
            currentPage = null;
            if (reader != null) {
              handleFailedDelete();
            }
            try {
              Closeables.close(reader, /* swallowIOException = */ false);
              reader = spillWriters.getFirst().getReader(serializerManager);
              recordsInPage = -1;
            } catch (IOException e) {
              // Scala iterator does not handle exception
              Platform.throwException(e);
            }
          }
        }
      } finally {
        if (pageToFree != null) {
          freePage(pageToFree);
        }
      }
    }

    @Override
    public boolean hasNext() {
      if (numRecords == 0) {
        if (reader != null) {
          // Iteration finished: clean up the last spill file.
          handleFailedDelete();
        }
      }
      return numRecords > 0;
    }

    @Override
    public Location next() {
      if (recordsInPage == 0) {
        advanceToNextPage();
      }
      numRecords--;
      if (recordsInPage == 0) {
        advanceToNextPage();
      }
      numRecords--;
      if (currentPage != null) {
        int totalLength = UnsafeAlignedOffset.getSize(pageBaseObject, offsetInPage);
        loc.with(currentPage, offsetInPage);
        // [total size] [key size] [key] [value] [pointer to next]
        offsetInPage += UnsafeAlignedOffset.getUaoSize() + totalLength + 8;
        recordsInPage --;
        return loc;
      } else {
        assert(reader != null);
        if (!reader.hasNext()) {
          advanceToNextPage();
        }
        try {
          reader.loadNext();
        } catch (IOException e) {
          try {
            reader.close();
          } catch(IOException e2) {
            logger.error("Error while closing spill reader", e2);
          }
          // Scala iterator does not handle exception
          Platform.throwException(e);
        }
        loc.with(reader.getBaseObject(), reader.getBaseOffset(), reader.getRecordLength());
        return loc;
      }
    }

    public synchronized long spill(long numBytes) throws IOException {
      if (!destructive || dataPages.size() == 1) {
        return 0L;
      }

      updatePeakMemoryUsed();

      // TODO: use existing ShuffleWriteMetrics
      ShuffleWriteMetrics writeMetrics = new ShuffleWriteMetrics();

      long released = 0L;
      while (dataPages.size() > 0) {
        MemoryBlock block = dataPages.getLast();
        // The currentPage is used, cannot be released
        if (block == currentPage) {
          break;
        }

        // Write every record in this page out to disk, then free the page.
        Object base = block.getBaseObject();
        long offset = block.getBaseOffset();
        int numRecords = UnsafeAlignedOffset.getSize(base, offset);
        int uaoSize = UnsafeAlignedOffset.getUaoSize();
        offset += uaoSize;
        final UnsafeSorterSpillWriter writer =
          new UnsafeSorterSpillWriter(blockManager, 32 * 1024, writeMetrics, numRecords);
        while (numRecords > 0) {
          int length = UnsafeAlignedOffset.getSize(base, offset);
          writer.write(base, offset + uaoSize, length, 0);
          offset += uaoSize + length + 8;
          numRecords--;
        }
        writer.close();
        spillWriters.add(writer);

        dataPages.removeLast();
        released += block.size();
        freePage(block);

        if (released >= numBytes) {
          break;
        }
      }

      return released;
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException();
    }

    private void handleFailedDelete() {
      // remove the spill file from disk
      File file = spillWriters.removeFirst().getFile();
      if (file != null && file.exists() && !file.delete()) {
        logger.error("Was unable to delete spill file {}", file.getAbsolutePath());
      }
    }
  }

  /**
   * Returns an iterator for iterating over the entries of this map.
   *
   * For efficiency, all calls to `next()` will return the same {@link Location} object.
   *
   * If any other lookups or operations are performed on this map while iterating over it, including
   * `lookup()`, the behavior of the returned iterator is undefined.
   */
  public MapIterator iterator() {
    return new MapIterator(numValues, loc, false);
  }

  /**
   * Returns a destructive iterator for iterating over the entries of this map. It frees each page
   * as it moves onto next one. Notice: it is illegal to call any method on the map after
   * `destructiveIterator()` has been called.
   *
   * For efficiency, all calls to `next()` will return the same {@link Location} object.
   *
   * If any other lookups or operations are performed on this map while iterating over it, including
   * `lookup()`, the behavior of the returned iterator is undefined.
   */
  public MapIterator destructiveIterator() {
    updatePeakMemoryUsed();
    return new MapIterator(numValues, loc, true);
  }

  /**
   * Looks up a key, and return a {@link Location} handle that can be used to test existence
   * and read/write values.
   *
   * This function always return the same {@link Location} instance to avoid object allocation.
   */
  public Location lookup(Object keyBase, long keyOffset, int keyLength) {
    safeLookup(keyBase, keyOffset, keyLength, loc,
      Murmur3_x86_32.hashUnsafeWords(keyBase, keyOffset, keyLength, 42));
    return loc;
  }

  /**
   * Looks up a key, and return a {@link Location} handle that can be used to test existence
   * and read/write values.
   *
   * This function always return the same {@link Location} instance to avoid object allocation.
   */
  public Location lookup(Object keyBase, long keyOffset, int keyLength, int hash) {
    safeLookup(keyBase, keyOffset, keyLength, loc, hash);
    return loc;
  }

  /**
   * Looks up a key, and saves the result in provided `loc`.
   *
   * This is a thread-safe version of `lookup`, could be used by multiple threads.
   */
  public void safeLookup(Object keyBase, long keyOffset, int keyLength, Location loc, int hash) {
    assert(longArray != null);

    numKeyLookups++;
    int pos = hash & mask;
    // Quadratic probing with triangular numbers: step grows by 1 each probe.
    int step = 1;
    while (true) {
      numProbes++;
      if (longArray.get(pos * 2) == 0) {
        // This is a new key.
        loc.with(pos, hash, false);
        return;
      } else {
        long stored = longArray.get(pos * 2 + 1);
        if ((int) (stored) == hash) {
          // Full hash code matches.  Let's compare the keys for equality.
          loc.with(pos, hash, true);
          if (loc.getKeyLength() == keyLength) {
            final boolean areEqual = ByteArrayMethods.arrayEquals(
              keyBase,
              keyOffset,
              loc.getKeyBase(),
              loc.getKeyOffset(),
              keyLength
            );
            if (areEqual) {
              return;
            }
          }
        }
      }
      pos = (pos + step) & mask;
      step++;
    }
  }

  /**
   * Handle returned by {@link BytesToBytesMap#lookup(Object, long, int)} function.
   */
  public final class Location {
    /** An index into the hash map's Long array */
    private int pos;
    /** True if this location points to a position where a key is defined, false otherwise */
    private boolean isDefined;
    /**
     * The hashcode of the most recent key passed to
     * {@link BytesToBytesMap#lookup(Object, long, int, int)}. Caching this hashcode here allows us
     * to avoid re-hashing the key when storing a value for that key.
     */
    private int keyHashcode;
    private Object baseObject;  // the base object for key and value
    private long keyOffset;
    private int keyLength;
    private long valueOffset;
    private int valueLength;

    /**
     * Memory page containing the record. Only set if created by {@link BytesToBytesMap#iterator()}.
     */
    @Nullable private MemoryBlock memoryPage;

    private void updateAddressesAndSizes(long fullKeyAddress) {
      updateAddressesAndSizes(
        taskMemoryManager.getPage(fullKeyAddress),
        taskMemoryManager.getOffsetInPage(fullKeyAddress));
    }

    // Decode a record header at (base, offset) into key/value offsets and lengths.
    private void updateAddressesAndSizes(final Object base, long offset) {
      baseObject = base;
      final int totalLength = UnsafeAlignedOffset.getSize(base, offset);
      int uaoSize = UnsafeAlignedOffset.getUaoSize();
      offset += uaoSize;
      keyLength = UnsafeAlignedOffset.getSize(base, offset);
      offset += uaoSize;
      keyOffset = offset;
      valueOffset = offset + keyLength;
      valueLength = totalLength - keyLength - uaoSize;
    }

    private Location with(int pos, int keyHashcode, boolean isDefined) {
      assert(longArray != null);
      this.pos = pos;
      this.isDefined = isDefined;
      this.keyHashcode = keyHashcode;
      if (isDefined) {
        final long fullKeyAddress = longArray.get(pos * 2);
        updateAddressesAndSizes(fullKeyAddress);
      }
      return this;
    }

    private Location with(MemoryBlock page, long offsetInPage) {
      this.isDefined = true;
      this.memoryPage = page;
      updateAddressesAndSizes(page.getBaseObject(), offsetInPage);
      return this;
    }

    /**
     * This is only used for spilling
     */
    private Location with(Object base, long offset, int length) {
      this.isDefined = true;
      this.memoryPage = null;
      baseObject = base;
      int uaoSize = UnsafeAlignedOffset.getUaoSize();
      keyOffset = offset + uaoSize;
      keyLength = UnsafeAlignedOffset.getSize(base, offset);
      valueOffset = offset + uaoSize + keyLength;
      valueLength = length - uaoSize - keyLength;
      return this;
    }

    /**
     * Find the next pair that has the same key as current one.
     */
    public boolean nextValue() {
      assert isDefined;
      // The 8-byte word following the value stores a pointer to the next pair for this key.
      long nextAddr = Platform.getLong(baseObject, valueOffset + valueLength);
      if (nextAddr == 0) {
        return false;
      } else {
        updateAddressesAndSizes(nextAddr);
        return true;
      }
    }

    /**
     * Returns the memory page that contains the current record.
     * This is only valid if this is returned by {@link BytesToBytesMap#iterator()}.
     */
    public MemoryBlock getMemoryPage() {
      return this.memoryPage;
    }

    /**
     * Returns true if the key is defined at this position, and false otherwise.
     */
    public boolean isDefined() {
      return isDefined;
    }

    /**
     * Returns the base object for key.
     */
    public Object getKeyBase() {
      assert (isDefined);
      return baseObject;
    }

    /**
     * Returns the offset for key.
     */
    public long getKeyOffset() {
      assert (isDefined);
      return keyOffset;
    }

    /**
     * Returns the base object for value.
     */
    public Object getValueBase() {
      assert (isDefined);
      return baseObject;
    }

    /**
     * Returns the offset for value.
     */
    public long getValueOffset() {
      assert (isDefined);
      return valueOffset;
    }

    /**
     * Returns the length of the key defined at this position.
     * Unspecified behavior if the key is not defined.
     */
    public int getKeyLength() {
      assert (isDefined);
      return keyLength;
    }

    /**
     * Returns the length of the value defined at this position.
     * Unspecified behavior if the key is not defined.
     */
    public int getValueLength() {
      assert (isDefined);
      return valueLength;
    }

    /**
     * Append a new value for the key. This method could be called multiple times for a given key.
     * The return value indicates whether the put succeeded or whether it failed because additional
     * memory could not be acquired.
     * <p>
     * It is only valid to call this method immediately after calling `lookup()` using the same key.
     * </p>
     * <p>
     * The key and value must be word-aligned (that is, their sizes must be a multiple of 8).
     * </p>
     * <p>
     * After calling this method, calls to `get[Key|Value]Address()` and `get[Key|Value]Length`
     * will return information on the data stored by this `append` call.
     * </p>
     * <p>
     * As an example usage, here's the proper way to store a new key:
     * </p>
     * <pre>
     *   Location loc = map.lookup(keyBase, keyOffset, keyLength);
     *   if (!loc.isDefined()) {
     *     if (!loc.append(keyBase, keyOffset, keyLength, ...)) {
     *       // handle failure to grow map (by spilling, for example)
     *     }
     *   }
     * </pre>
     * <p>
     * Unspecified behavior if the key is not defined.
     * </p>
     *
     * @return true if the put() was successful and false if the put() failed because memory could
     *         not be acquired.
     */
    public boolean append(Object kbase, long koff, int klen, Object vbase, long voff, int vlen) {
      assert (klen % 8 == 0);
      assert (vlen % 8 == 0);
      assert (longArray != null);

      if (numKeys == MAX_CAPACITY
        // The map could be reused from last spill (because of no enough memory to grow),
        // then we don't try to grow again if hit the `growthThreshold`.
        || !canGrowArray && numKeys >= growthThreshold) {
        return false;
      }

      // Here, we'll copy the data into our data pages. Because we only store a relative offset from
      // the key address instead of storing the absolute address of the value, the key and value
      // must be stored in the same memory page.
      // (8 byte key length) (key) (value) (8 byte pointer to next value)
      int uaoSize = UnsafeAlignedOffset.getUaoSize();
      final long recordLength = (2L * uaoSize) + klen + vlen + 8;
      if (currentPage == null || currentPage.size() - pageCursor < recordLength) {
        if (!acquireNewPage(recordLength + uaoSize)) {
          return false;
        }
      }

      // --- Append the key and value data to the current data page --------------------------------
      final Object base = currentPage.getBaseObject();
      long offset = currentPage.getBaseOffset() + pageCursor;
      final long recordOffset = offset;
      UnsafeAlignedOffset.putSize(base, offset, klen + vlen + uaoSize);
      UnsafeAlignedOffset.putSize(base, offset + uaoSize, klen);
      offset += (2L * uaoSize);
      Platform.copyMemory(kbase, koff, base, offset, klen);
      offset += klen;
      Platform.copyMemory(vbase, voff, base, offset, vlen);
      offset += vlen;
      // put this value at the beginning of the list
      Platform.putLong(base, offset, isDefined ? longArray.get(pos * 2) : 0);

      // --- Update bookkeeping data structures ----------------------------------------------------
      offset = currentPage.getBaseOffset();
      UnsafeAlignedOffset.putSize(base, offset, UnsafeAlignedOffset.getSize(base, offset) + 1);
      pageCursor += recordLength;
      final long storedKeyAddress = taskMemoryManager.encodePageNumberAndOffset(
        currentPage, recordOffset);
      longArray.set(pos * 2, storedKeyAddress);
      updateAddressesAndSizes(storedKeyAddress);
      numValues++;
      if (!isDefined) {
        numKeys++;
        longArray.set(pos * 2 + 1, keyHashcode);
        isDefined = true;

        if (numKeys >= growthThreshold && longArray.size() < MAX_CAPACITY) {
          try {
            growAndRehash();
          } catch (SparkOutOfMemoryError oom) {
            // Could not grow: keep the current array and stop accepting new keys at threshold.
            canGrowArray = false;
          }
        }
      }
      return true;
    }
  }

  /**
   * Acquire a new page from the memory manager.
   * @return whether there is enough space to allocate the new page.
   */
  private boolean acquireNewPage(long required) {
    try {
      currentPage = allocatePage(required);
    } catch (SparkOutOfMemoryError e) {
      return false;
    }
    dataPages.add(currentPage);
    UnsafeAlignedOffset.putSize(currentPage.getBaseObject(), currentPage.getBaseOffset(), 0);
    pageCursor = UnsafeAlignedOffset.getUaoSize();
    return true;
  }

  @Override
  public long spill(long size, MemoryConsumer trigger) throws IOException {
    if (trigger != this && destructiveIterator != null) {
      return destructiveIterator.spill(size);
    }
    return 0L;
  }

  /**
   * Allocate new data structures for this map. When calling this outside of the constructor,
   * make sure to keep references to the old data structures so that you can free them.
   *
   * @param capacity the new map capacity
   */
  private void allocate(int capacity) {
    assert (capacity >= 0);
    capacity = Math.max((int) Math.min(MAX_CAPACITY, ByteArrayMethods.nextPowerOf2(capacity)), 64);
    assert (capacity <= MAX_CAPACITY);
    longArray = allocateArray(capacity * 2L);
    longArray.zeroOut();

    this.growthThreshold = (int) (capacity * loadFactor);
    this.mask = capacity - 1;
  }

  /**
   * Free all allocated memory associated with this map, including the storage for keys and values
   * as well as the hash map array itself.
   *
   * This method is idempotent and can be called multiple times.
   */
  public void free() {
    updatePeakMemoryUsed();
    if (longArray != null) {
      freeArray(longArray);
      longArray = null;
    }
    Iterator<MemoryBlock> dataPagesIterator = dataPages.iterator();
    while (dataPagesIterator.hasNext()) {
      MemoryBlock dataPage = dataPagesIterator.next();
      dataPagesIterator.remove();
      freePage(dataPage);
    }
    assert(dataPages.isEmpty());
    while (!spillWriters.isEmpty()) {
      File file = spillWriters.removeFirst().getFile();
      if (file != null && file.exists()) {
        if (!file.delete()) {
          logger.error("Was unable to delete spill file {}", file.getAbsolutePath());
        }
      }
    }
  }

  public TaskMemoryManager getTaskMemoryManager() {
    return taskMemoryManager;
  }

  public long getPageSizeBytes() {
    return pageSizeBytes;
  }

  /**
   * Returns the total amount of memory, in bytes, consumed by this map's managed structures.
   */
  public long getTotalMemoryConsumption() {
    long totalDataPagesSize = 0L;
    for (MemoryBlock dataPage : dataPages) {
      totalDataPagesSize += dataPage.size();
    }
    return totalDataPagesSize + ((longArray != null) ? longArray.memoryBlock().size() : 0L);
  }

  private void updatePeakMemoryUsed() {
    long mem = getTotalMemoryConsumption();
    if (mem > peakMemoryUsedBytes) {
      peakMemoryUsedBytes = mem;
    }
  }

  /**
   * Return the peak memory used so far, in bytes.
   */
  public long getPeakMemoryUsedBytes() {
    updatePeakMemoryUsed();
    return peakMemoryUsedBytes;
  }

  /**
   * Returns the average number of probes per key lookup.
   */
  public double getAverageProbesPerLookup() {
    return (1.0 * numProbes) / numKeyLookups;
  }

  @VisibleForTesting
  public int getNumDataPages() {
    return dataPages.size();
  }

  /**
   * Returns the underline long[] of longArray.
   */
  public LongArray getArray() {
    assert(longArray != null);
    return longArray;
  }

  /**
   * Reset this map to initialized state.
   */
  public void reset() {
    updatePeakMemoryUsed();
    numKeys = 0;
    numValues = 0;
    freeArray(longArray);
    while (dataPages.size() > 0) {
      MemoryBlock dataPage = dataPages.removeLast();
      freePage(dataPage);
    }
    allocate(initialCapacity);
    canGrowArray = true;
    currentPage = null;
    pageCursor = 0;
  }

  /**
   * Grows the size of the hash table and re-hash everything.
   */
  @VisibleForTesting
  void growAndRehash() {
    assert(longArray != null);

    // Store references to the old data structures to be used when we re-hash
    final LongArray oldLongArray = longArray;
    final int oldCapacity = (int) oldLongArray.size() / 2;

    // Allocate the new data structures
    allocate(Math.min(growthStrategy.nextCapacity(oldCapacity), MAX_CAPACITY));

    // Re-mask (we don't recompute the hashcode because we stored all 32 bits of it)
    for (int i = 0; i < oldLongArray.size(); i += 2) {
      final long keyPointer = oldLongArray.get(i);
      if (keyPointer == 0) {
        continue;
      }
      final int hashcode = (int) oldLongArray.get(i + 1);
      int newPos = hashcode & mask;
      int step = 1;
      while (longArray.get(newPos * 2) != 0) {
        newPos = (newPos + step) & mask;
        step++;
      }
      longArray.set(newPos * 2, keyPointer);
      longArray.set(newPos * 2 + 1, hashcode);
    }
    freeArray(oldLongArray);
  }
}
UFFeScience/SAMbA
core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
965
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd

package com.google.protobuf;

import java.io.IOException;
import java.util.List;
import java.util.Map;

/** A reader of fields from a serialized protobuf message. */
// TODO: Refactor to allow the reader to allocate properly sized lists.
@ExperimentalApi
@CheckReturnValue
interface Reader {
  /** Value used to indicate that the end of input has been reached. */
  int READ_DONE = Integer.MAX_VALUE;

  /** Value used to indicate that the reader does not know the tag about the field. */
  int TAG_UNKNOWN = 0;

  /** Whether unknown fields encountered while reading should be discarded. */
  boolean shouldDiscardUnknownFields();

  /**
   * Gets the field number for the current field being read.
   *
   * <p>TODO: Rename it to make it more explicit about the side effect on the underlying
   * buffer.
   *
   * @return the current field number or {@link #READ_DONE} if the end of input has been reached.
   */
  int getFieldNumber() throws IOException;

  /**
   * Gets the wire tag of the current field.
   *
   * @return the current wire tag or {@link #TAG_UNKNOWN} if the reader does not know the tag of the
   *     current field.
   */
  int getTag();

  /**
   * Skips the current field and advances the reader to the next field.
   *
   * @return {@code true} if there are more fields or {@code false} if the end of input has been
   *     reached.
   */
  boolean skipField() throws IOException;

  /**
   * Reads and returns the next field of type {@code DOUBLE} and advances the reader to the next
   * field.
   */
  double readDouble() throws IOException;

  /**
   * Reads and returns the next field of type {@code FLOAT} and advances the reader to the next
   * field.
   */
  float readFloat() throws IOException;

  /**
   * Reads and returns the next field of type {@code UINT64} and advances the reader to the next
   * field.
   */
  long readUInt64() throws IOException;

  /**
   * Reads and returns the next field of type {@code INT64} and advances the reader to the next
   * field.
   */
  long readInt64() throws IOException;

  /**
   * Reads and returns the next field of type {@code INT32} and advances the reader to the next
   * field.
   */
  int readInt32() throws IOException;

  /**
   * Reads and returns the next field of type {@code FIXED64} and advances the reader to the next
   * field.
   */
  long readFixed64() throws IOException;

  /**
   * Reads and returns the next field of type {@code FIXED32} and advances the reader to the next
   * field.
   */
  int readFixed32() throws IOException;

  /**
   * Reads and returns the next field of type {@code BOOL} and advances the reader to the next
   * field.
   */
  boolean readBool() throws IOException;

  /**
   * Reads and returns the next field of type {@code STRING} and advances the reader to the next
   * field. If the stream contains malformed UTF-8, replace the offending bytes with the standard
   * UTF-8 replacement character.
   */
  String readString() throws IOException;

  /**
   * Reads and returns the next field of type {@code STRING} and advances the reader to the next
   * field. If the stream contains malformed UTF-8, throw exception {@link
   * InvalidProtocolBufferException}.
   */
  String readStringRequireUtf8() throws IOException;

  // TODO: decide whether this method belongs on the public interface.
  <T> T readMessageBySchemaWithCheck(Schema<T> schema, ExtensionRegistryLite extensionRegistry)
      throws IOException;

  /**
   * Reads and returns the next field of type {@code MESSAGE} and advances the reader to the next
   * field.
   */
  <T> T readMessage(Class<T> clazz, ExtensionRegistryLite extensionRegistry) throws IOException;

  /**
   * Reads and returns the next field of type {@code GROUP} and advances the reader to the next
   * field.
   *
   * @deprecated groups fields are deprecated.
   */
  @Deprecated
  <T> T readGroup(Class<T> clazz, ExtensionRegistryLite extensionRegistry) throws IOException;

  // TODO: decide whether this method belongs on the public interface.
  @Deprecated
  <T> T readGroupBySchemaWithCheck(Schema<T> schema, ExtensionRegistryLite extensionRegistry)
      throws IOException;

  /** Read a message field from the wire format and merge the results into the given target. */
  <T> void mergeMessageField(T target, Schema<T> schema, ExtensionRegistryLite extensionRegistry)
      throws IOException;

  /** Read a group field from the wire format and merge the results into the given target. */
  <T> void mergeGroupField(T target, Schema<T> schema, ExtensionRegistryLite extensionRegistry)
      throws IOException;

  /**
   * Reads and returns the next field of type {@code BYTES} and advances the reader to the next
   * field.
   */
  ByteString readBytes() throws IOException;

  /**
   * Reads and returns the next field of type {@code UINT32} and advances the reader to the next
   * field.
   */
  int readUInt32() throws IOException;

  /**
   * Reads and returns the next field of type {@code ENUM} and advances the reader to the next
   * field.
   */
  int readEnum() throws IOException;

  /**
   * Reads and returns the next field of type {@code SFIXED32} and advances the reader to the next
   * field.
   */
  int readSFixed32() throws IOException;

  /**
   * Reads and returns the next field of type {@code SFIXED64} and advances the reader to the next
   * field.
   */
  long readSFixed64() throws IOException;

  /**
   * Reads and returns the next field of type {@code SINT32} and advances the reader to the next
   * field.
   */
  int readSInt32() throws IOException;

  /**
   * Reads and returns the next field of type {@code SINT64} and advances the reader to the next
   * field.
   */
  long readSInt64() throws IOException;

  /**
   * Reads the next field of type {@code DOUBLE_LIST} or {@code DOUBLE_LIST_PACKED} and advances the
   * reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readDoubleList(List<Double> target) throws IOException;

  /**
   * Reads the next field of type {@code FLOAT_LIST} or {@code FLOAT_LIST_PACKED} and advances the
   * reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readFloatList(List<Float> target) throws IOException;

  /**
   * Reads the next field of type {@code UINT64_LIST} or {@code UINT64_LIST_PACKED} and advances the
   * reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readUInt64List(List<Long> target) throws IOException;

  /**
   * Reads the next field of type {@code INT64_LIST} or {@code INT64_LIST_PACKED} and advances the
   * reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readInt64List(List<Long> target) throws IOException;

  /**
   * Reads the next field of type {@code INT32_LIST} or {@code INT32_LIST_PACKED} and advances the
   * reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readInt32List(List<Integer> target) throws IOException;

  /**
   * Reads the next field of type {@code FIXED64_LIST} or {@code FIXED64_LIST_PACKED} and advances
   * the reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readFixed64List(List<Long> target) throws IOException;

  /**
   * Reads the next field of type {@code FIXED32_LIST} or {@code FIXED32_LIST_PACKED} and advances
   * the reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readFixed32List(List<Integer> target) throws IOException;

  /**
   * Reads the next field of type {@code BOOL_LIST} or {@code BOOL_LIST_PACKED} and advances the
   * reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readBoolList(List<Boolean> target) throws IOException;

  /**
   * Reads the next field of type {@code STRING_LIST} and advances the reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readStringList(List<String> target) throws IOException;

  /**
   * Reads the next field of type {@code STRING_LIST} and advances the reader to the next field. If
   * the stream contains malformed UTF-8, throw exception {@link InvalidProtocolBufferException}.
   *
   * @param target the list that will receive the read values.
   */
  void readStringListRequireUtf8(List<String> target) throws IOException;

  /**
   * Reads the next field of type {@code MESSAGE_LIST} and advances the reader to the next field.
   *
   * @param target the list that will receive the read values.
   * @param targetType the type of the elements stored in the {@code target} list.
   */
  <T> void readMessageList(
      List<T> target, Schema<T> schema, ExtensionRegistryLite extensionRegistry)
      throws IOException;

  <T> void readMessageList(
      List<T> target, Class<T> targetType, ExtensionRegistryLite extensionRegistry)
      throws IOException;

  /**
   * Reads the next field of type {@code GROUP_LIST} and advances the reader to the next field.
   *
   * @param target the list that will receive the read values.
   * @param targetType the type of the elements stored in the {@code target} list.
   * @deprecated groups fields are deprecated.
   */
  @Deprecated
  <T> void readGroupList(
      List<T> target, Class<T> targetType, ExtensionRegistryLite extensionRegistry)
      throws IOException;

  @Deprecated
  <T> void readGroupList(
      List<T> target, Schema<T> targetType, ExtensionRegistryLite extensionRegistry)
      throws IOException;

  /**
   * Reads the next field of type {@code BYTES_LIST} and advances the reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readBytesList(List<ByteString> target) throws IOException;

  /**
   * Reads the next field of type {@code UINT32_LIST} or {@code UINT32_LIST_PACKED} and advances the
   * reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readUInt32List(List<Integer> target) throws IOException;

  /**
   * Reads the next field of type {@code ENUM_LIST} or {@code ENUM_LIST_PACKED} and advances the
   * reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readEnumList(List<Integer> target) throws IOException;

  /**
   * Reads the next field of type {@code SFIXED32_LIST} or {@code SFIXED32_LIST_PACKED} and advances
   * the reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readSFixed32List(List<Integer> target) throws IOException;

  /**
   * Reads the next field of type {@code SFIXED64_LIST} or {@code SFIXED64_LIST_PACKED} and advances
   * the reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readSFixed64List(List<Long> target) throws IOException;

  /**
   * Reads the next field of type {@code SINT32_LIST} or {@code SINT32_LIST_PACKED} and advances the
   * reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readSInt32List(List<Integer> target) throws IOException;

  /**
   * Reads the next field of type {@code SINT64_LIST} or {@code SINT64_LIST_PACKED} and advances the
   * reader to the next field.
   *
   * @param target the list that will receive the read values.
   */
  void readSInt64List(List<Long> target) throws IOException;

  /**
   * Reads the next field of type {@code MAP} and advances the reader to the next field.
   *
   * @param target the mutable map that will receive the read values.
   * @param mapDefaultEntry the default entry of the map field.
   * @param extensionRegistry the extension registry for parsing message value fields.
   */
  <K, V> void readMap(
      Map<K, V> target,
      MapEntryLite.Metadata<K, V> mapDefaultEntry,
      ExtensionRegistryLite extensionRegistry)
      throws IOException;
}
protocolbuffers/protobuf
java/core/src/main/java/com/google/protobuf/Reader.java
967
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; public interface Alert { void dismiss(); void accept(); String getText(); void sendKeys(String keysToSend); }
SeleniumHQ/selenium
java/src/org/openqa/selenium/Alert.java
968
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; import java.util.Objects; /** A copy of java.awt.Point, to remove dependency on awt. */ public class Point { public int x; public int y; public Point(int x, int y) { this.x = x; this.y = y; } public int getX() { return x; } public int getY() { return y; } public Point moveBy(int xOffset, int yOffset) { return new Point(x + xOffset, y + yOffset); } @Override public boolean equals(Object o) { if (!(o instanceof Point)) { return false; } Point other = (Point) o; return other.x == x && other.y == y; } @Override public int hashCode() { return Objects.hash(x, y); } @Override public String toString() { return String.format("(%d, %d)", x, y); } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/Point.java
969
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.security; import io.netty.channel.Channel; import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpUtil; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import 
org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.ssl.KeyStoreUtil; import org.elasticsearch.common.ssl.SslConfiguration; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Nullable; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.features.FeatureService; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.http.HttpPreRequest; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.netty4.Netty4HttpServerTransport; import org.elasticsearch.http.netty4.internal.HttpHeadersAuthenticatorUtils; import org.elasticsearch.http.netty4.internal.HttpValidator; import org.elasticsearch.index.IndexModule; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.ingest.Processor; import org.elasticsearch.license.ClusterStateLicenseService; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.LicensedFeature; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.node.PluginComponentBinding; import org.elasticsearch.plugins.ClusterCoordinationPlugin; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.FieldPredicate; import 
org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.plugins.interceptor.RestServerActionPlugin; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestHeaderDefinition; import org.elasticsearch.rest.RestInterceptor; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.netty4.AcceptChannelHandler; import org.elasticsearch.transport.netty4.SharedGroupFactory; import org.elasticsearch.transport.netty4.TLSConfig; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import 
org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.SecurityExtension; import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.SecuritySettings; import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.ClearSecurityCacheAction; import org.elasticsearch.xpack.core.security.action.DelegatePkiAuthenticationAction; import org.elasticsearch.xpack.core.security.action.apikey.BulkUpdateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.BulkUpdateApiKeyRequestTranslator; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyRequestBuilderFactory; import org.elasticsearch.xpack.core.security.action.apikey.CreateCrossClusterApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.GrantApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.QueryApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.UpdateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.UpdateApiKeyRequestTranslator; import org.elasticsearch.xpack.core.security.action.apikey.UpdateCrossClusterApiKeyAction; import org.elasticsearch.xpack.core.security.action.enrollment.KibanaEnrollmentAction; import org.elasticsearch.xpack.core.security.action.enrollment.NodeEnrollmentAction; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateAction; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectLogoutAction; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationAction; import 
org.elasticsearch.xpack.core.security.action.privilege.ClearPrivilegesCacheAction; import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.GetBuiltinPrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.GetBuiltinPrivilegesResponseTranslator; import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; import org.elasticsearch.xpack.core.security.action.profile.GetProfilesAction; import org.elasticsearch.xpack.core.security.action.profile.SetProfileEnabledAction; import org.elasticsearch.xpack.core.security.action.profile.SuggestProfilesAction; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheAction; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction; import org.elasticsearch.xpack.core.security.action.role.GetRolesAction; import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequestBuilderFactory; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionAction; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutAction; import 
org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationAction; import org.elasticsearch.xpack.core.security.action.saml.SamlSpMetadataAction; import org.elasticsearch.xpack.core.security.action.service.CreateServiceAccountTokenAction; import org.elasticsearch.xpack.core.security.action.service.DeleteServiceAccountTokenAction; import org.elasticsearch.xpack.core.security.action.service.GetServiceAccountAction; import org.elasticsearch.xpack.core.security.action.service.GetServiceAccountCredentialsAction; import org.elasticsearch.xpack.core.security.action.service.GetServiceAccountNodesCredentialsAction; import org.elasticsearch.xpack.core.security.action.settings.GetSecuritySettingsAction; import org.elasticsearch.xpack.core.security.action.settings.UpdateSecuritySettingsAction; import org.elasticsearch.xpack.core.security.action.token.CreateTokenAction; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; import org.elasticsearch.xpack.core.security.action.token.RefreshTokenAction; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.GetUsersAction; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequestBuilderFactory; import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationFailureHandler; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField; import 
org.elasticsearch.xpack.core.security.authc.DefaultAuthenticationFailureHandler; import org.elasticsearch.xpack.core.security.authc.InternalRealmsSettings; import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.Subject; import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; import org.elasticsearch.xpack.core.security.authz.RestrictedIndices; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetBitsetCache; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexReaderWrapper; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.core.security.authz.permission.SimpleRole; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.ssl.TransportTLSBootstrapCheck; import org.elasticsearch.xpack.core.ssl.action.GetCertificateInfoAction; import org.elasticsearch.xpack.core.ssl.action.TransportGetCertificateInfoAction; import 
org.elasticsearch.xpack.core.ssl.rest.RestGetCertificateInfoAction; import org.elasticsearch.xpack.security.action.TransportClearSecurityCacheAction; import org.elasticsearch.xpack.security.action.TransportDelegatePkiAuthenticationAction; import org.elasticsearch.xpack.security.action.apikey.TransportBulkUpdateApiKeyAction; import org.elasticsearch.xpack.security.action.apikey.TransportCreateApiKeyAction; import org.elasticsearch.xpack.security.action.apikey.TransportCreateCrossClusterApiKeyAction; import org.elasticsearch.xpack.security.action.apikey.TransportGetApiKeyAction; import org.elasticsearch.xpack.security.action.apikey.TransportGrantApiKeyAction; import org.elasticsearch.xpack.security.action.apikey.TransportInvalidateApiKeyAction; import org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction; import org.elasticsearch.xpack.security.action.apikey.TransportUpdateApiKeyAction; import org.elasticsearch.xpack.security.action.apikey.TransportUpdateCrossClusterApiKeyAction; import org.elasticsearch.xpack.security.action.enrollment.TransportKibanaEnrollmentAction; import org.elasticsearch.xpack.security.action.enrollment.TransportNodeEnrollmentAction; import org.elasticsearch.xpack.security.action.filter.SecurityActionFilter; import org.elasticsearch.xpack.security.action.oidc.TransportOpenIdConnectAuthenticateAction; import org.elasticsearch.xpack.security.action.oidc.TransportOpenIdConnectLogoutAction; import org.elasticsearch.xpack.security.action.oidc.TransportOpenIdConnectPrepareAuthenticationAction; import org.elasticsearch.xpack.security.action.privilege.TransportClearPrivilegesCacheAction; import org.elasticsearch.xpack.security.action.privilege.TransportDeletePrivilegesAction; import org.elasticsearch.xpack.security.action.privilege.TransportGetBuiltinPrivilegesAction; import org.elasticsearch.xpack.security.action.privilege.TransportGetPrivilegesAction; import 
org.elasticsearch.xpack.security.action.privilege.TransportPutPrivilegesAction; import org.elasticsearch.xpack.security.action.profile.TransportActivateProfileAction; import org.elasticsearch.xpack.security.action.profile.TransportGetProfilesAction; import org.elasticsearch.xpack.security.action.profile.TransportProfileHasPrivilegesAction; import org.elasticsearch.xpack.security.action.profile.TransportSetProfileEnabledAction; import org.elasticsearch.xpack.security.action.profile.TransportSuggestProfilesAction; import org.elasticsearch.xpack.security.action.profile.TransportUpdateProfileDataAction; import org.elasticsearch.xpack.security.action.realm.TransportClearRealmCacheAction; import org.elasticsearch.xpack.security.action.role.TransportClearRolesCacheAction; import org.elasticsearch.xpack.security.action.role.TransportDeleteRoleAction; import org.elasticsearch.xpack.security.action.role.TransportGetRolesAction; import org.elasticsearch.xpack.security.action.role.TransportPutRoleAction; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; import org.elasticsearch.xpack.security.action.rolemapping.TransportDeleteRoleMappingAction; import org.elasticsearch.xpack.security.action.rolemapping.TransportGetRoleMappingsAction; import org.elasticsearch.xpack.security.action.rolemapping.TransportPutRoleMappingAction; import org.elasticsearch.xpack.security.action.saml.TransportSamlAuthenticateAction; import org.elasticsearch.xpack.security.action.saml.TransportSamlCompleteLogoutAction; import org.elasticsearch.xpack.security.action.saml.TransportSamlInvalidateSessionAction; import org.elasticsearch.xpack.security.action.saml.TransportSamlLogoutAction; import org.elasticsearch.xpack.security.action.saml.TransportSamlPrepareAuthenticationAction; import org.elasticsearch.xpack.security.action.saml.TransportSamlSpMetadataAction; import org.elasticsearch.xpack.security.action.service.TransportCreateServiceAccountTokenAction; import 
org.elasticsearch.xpack.security.action.service.TransportDeleteServiceAccountTokenAction; import org.elasticsearch.xpack.security.action.service.TransportGetServiceAccountAction; import org.elasticsearch.xpack.security.action.service.TransportGetServiceAccountCredentialsAction; import org.elasticsearch.xpack.security.action.service.TransportGetServiceAccountNodesCredentialsAction; import org.elasticsearch.xpack.security.action.settings.TransportGetSecuritySettingsAction; import org.elasticsearch.xpack.security.action.settings.TransportReloadRemoteClusterCredentialsAction; import org.elasticsearch.xpack.security.action.settings.TransportUpdateSecuritySettingsAction; import org.elasticsearch.xpack.security.action.token.TransportCreateTokenAction; import org.elasticsearch.xpack.security.action.token.TransportInvalidateTokenAction; import org.elasticsearch.xpack.security.action.token.TransportRefreshTokenAction; import org.elasticsearch.xpack.security.action.user.TransportAuthenticateAction; import org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; import org.elasticsearch.xpack.security.action.user.TransportDeleteUserAction; import org.elasticsearch.xpack.security.action.user.TransportGetUserPrivilegesAction; import org.elasticsearch.xpack.security.action.user.TransportGetUsersAction; import org.elasticsearch.xpack.security.action.user.TransportHasPrivilegesAction; import org.elasticsearch.xpack.security.action.user.TransportPutUserAction; import org.elasticsearch.xpack.security.action.user.TransportQueryUserAction; import org.elasticsearch.xpack.security.action.user.TransportSetEnabledAction; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail; import org.elasticsearch.xpack.security.authc.ApiKeyService; import org.elasticsearch.xpack.security.authc.AuthenticationService; import 
org.elasticsearch.xpack.security.authc.CrossClusterAccessAuthenticationService; import org.elasticsearch.xpack.security.authc.InternalRealms; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authc.jwt.JwtRealm; import org.elasticsearch.xpack.security.authc.service.CachingServiceAccountTokenStore; import org.elasticsearch.xpack.security.authc.service.FileServiceAccountTokenStore; import org.elasticsearch.xpack.security.authc.service.IndexServiceAccountTokenStore; import org.elasticsearch.xpack.security.authc.service.ServiceAccountService; import org.elasticsearch.xpack.security.authc.support.SecondaryAuthActions; import org.elasticsearch.xpack.security.authc.support.SecondaryAuthenticator; import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.CompositeRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.security.authz.AuthorizationDenialMessages; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.DlsFlsRequestCacheDifferentiator; import org.elasticsearch.xpack.security.authz.FileRoleValidator; import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener; import org.elasticsearch.xpack.security.authz.accesscontrol.OptOutQueryCache; import org.elasticsearch.xpack.security.authz.interceptor.BulkShardRequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.DlsFlsLicenseRequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.IndicesAliasesRequestInterceptor; import 
org.elasticsearch.xpack.security.authz.interceptor.RequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.ResizeRequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.SearchRequestCacheDisablingInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.SearchRequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.ShardSearchRequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.UpdateRequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.ValidateRequestInterceptor; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.authz.store.DeprecationRoleDescriptorConsumer; import org.elasticsearch.xpack.security.authz.store.FileRolesStore; import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.elasticsearch.xpack.security.authz.store.RoleProviders; import org.elasticsearch.xpack.security.ingest.SetSecurityUserProcessor; import org.elasticsearch.xpack.security.operator.DefaultOperatorOnlyRegistry; import org.elasticsearch.xpack.security.operator.FileOperatorUsersStore; import org.elasticsearch.xpack.security.operator.OperatorOnlyRegistry; import org.elasticsearch.xpack.security.operator.OperatorPrivileges; import org.elasticsearch.xpack.security.profile.ProfileService; import org.elasticsearch.xpack.security.rest.RemoteHostHeader; import org.elasticsearch.xpack.security.rest.SecurityRestFilter; import org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction; import org.elasticsearch.xpack.security.rest.action.RestDelegatePkiAuthenticationAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestBulkUpdateApiKeyAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestClearApiKeyCacheAction; import 
org.elasticsearch.xpack.security.rest.action.apikey.RestCreateApiKeyAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestCreateCrossClusterApiKeyAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestGetApiKeyAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestGrantApiKeyAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestInvalidateApiKeyAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestQueryApiKeyAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestUpdateApiKeyAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestUpdateCrossClusterApiKeyAction; import org.elasticsearch.xpack.security.rest.action.enrollment.RestKibanaEnrollAction; import org.elasticsearch.xpack.security.rest.action.enrollment.RestNodeEnrollmentAction; import org.elasticsearch.xpack.security.rest.action.oauth2.RestGetTokenAction; import org.elasticsearch.xpack.security.rest.action.oauth2.RestInvalidateTokenAction; import org.elasticsearch.xpack.security.rest.action.oidc.RestOpenIdConnectAuthenticateAction; import org.elasticsearch.xpack.security.rest.action.oidc.RestOpenIdConnectLogoutAction; import org.elasticsearch.xpack.security.rest.action.oidc.RestOpenIdConnectPrepareAuthenticationAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestClearPrivilegesCacheAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestDeletePrivilegesAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestGetBuiltinPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestGetPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.profile.RestActivateProfileAction; import org.elasticsearch.xpack.security.rest.action.profile.RestDisableProfileAction; import 
org.elasticsearch.xpack.security.rest.action.profile.RestEnableProfileAction; import org.elasticsearch.xpack.security.rest.action.profile.RestGetProfilesAction; import org.elasticsearch.xpack.security.rest.action.profile.RestSuggestProfilesAction; import org.elasticsearch.xpack.security.rest.action.profile.RestUpdateProfileDataAction; import org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction; import org.elasticsearch.xpack.security.rest.action.role.RestClearRolesCacheAction; import org.elasticsearch.xpack.security.rest.action.role.RestDeleteRoleAction; import org.elasticsearch.xpack.security.rest.action.role.RestGetRolesAction; import org.elasticsearch.xpack.security.rest.action.role.RestPutRoleAction; import org.elasticsearch.xpack.security.rest.action.rolemapping.RestDeleteRoleMappingAction; import org.elasticsearch.xpack.security.rest.action.rolemapping.RestGetRoleMappingsAction; import org.elasticsearch.xpack.security.rest.action.rolemapping.RestPutRoleMappingAction; import org.elasticsearch.xpack.security.rest.action.saml.RestSamlAuthenticateAction; import org.elasticsearch.xpack.security.rest.action.saml.RestSamlCompleteLogoutAction; import org.elasticsearch.xpack.security.rest.action.saml.RestSamlInvalidateSessionAction; import org.elasticsearch.xpack.security.rest.action.saml.RestSamlLogoutAction; import org.elasticsearch.xpack.security.rest.action.saml.RestSamlPrepareAuthenticationAction; import org.elasticsearch.xpack.security.rest.action.saml.RestSamlSpMetadataAction; import org.elasticsearch.xpack.security.rest.action.service.RestClearServiceAccountTokenStoreCacheAction; import org.elasticsearch.xpack.security.rest.action.service.RestCreateServiceAccountTokenAction; import org.elasticsearch.xpack.security.rest.action.service.RestDeleteServiceAccountTokenAction; import org.elasticsearch.xpack.security.rest.action.service.RestGetServiceAccountAction; import 
org.elasticsearch.xpack.security.rest.action.service.RestGetServiceAccountCredentialsAction; import org.elasticsearch.xpack.security.rest.action.settings.RestGetSecuritySettingsAction; import org.elasticsearch.xpack.security.rest.action.settings.RestUpdateSecuritySettingsAction; import org.elasticsearch.xpack.security.rest.action.user.RestChangePasswordAction; import org.elasticsearch.xpack.security.rest.action.user.RestDeleteUserAction; import org.elasticsearch.xpack.security.rest.action.user.RestGetUserPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.user.RestGetUsersAction; import org.elasticsearch.xpack.security.rest.action.user.RestHasPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.user.RestProfileHasPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction; import org.elasticsearch.xpack.security.rest.action.user.RestQueryUserAction; import org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction; import org.elasticsearch.xpack.security.support.CacheInvalidatorRegistry; import org.elasticsearch.xpack.security.support.ExtensionComponents; import org.elasticsearch.xpack.security.support.ReloadableSecurityComponent; import org.elasticsearch.xpack.security.support.SecuritySystemIndices; import org.elasticsearch.xpack.security.transport.SecurityHttpSettings; import org.elasticsearch.xpack.security.transport.SecurityServerTransportInterceptor; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4ServerTransport; import java.io.IOException; import java.net.InetSocketAddress; import java.security.Provider; import java.time.Clock; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; 
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static org.elasticsearch.core.Strings.format;
import static org.elasticsearch.xpack.core.XPackSettings.API_KEY_SERVICE_ENABLED_SETTING;
import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED;
import static org.elasticsearch.xpack.core.security.SecurityField.FIELD_LEVEL_SECURITY_FEATURE;
import static org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore.INCLUDED_RESERVED_ROLES_SETTING;
import static org.elasticsearch.xpack.security.operator.OperatorPrivileges.OPERATOR_PRIVILEGES_ENABLED;
import static org.elasticsearch.xpack.security.transport.SSLEngineUtils.extractClientCertificates;

/**
 * The X-Pack Security plugin. Wires together the security subsystems created in
 * {@code createComponents} (authentication, authorization, audit, token/API-key
 * services, transport/REST filtering) and declares the licensed-feature constants
 * used to gate individual capabilities by subscription level.
 */
public class Security extends Plugin
    implements
        SystemIndexPlugin,
        IngestPlugin,
        NetworkPlugin,
        ClusterPlugin,
        ClusterCoordinationPlugin,
        MapperPlugin,
        ExtensiblePlugin,
        SearchPlugin,
        RestServerActionPlugin,
        ReloadablePlugin {

// Name of the dedicated thread pool used for security crypto operations.
public static final String SECURITY_CRYPTO_THREAD_POOL_NAME = XPackField.SECURITY + "-crypto";

// TODO: ip filtering does not actually track license usage yet
public static final LicensedFeature.Momentary IP_FILTERING_FEATURE = LicensedFeature.momentaryLenient(
    null,
    "security-ip-filtering",
    License.OperationMode.GOLD
);
public static final LicensedFeature.Momentary AUDITING_FEATURE = LicensedFeature.momentary(
    null,
    "security-auditing",
    License.OperationMode.GOLD
);
public static final LicensedFeature.Momentary TOKEN_SERVICE_FEATURE = LicensedFeature.momentary(
    null,
    "security-token-service",
    License.OperationMode.STANDARD
);

// Shared feature family under which all realm licensed-features are tracked.
private static final String REALMS_FEATURE_FAMILY = "security-realms";
// Builtin realms (file/native) realms are Basic licensed, so don't need to be checked or tracked
// Some realms (LDAP, AD, PKI) are Gold+
public static final LicensedFeature.Persistent LDAP_REALM_FEATURE = LicensedFeature.persistent(
    REALMS_FEATURE_FAMILY,
    "ldap",
    License.OperationMode.GOLD
);
public static final LicensedFeature.Persistent AD_REALM_FEATURE = LicensedFeature.persistent(
    REALMS_FEATURE_FAMILY,
    "active-directory",
    License.OperationMode.GOLD
);
public static final LicensedFeature.Persistent PKI_REALM_FEATURE = LicensedFeature.persistent(
    REALMS_FEATURE_FAMILY,
    "pki",
    License.OperationMode.GOLD
);
// SSO realms are Platinum+
public static final LicensedFeature.Persistent SAML_REALM_FEATURE = LicensedFeature.persistent(
    REALMS_FEATURE_FAMILY,
    "saml",
    License.OperationMode.PLATINUM
);
public static final LicensedFeature.Persistent OIDC_REALM_FEATURE = LicensedFeature.persistent(
    REALMS_FEATURE_FAMILY,
    "oidc",
    License.OperationMode.PLATINUM
);
public static final LicensedFeature.Persistent JWT_REALM_FEATURE = LicensedFeature.persistent(
    REALMS_FEATURE_FAMILY,
    "jwt",
    License.OperationMode.PLATINUM
);
public static final LicensedFeature.Persistent KERBEROS_REALM_FEATURE = LicensedFeature.persistent(
    REALMS_FEATURE_FAMILY,
    "kerberos",
    License.OperationMode.PLATINUM
);
// Custom realms are Platinum+
public static final LicensedFeature.Persistent CUSTOM_REALMS_FEATURE = LicensedFeature.persistent(
    REALMS_FEATURE_FAMILY,
    "custom",
    License.OperationMode.PLATINUM
);
public static final LicensedFeature.Momentary DELEGATED_AUTHORIZATION_FEATURE = LicensedFeature.momentary(
    null,
    "security-delegated-authorization",
    License.OperationMode.PLATINUM
);
public static final LicensedFeature.Momentary AUTHORIZATION_ENGINE_FEATURE = LicensedFeature.momentary(
    null,
    "security-authorization-engine",
    License.OperationMode.PLATINUM
);
// Custom role providers are Platinum+
public static final LicensedFeature.Persistent CUSTOM_ROLE_PROVIDERS_FEATURE = LicensedFeature.persistent(
    null,
    "security-roles-provider",
    License.OperationMode.PLATINUM
);
public static final LicensedFeature.Momentary OPERATOR_PRIVILEGES_FEATURE = LicensedFeature.momentary(
    null,
    "operator-privileges",
    License.OperationMode.ENTERPRISE
);
public static final LicensedFeature.Momentary USER_PROFILE_COLLABORATION_FEATURE = LicensedFeature.momentary(
    null,
    "user-profile-collaboration",
    License.OperationMode.STANDARD
);
/**
 * Configurable cross cluster access is Enterprise feature.
 */
public static final LicensedFeature.Momentary ADVANCED_REMOTE_CLUSTER_SECURITY_FEATURE = LicensedFeature.momentary(
    null,
    "advanced-remote-cluster-security",
    License.OperationMode.ENTERPRISE
);

private static final Logger logger = LogManager.getLogger(Security.class);

// Mutable on purpose: replaced in createComponents with the (possibly augmented) environment settings.
private Settings settings;
// Whether xpack.security.enabled was set at node startup; fixed for the node's lifetime.
private final boolean enabled;
private final SecuritySystemIndices systemIndices;
// Completed once the node has started; used to defer work until after startup.
private final ListenableFuture<Void> nodeStartedListenable;

/* what a PITA that we need an extra indirection to initialize this. Yet, once we got rid of guice we can think about how
 * to fix this or make it simpler. Today we need several services that are created in createComponents but we need to register
 * an instance of TransportInterceptor way earlier before createComponents is called.
 */
private final SetOnce<TransportInterceptor> securityInterceptor = new SetOnce<>();
private final SetOnce<IPFilter> ipFilter = new SetOnce<>();
private final SetOnce<AuthenticationService> authcService = new SetOnce<>();
// NOTE(review): field name carries a long-standing typo ("seconday"); kept as-is since renaming is out of scope here.
private final SetOnce<SecondaryAuthenticator> secondayAuthc = new SetOnce<>();
private final SetOnce<AuditTrailService> auditTrailService = new SetOnce<>();
private final SetOnce<SecurityContext> securityContext = new SetOnce<>();
private final SetOnce<ThreadContext> threadContext = new SetOnce<>();
private final SetOnce<TokenService> tokenService = new SetOnce<>();
private final SetOnce<SecurityActionFilter> securityActionFilter = new SetOnce<>();
private final SetOnce<CrossClusterAccessAuthenticationService> crossClusterAccessAuthcService = new SetOnce<>();
private final SetOnce<SharedGroupFactory> sharedGroupFactory = new SetOnce<>();
private final SetOnce<DocumentSubsetBitsetCache> dlsBitsetCache = new SetOnce<>();
// Built eagerly in createComponents while secure settings are still readable.
private final SetOnce<List<BootstrapCheck>> bootstrapChecks = new SetOnce<>();
private final List<SecurityExtension> securityExtensions = new ArrayList<>();
private final SetOnce<Transport> transportReference = new SetOnce<>();
private final SetOnce<ScriptService> scriptServiceReference = new SetOnce<>();
private final SetOnce<OperatorOnlyRegistry> operatorOnlyRegistry = new SetOnce<>();
// The factories/translators below may be supplied by a SecurityExtension; defaults are installed in createComponents.
private final SetOnce<PutRoleRequestBuilderFactory> putRoleRequestBuilderFactory = new SetOnce<>();
private final SetOnce<CreateApiKeyRequestBuilderFactory> createApiKeyRequestBuilderFactory = new SetOnce<>();
private final SetOnce<UpdateApiKeyRequestTranslator> updateApiKeyRequestTranslator = new SetOnce<>();
private final SetOnce<BulkUpdateApiKeyRequestTranslator> bulkUpdateApiKeyRequestTranslator = new SetOnce<>();
private final SetOnce<RestGrantApiKeyAction.RequestTranslator> grantApiKeyRequestTranslator = new SetOnce<>();
private final SetOnce<GetBuiltinPrivilegesResponseTranslator> getBuiltinPrivilegesResponseTranslator = new SetOnce<>();
private final SetOnce<HasPrivilegesRequestBuilderFactory> hasPrivilegesRequestBuilderFactory = new SetOnce<>();
private final SetOnce<FileRolesStore> fileRolesStore = new SetOnce<>();
private final SetOnce<OperatorPrivileges.OperatorPrivilegesService> operatorPrivilegesService = new SetOnce<>();
private final SetOnce<ReservedRoleMappingAction> reservedRoleMappingAction = new SetOnce<>();
private final SetOnce<Realms> realms = new SetOnce<>();
private final SetOnce<Client> client = new SetOnce<>();
// Components implementing ReloadableSecurityComponent, collected at the end of createComponents.
private final SetOnce<List<ReloadableSecurityComponent>> reloadableComponents = new SetOnce<>();
private final SetOnce<AuthorizationDenialMessages> authorizationDenialMessages = new SetOnce<>();
private final SetOnce<ReservedRoleNameChecker.Factory> reservedRoleNameCheckerFactory = new SetOnce<>();
private final SetOnce<FileRoleValidator> fileRoleValidator = new SetOnce<>();
private final SetOnce<SecondaryAuthActions> secondaryAuthActions = new SetOnce<>();

/**
 * Production constructor: no extensions beyond those discovered via {@code ExtensiblePlugin}.
 */
public Security(Settings settings) {
    this(settings, Collections.emptyList());
}

/**
 * Package-private for tests that want to inject their own {@link SecurityExtension}s.
 */
Security(Settings settings, List<SecurityExtension> extensions) {
    // Note: The settings that are passed in here might not be the final values - things like Plugin.additionalSettings()
    // will be called after the plugins are constructed, and may introduce new setting values.
    // Accordingly we should avoid using this settings object for very much and mostly rely on Environment.setting() as provided
    // to createComponents.
    this.settings = settings;
    // TODO this is wrong, we should only use the environment that is provided to createComponents
    this.enabled = XPackSettings.SECURITY_ENABLED.get(settings);
    this.systemIndices = new SecuritySystemIndices(settings);
    this.nodeStartedListenable = new ListenableFuture<>();
    if (enabled) {
        runStartupChecks(settings);
        Automatons.updateConfiguration(settings);
    } else {
        // With security disabled, lingering remote-cluster credentials in the keystore are a config error.
        ensureNoRemoteClusterCredentialsOnDisabledSecurity(settings);
        this.bootstrapChecks.set(Collections.emptyList());
    }
    this.securityExtensions.addAll(extensions);
}

/**
 * Fails startup if remote-cluster credentials exist in the keystore while security is disabled,
 * since such clusters could never be connected to. Only called when {@link #enabled} is false.
 */
private void ensureNoRemoteClusterCredentialsOnDisabledSecurity(Settings settings) {
    assert false == enabled;
    final List<String> remoteClusterCredentialsSettingKeys = RemoteClusterService.REMOTE_CLUSTER_CREDENTIALS.getAllConcreteSettings(
        settings
    ).map(Setting::getKey).sorted().toList();
    if (false == remoteClusterCredentialsSettingKeys.isEmpty()) {
        throw new IllegalArgumentException(
            format(
                "Found [%s] remote clusters with credentials [%s]. Security [%s] must be enabled to connect to them. "
                    + "Please either enable security or remove these settings from the keystore.",
                remoteClusterCredentialsSettingKeys.size(),
                Strings.collectionToCommaDelimitedString(remoteClusterCredentialsSettingKeys),
                XPackSettings.SECURITY_ENABLED.getKey()
            )
        );
    }
}

// Validates realm settings at construction time; adds FIPS-specific validation when FIPS mode is on.
private static void runStartupChecks(Settings settings) {
    validateRealmSettings(settings);
    if (XPackSettings.FIPS_MODE_ENABLED.get(settings)) {
        validateForFips(settings);
    }
}

// overridable by tests
protected Clock getClock() {
    return Clock.systemUTC();
}

protected SSLService getSslService() {
    return XPackPlugin.getSharedSslService();
}

protected LicenseService getLicenseService() {
    return XPackPlugin.getSharedLicenseService();
}

protected XPackLicenseState getLicenseState() {
    return XPackPlugin.getSharedLicenseState();
}

protected Client getClient() {
    return client.get();
}

protected List<ReloadableSecurityComponent> getReloadableSecurityComponents() {
    return this.reloadableComponents.get();
}

/**
 * Plugin entry point: unpacks {@link PluginServices} and delegates to the testable overload.
 * Any failure is wrapped so the node fails fast with a clear cause.
 */
@Override
public Collection<?> createComponents(PluginServices services) {
    try {
        return createComponents(
            services.client(),
            services.threadPool(),
            services.clusterService(),
            services.featureService(),
            services.resourceWatcherService(),
            services.scriptService(),
            services.xContentRegistry(),
            services.environment(),
            services.nodeEnvironment().nodeMetadata(),
            services.indexNameExpressionResolver(),
            services.telemetryProvider()
        );
    } catch (final Exception e) {
        throw new IllegalStateException("security initialization failed", e);
    }
}

// pkg private for testing - tests want to pass in their set of extensions hence we are not using the extension service directly
Collection<Object> createComponents(
    Client client,
    ThreadPool threadPool,
    ClusterService clusterService,
    FeatureService featureService,
    ResourceWatcherService resourceWatcherService,
    ScriptService scriptService,
    NamedXContentRegistry xContentRegistry,
    Environment environment,
    NodeMetadata nodeMetadata,
    IndexNameExpressionResolver expressionResolver,
    TelemetryProvider telemetryProvider
) throws Exception {
    logger.info("Security is {}", enabled ? "enabled" : "disabled");
    if (enabled == false) {
        // Disabled: expose an all-null usage-services holder so usage reporting still works.
        return Collections.singletonList(new SecurityUsageServices(null, null, null, null, null, null));
    }

    this.client.set(client);

    // The settings in `environment` may have additional values over what was provided during construction
    // See Plugin#additionalSettings()
    this.settings = environment.settings();

    systemIndices.init(client, clusterService);

    scriptServiceReference.set(scriptService);
    // We need to construct the checks here while the secure settings are still available.
    // If we wait until #getBoostrapChecks the secure settings will have been cleared/closed.
    final List<BootstrapCheck> checks = new ArrayList<>();
    checks.addAll(
        Arrays.asList(
            new TokenSSLBootstrapCheck(),
            new PkiRealmBootstrapCheck(getSslService()),
            new SecurityImplicitBehaviorBootstrapCheck(nodeMetadata, getLicenseService()),
            new TransportTLSBootstrapCheck()
        )
    );
    checks.addAll(InternalRealms.getBootstrapChecks(settings, environment));
    this.bootstrapChecks.set(Collections.unmodifiableList(checks));

    threadContext.set(threadPool.getThreadContext());
    List<Object> components = new ArrayList<>();
    securityContext.set(new SecurityContext(settings, threadPool.getThreadContext()));
    components.add(securityContext.get());

    final RestrictedIndices restrictedIndices = new RestrictedIndices(expressionResolver);

    // audit trail service construction
    final AuditTrail auditTrail = XPackSettings.AUDIT_ENABLED.get(settings)
        ? new LoggingAuditTrail(settings, clusterService, threadPool)
        : null;
    final AuditTrailService auditTrailService = new AuditTrailService(auditTrail, getLicenseState());
    components.add(auditTrailService);
    this.auditTrailService.set(auditTrailService);

    final TokenService tokenService = new TokenService(
        settings,
        Clock.systemUTC(),
        client,
        getLicenseState(),
        securityContext.get(),
        systemIndices.getMainIndexManager(),
        systemIndices.getTokenIndexManager(),
        clusterService
    );
    this.tokenService.set(tokenService);
    components.add(tokenService);

    // realms construction
    final NativeUsersStore nativeUsersStore = new NativeUsersStore(settings, client, systemIndices.getMainIndexManager());
    final NativeRoleMappingStore nativeRoleMappingStore = new NativeRoleMappingStore(
        settings,
        client,
        systemIndices.getMainIndexManager(),
        scriptService
    );
    final ClusterStateRoleMapper clusterStateRoleMapper = new ClusterStateRoleMapper(settings, scriptService, clusterService);
    final UserRoleMapper userRoleMapper = new CompositeRoleMapper(nativeRoleMappingStore, clusterStateRoleMapper);
    final AnonymousUser anonymousUser = new AnonymousUser(settings);
    components.add(anonymousUser);
    final ReservedRealm reservedRealm = new ReservedRealm(environment, settings, nativeUsersStore, anonymousUser, threadPool);
    final SecurityExtension.SecurityComponents extensionComponents = new ExtensionComponents(
        environment,
        client,
        clusterService,
        resourceWatcherService,
        userRoleMapper
    );
    Map<String, Realm.Factory> realmFactories = new HashMap<>(
        InternalRealms.getFactories(
            threadPool,
            settings,
            resourceWatcherService,
            getSslService(),
            nativeUsersStore,
            userRoleMapper,
            systemIndices.getMainIndexManager()
        )
    );
    // Extensions may contribute additional realm types, but must not shadow an existing one.
    for (SecurityExtension extension : securityExtensions) {
        Map<String, Realm.Factory> newRealms = extension.getRealms(extensionComponents);
        for (Map.Entry<String, Realm.Factory> entry : newRealms.entrySet()) {
            if (realmFactories.put(entry.getKey(), entry.getValue()) != null) {
                throw new IllegalArgumentException("Realm type [" + entry.getKey() + "] is already registered");
            }
        }
    }
    final Realms realms = new Realms(
        settings,
        environment,
        realmFactories,
        getLicenseState(),
        threadPool.getThreadContext(),
        reservedRealm
    );
    components.add(nativeUsersStore);
    components.add(new PluginComponentBinding<>(NativeRoleMappingStore.class, nativeRoleMappingStore));
    components.add(new PluginComponentBinding<>(UserRoleMapper.class, userRoleMapper));
    components.add(reservedRealm);
    components.add(realms);
    this.realms.set(realms);

    systemIndices.getMainIndexManager().addStateListener(nativeRoleMappingStore::onSecurityIndexStateChange);

    final CacheInvalidatorRegistry cacheInvalidatorRegistry = new CacheInvalidatorRegistry();
    cacheInvalidatorRegistry.registerAlias("service", Set.of("file_service_account_token", "index_service_account_token"));
    components.add(cacheInvalidatorRegistry);
    systemIndices.getMainIndexManager().addStateListener(cacheInvalidatorRegistry::onSecurityIndexStateChange);

    final NativePrivilegeStore privilegeStore = new NativePrivilegeStore(
        settings,
        client,
        systemIndices.getMainIndexManager(),
        cacheInvalidatorRegistry,
        clusterService
    );
    components.add(privilegeStore);

    final ReservedRolesStore reservedRolesStore = new ReservedRolesStore(Set.copyOf(INCLUDED_RESERVED_ROLES_SETTING.get(settings)));
    dlsBitsetCache.set(new DocumentSubsetBitsetCache(settings, threadPool));
    final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(settings);
    final NativeRolesStore nativeRolesStore = new NativeRolesStore(
        settings,
        client,
        getLicenseState(),
        systemIndices.getMainIndexManager(),
        clusterService
    );
    RoleDescriptor.setFieldPermissionsCache(fieldPermissionsCache);
    // Need to set to default if it wasn't set by an extension
    if (putRoleRequestBuilderFactory.get() == null) {
        putRoleRequestBuilderFactory.set(new PutRoleRequestBuilderFactory.Default());
    }
    if (createApiKeyRequestBuilderFactory.get() == null) {
        createApiKeyRequestBuilderFactory.set(new CreateApiKeyRequestBuilderFactory.Default());
    }
    if (getBuiltinPrivilegesResponseTranslator.get() == null) {
        getBuiltinPrivilegesResponseTranslator.set(new GetBuiltinPrivilegesResponseTranslator.Default());
    }
    if (updateApiKeyRequestTranslator.get() == null) {
        updateApiKeyRequestTranslator.set(new UpdateApiKeyRequestTranslator.Default());
    }
    if (bulkUpdateApiKeyRequestTranslator.get() == null) {
        bulkUpdateApiKeyRequestTranslator.set(new BulkUpdateApiKeyRequestTranslator.Default());
    }
    if (grantApiKeyRequestTranslator.get() == null) {
        grantApiKeyRequestTranslator.set(new RestGrantApiKeyAction.RequestTranslator.Default());
    }
    if (hasPrivilegesRequestBuilderFactory.get() == null) {
        hasPrivilegesRequestBuilderFactory.trySet(new HasPrivilegesRequestBuilderFactory.Default());
    }
    if (reservedRoleNameCheckerFactory.get() == null) {
        reservedRoleNameCheckerFactory.set(new ReservedRoleNameChecker.Factory.Default());
    }
    if (fileRoleValidator.get() == null) {
        fileRoleValidator.set(new FileRoleValidator.Default());
    }
    this.fileRolesStore.set(
        new FileRolesStore(settings, environment, resourceWatcherService, getLicenseState(), xContentRegistry, fileRoleValidator.get())
    );
    final ReservedRoleNameChecker reservedRoleNameChecker = reservedRoleNameCheckerFactory.get().create(fileRolesStore.get()::exists);
    components.add(new PluginComponentBinding<>(ReservedRoleNameChecker.class, reservedRoleNameChecker));

    // LinkedHashMap so extension-provided role providers are consulted in registration order.
    final Map<String, List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>>> customRoleProviders = new LinkedHashMap<>();
    for (SecurityExtension extension : securityExtensions) {
        final List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> providers = extension.getRolesProviders(
            extensionComponents
        );
        if (providers != null && providers.isEmpty() == false) {
            customRoleProviders.put(extension.extensionName(), providers);
        }
    }

    final ApiKeyService apiKeyService = new ApiKeyService(
        settings,
        Clock.systemUTC(),
        client,
        systemIndices.getMainIndexManager(),
        clusterService,
        cacheInvalidatorRegistry,
        threadPool
    );
    components.add(apiKeyService);

    final IndexServiceAccountTokenStore indexServiceAccountTokenStore = new IndexServiceAccountTokenStore(
        settings,
        threadPool,
        getClock(),
        client,
        systemIndices.getMainIndexManager(),
        clusterService,
        cacheInvalidatorRegistry
    );
    components.add(indexServiceAccountTokenStore);

    final FileServiceAccountTokenStore fileServiceAccountTokenStore = new FileServiceAccountTokenStore(
        environment,
        resourceWatcherService,
        threadPool,
        clusterService,
        cacheInvalidatorRegistry
    );
    components.add(fileServiceAccountTokenStore);

    final ServiceAccountService serviceAccountService = new ServiceAccountService(
        client,
        fileServiceAccountTokenStore,
        indexServiceAccountTokenStore
    );
    components.add(serviceAccountService);

    final RoleProviders roleProviders = new RoleProviders(
        reservedRolesStore,
        fileRolesStore.get(),
        nativeRolesStore,
        customRoleProviders,
        getLicenseState()
    );
    final CompositeRolesStore allRolesStore = new CompositeRolesStore(
        settings,
        roleProviders,
        privilegeStore,
        threadPool.getThreadContext(),
        getLicenseState(),
        fieldPermissionsCache,
        apiKeyService,
        serviceAccountService,
        dlsBitsetCache.get(),
        restrictedIndices,
        new DeprecationRoleDescriptorConsumer(clusterService, threadPool)
    );
    systemIndices.getMainIndexManager().addStateListener(allRolesStore::onSecurityIndexStateChange);

    final ProfileService profileService = new ProfileService(
        settings,
        getClock(),
        client,
        systemIndices.getProfileIndexManager(),
        clusterService,
        featureService,
        realms
    );
    components.add(profileService);

    // We use the value of the {@code ENROLLMENT_ENABLED} setting to determine if the node is starting up with auto-generated
    // certificates (which have been generated by pre-startup scripts). In this case, and further if the node forms a new cluster by
    // itself, rather than joining an existing one, we complete the auto-configuration by generating and printing credentials and
    // enrollment tokens (when the .security index becomes available).
    // The generated information is output on the node's standard out.
    InitialNodeSecurityAutoConfiguration.maybeGenerateEnrollmentTokensAndElasticCredentialsOnNodeStartup(
        nativeUsersStore,
        systemIndices.getMainIndexManager(),
        getSslService(),
        client,
        environment,
        (runnable -> nodeStartedListenable.addListener(ActionListener.running(runnable))),
        threadPool
    );

    // to keep things simple, just invalidate all cached entries on license change. this happens so rarely that the impact should be
    // minimal
    getLicenseState().addListener(allRolesStore::invalidateAll);

    final AuthenticationFailureHandler failureHandler = createAuthenticationFailureHandler(realms, extensionComponents);
    final boolean operatorPrivilegesEnabled = OPERATOR_PRIVILEGES_ENABLED.get(settings);
    if (operatorPrivilegesEnabled) {
        logger.info("operator privileges are enabled");
        if (operatorOnlyRegistry.get() == null) {
            operatorOnlyRegistry.set(new DefaultOperatorOnlyRegistry(clusterService.getClusterSettings()));
        }
        operatorPrivilegesService.set(
            new OperatorPrivileges.DefaultOperatorPrivilegesService(
                getLicenseState(),
                new FileOperatorUsersStore(environment, resourceWatcherService),
                operatorOnlyRegistry.get()
            )
        );
    } else {
        operatorPrivilegesService.set(OperatorPrivileges.NOOP_OPERATOR_PRIVILEGES_SERVICE);
    }
    authcService.set(
        new AuthenticationService(
            settings,
            realms,
            auditTrailService,
            failureHandler,
            threadPool,
            anonymousUser,
            tokenService,
            apiKeyService,
            serviceAccountService,
            operatorPrivilegesService.get(),
            telemetryProvider.getMeterRegistry()
        )
    );
    components.add(authcService.get());
    systemIndices.getMainIndexManager().addStateListener(authcService.get()::onSecurityIndexStateChange);

    Set<RequestInterceptor> requestInterceptors = Sets.newHashSet(
        new ResizeRequestInterceptor(threadPool, getLicenseState(), auditTrailService),
        new IndicesAliasesRequestInterceptor(threadPool.getThreadContext(), getLicenseState(), auditTrailService)
    );
    if (XPackSettings.DLS_FLS_ENABLED.get(settings)) {
        requestInterceptors.addAll(
            Arrays.asList(
                new SearchRequestInterceptor(threadPool, getLicenseState()),
                new ShardSearchRequestInterceptor(threadPool, getLicenseState()),
                new UpdateRequestInterceptor(threadPool, getLicenseState()),
                new BulkShardRequestInterceptor(threadPool, getLicenseState()),
                new DlsFlsLicenseRequestInterceptor(threadPool.getThreadContext(), getLicenseState()),
                new SearchRequestCacheDisablingInterceptor(threadPool, getLicenseState()),
                new ValidateRequestInterceptor(threadPool, getLicenseState())
            )
        );
    }
    requestInterceptors = Collections.unmodifiableSet(requestInterceptors);

    if (authorizationDenialMessages.get() == null) {
        authorizationDenialMessages.set(new AuthorizationDenialMessages.Default());
    }
    final AuthorizationService authzService = new AuthorizationService(
        settings,
        allRolesStore,
        fieldPermissionsCache,
        clusterService,
        auditTrailService,
        failureHandler,
        threadPool,
        anonymousUser,
        getAuthorizationEngine(),
        requestInterceptors,
        getLicenseState(),
        expressionResolver,
        operatorPrivilegesService.get(),
        restrictedIndices,
        authorizationDenialMessages.get()
    );

    components.add(nativeRolesStore); // used by roles actions
    components.add(reservedRolesStore); // used by roles actions
    components.add(allRolesStore); // for SecurityInfoTransportAction and clear roles cache
    components.add(authzService);

    final SecondaryAuthenticator secondaryAuthenticator = new SecondaryAuthenticator(
        securityContext.get(),
        authcService.get(),
        auditTrailService
    );
    this.secondayAuthc.set(secondaryAuthenticator);
    components.add(secondaryAuthenticator);

    ipFilter.set(new IPFilter(settings, auditTrailService, clusterService.getClusterSettings(), getLicenseState()));
    components.add(ipFilter.get());

    DestructiveOperations destructiveOperations = new DestructiveOperations(settings, clusterService.getClusterSettings());
    crossClusterAccessAuthcService.set(new CrossClusterAccessAuthenticationService(clusterService, apiKeyService, authcService.get()));
    components.add(crossClusterAccessAuthcService.get());
    securityInterceptor.set(
        new SecurityServerTransportInterceptor(
            settings,
            threadPool,
            authcService.get(),
            authzService,
            getSslService(),
            securityContext.get(),
            destructiveOperations,
            crossClusterAccessAuthcService.get(),
            getLicenseState()
        )
    );

    securityActionFilter.set(
        new SecurityActionFilter(
            authcService.get(),
            authzService,
            auditTrailService,
            getLicenseState(),
            threadPool,
            securityContext.get(),
            destructiveOperations,
            secondaryAuthActions.get() == null ? Set::of : secondaryAuthActions.get()
        )
    );

    components.add(
        new SecurityUsageServices(realms, allRolesStore, nativeRoleMappingStore, ipFilter.get(), profileService, apiKeyService)
    );

    reservedRoleMappingAction.set(new ReservedRoleMappingAction(nativeRoleMappingStore));
    systemIndices.getMainIndexManager().onStateRecovered(state -> reservedRoleMappingAction.get().securityIndexRecovered());

    cacheInvalidatorRegistry.validate();

    this.reloadableComponents.set(
        components.stream()
            .filter(ReloadableSecurityComponent.class::isInstance)
            .map(ReloadableSecurityComponent.class::cast)
            .collect(Collectors.toUnmodifiableList())
    );
    return components;
}

// Resolves the authorization engine from extensions (at most one extension may supply it); null means use the default.
private AuthorizationEngine getAuthorizationEngine() {
    return findValueFromExtensions("authorization engine", extension -> extension.getAuthorizationEngine(settings));
}

private AuthenticationFailureHandler createAuthenticationFailureHandler(
    final Realms realms,
    final SecurityExtension.SecurityComponents components
) {
    AuthenticationFailureHandler failureHandler = findValueFromExtensions(
        "authentication failure handler",
        extension -> extension.getAuthenticationFailureHandler(components)
    );
    if (failureHandler == null) {
        logger.debug("Using default authentication failure handler");
        Supplier<Map<String, List<String>>>
headersSupplier = () -> { final Map<String, List<String>> defaultFailureResponseHeaders = new HashMap<>(); realms.getActiveRealms().forEach((realm) -> { Map<String, List<String>> realmFailureHeaders = realm.getAuthenticationFailureHeaders(); realmFailureHeaders.forEach( (key, value) -> value.stream() .filter(v -> defaultFailureResponseHeaders.computeIfAbsent(key, x -> new ArrayList<>()).contains(v) == false) .forEach(v -> defaultFailureResponseHeaders.get(key).add(v)) ); }); if (TokenService.isTokenServiceEnabled(settings)) { String bearerScheme = "Bearer realm=\"" + XPackField.SECURITY + "\""; if (defaultFailureResponseHeaders.computeIfAbsent("WWW-Authenticate", x -> new ArrayList<>()) .contains(bearerScheme) == false) { defaultFailureResponseHeaders.get("WWW-Authenticate").add(bearerScheme); } } if (API_KEY_SERVICE_ENABLED_SETTING.get(settings)) { final String apiKeyScheme = "ApiKey"; if (defaultFailureResponseHeaders.computeIfAbsent("WWW-Authenticate", x -> new ArrayList<>()) .contains(apiKeyScheme) == false) { defaultFailureResponseHeaders.get("WWW-Authenticate").add(apiKeyScheme); } } return defaultFailureResponseHeaders; }; DefaultAuthenticationFailureHandler finalDefaultFailureHandler = new DefaultAuthenticationFailureHandler(headersSupplier.get()); failureHandler = finalDefaultFailureHandler; getLicenseState().addListener(() -> { finalDefaultFailureHandler.setHeaders(headersSupplier.get()); }); } return failureHandler; } /** * Calls the provided function for each configured extension and return the value that was generated by the extensions. * If multiple extensions provide a value, throws {@link IllegalStateException}. * If no extensions provide a value (or if there are no extensions) returns {@code null}. 
*/ @Nullable private <T> T findValueFromExtensions(String valueType, Function<SecurityExtension, T> method) { T foundValue = null; String fromExtension = null; for (SecurityExtension extension : securityExtensions) { final T extensionValue = method.apply(extension); if (extensionValue == null) { continue; } if (foundValue == null) { foundValue = extensionValue; fromExtension = extension.extensionName(); } else { throw new IllegalStateException( "Extensions [" + fromExtension + "] and [" + extension.extensionName() + "] " + " both attempted to provide a value for [" + valueType + "]" ); } } if (foundValue == null) { return null; } else { logger.debug("Using [{}] [{}] from extension [{}]", valueType, foundValue, fromExtension); return foundValue; } } @Override public Settings additionalSettings() { return additionalSettings(settings, enabled); } // visible for tests static Settings additionalSettings(final Settings settings, final boolean enabled) { if (enabled) { final Settings.Builder builder = Settings.builder(); builder.put(SecuritySettings.addTransportSettings(settings)); if (NetworkModule.HTTP_TYPE_SETTING.exists(settings)) { final String httpType = NetworkModule.HTTP_TYPE_SETTING.get(settings); if (httpType.equals(SecurityField.NAME4)) { SecurityHttpSettings.overrideSettings(builder, settings); } else { final String message = String.format( Locale.ROOT, "http type setting [%s] must be [%s] but is [%s]", NetworkModule.HTTP_TYPE_KEY, SecurityField.NAME4, httpType ); throw new IllegalArgumentException(message); } } else { // default to security4 builder.put(NetworkModule.HTTP_TYPE_KEY, SecurityField.NAME4); SecurityHttpSettings.overrideSettings(builder, settings); } builder.put(SecuritySettings.addUserSettings(settings)); return builder.build(); } else { return Settings.EMPTY; } } @Override public List<Setting<?>> getSettings() { return getSettings(securityExtensions); } /** * Get the {@link Setting setting configuration} for all security components, including 
those defined in extensions. */ public static List<Setting<?>> getSettings(List<SecurityExtension> securityExtensions) { List<Setting<?>> settingsList = new ArrayList<>(); // The following just apply in node mode settingsList.add(XPackSettings.FIPS_MODE_ENABLED); settingsList.add(XPackSettings.FIPS_REQUIRED_PROVIDERS); SSLService.registerSettings(settingsList); // IP Filter settings IPFilter.addSettings(settingsList); // audit settings LoggingAuditTrail.registerSettings(settingsList); // authentication and authorization settings AnonymousUser.addSettings(settingsList); settingsList.addAll(InternalRealmsSettings.getSettings()); ReservedRealm.addSettings(settingsList); AuthenticationService.addSettings(settingsList); AuthorizationService.addSettings(settingsList); Automatons.addSettings(settingsList); settingsList.addAll(CompositeRolesStore.getSettings()); settingsList.addAll(DocumentSubsetBitsetCache.getSettings()); settingsList.add(FieldPermissionsCache.CACHE_SIZE_SETTING); settingsList.add(TokenService.TOKEN_EXPIRATION); settingsList.add(TokenService.DELETE_INTERVAL); settingsList.add(TokenService.DELETE_TIMEOUT); settingsList.addAll(SSLConfigurationSettings.getProfileSettings()); settingsList.add(ApiKeyService.PASSWORD_HASHING_ALGORITHM); settingsList.add(ApiKeyService.DELETE_TIMEOUT); settingsList.add(ApiKeyService.DELETE_INTERVAL); settingsList.add(ApiKeyService.DELETE_RETENTION_PERIOD); settingsList.add(ApiKeyService.CACHE_HASH_ALGO_SETTING); settingsList.add(ApiKeyService.CACHE_MAX_KEYS_SETTING); settingsList.add(ApiKeyService.CACHE_TTL_SETTING); settingsList.add(ApiKeyService.DOC_CACHE_TTL_SETTING); settingsList.add(NativePrivilegeStore.CACHE_MAX_APPLICATIONS_SETTING); settingsList.add(NativePrivilegeStore.CACHE_TTL_SETTING); settingsList.add(OPERATOR_PRIVILEGES_ENABLED); settingsList.add(CachingServiceAccountTokenStore.CACHE_TTL_SETTING); settingsList.add(CachingServiceAccountTokenStore.CACHE_HASH_ALGO_SETTING); 
        settingsList.add(CachingServiceAccountTokenStore.CACHE_MAX_TOKENS_SETTING);
        settingsList.add(SimpleRole.CACHE_SIZE_SETTING);
        settingsList.add(NativeRoleMappingStore.LAST_LOAD_CACHE_ENABLED_SETTING);

        // hide settings
        settingsList.add(Setting.stringListSetting(SecurityField.setting("hide_settings"), Property.NodeScope, Property.Filtered));
        return settingsList;
    }

    /**
     * HTTP request headers registered by the security plugin with the REST layer.
     * Run-as and X-Forwarded-For headers are only registered when the corresponding
     * feature is enabled in the node settings.
     */
    @Override
    public Collection<RestHeaderDefinition> getRestHeaders() {
        Set<RestHeaderDefinition> headers = new HashSet<>();
        headers.add(new RestHeaderDefinition(UsernamePasswordToken.BASIC_AUTH_HEADER, false));
        headers.add(new RestHeaderDefinition(SecondaryAuthenticator.SECONDARY_AUTH_HEADER_NAME, false));
        if (XPackSettings.AUDIT_ENABLED.get(settings)) {
            // NOTE(review): the boolean flag here is true only for X-Forwarded-For - presumably it
            // marks the header as multi-valued; confirm against RestHeaderDefinition.
            headers.add(new RestHeaderDefinition(AuditTrail.X_FORWARDED_FOR_HEADER, true));
        }
        if (AuthenticationServiceField.RUN_AS_ENABLED.get(settings)) {
            headers.add(new RestHeaderDefinition(AuthenticationServiceField.RUN_AS_USER_HEADER, false));
        }
        headers.add(new RestHeaderDefinition(JwtRealm.HEADER_CLIENT_AUTHENTICATION, false));
        return headers;
    }

    /**
     * Setting name patterns whose values must be filtered out of settings-exposing APIs,
     * seeded from the user-configurable {@code xpack.security.hide_settings} list.
     */
    @Override
    public List<String> getSettingsFilter() {
        List<String> asArray = settings.getAsList(SecurityField.setting("hide_settings"));
        ArrayList<String> settingsFilter = new ArrayList<>(asArray);
        // hide settings where we don't define them - they are part of a group...
        settingsFilter.add("transport.profiles.*."
+ SecurityField.setting("*")); return settingsFilter; } @Override public List<BootstrapCheck> getBootstrapChecks() { return bootstrapChecks.get(); } @Override public void onIndexModule(IndexModule module) { if (enabled) { assert getLicenseState() != null; if (XPackSettings.DLS_FLS_ENABLED.get(settings)) { assert dlsBitsetCache.get() != null; module.setReaderWrapper( indexService -> new SecurityIndexReaderWrapper( shardId -> indexService.newSearchExecutionContext( shardId.id(), 0, // we pass a null index reader, which is legal and will disable rewrite optimizations // based on index statistics, which is probably safer... null, () -> { throw new IllegalArgumentException("permission filters are not allowed to use the current timestamp"); }, null, // Don't use runtime mappings in the security query emptyMap() ), dlsBitsetCache.get(), securityContext.get(), getLicenseState(), indexService.getScriptService() ) ); /* * We need to forcefully overwrite the query cache implementation to use security's opt-out query cache implementation. This * implementation disables the query cache if field level security is used for a particular request. We have to forcefully * overwrite the query cache implementation to prevent data leakage to unauthorized users. */ module.forceQueryCacheProvider( (indexSettings, cache) -> new OptOutQueryCache(indexSettings.getIndex(), cache, threadContext.get()) ); } // in order to prevent scroll ids from being maliciously crafted and/or guessed, a listener is added that // attaches information to the scroll context so that we can validate the user that created the scroll against // the user that is executing a scroll operation module.addSearchOperationListener(new SecuritySearchOperationListener(securityContext.get(), auditTrailService.get())); } } @Override public List<ActionHandler<? extends ActionRequest, ? 
extends ActionResponse>> getActions() { var usageAction = new ActionHandler<>(XPackUsageFeatureAction.SECURITY, SecurityUsageTransportAction.class); var infoAction = new ActionHandler<>(XPackInfoFeatureAction.SECURITY, SecurityInfoTransportAction.class); if (enabled == false) { return Arrays.asList(usageAction, infoAction); } return Stream.of( new ActionHandler<>(ClearRealmCacheAction.INSTANCE, TransportClearRealmCacheAction.class), new ActionHandler<>(ClearRolesCacheAction.INSTANCE, TransportClearRolesCacheAction.class), new ActionHandler<>(ClearPrivilegesCacheAction.INSTANCE, TransportClearPrivilegesCacheAction.class), new ActionHandler<>(ClearSecurityCacheAction.INSTANCE, TransportClearSecurityCacheAction.class), new ActionHandler<>(GetUsersAction.INSTANCE, TransportGetUsersAction.class), new ActionHandler<>(ActionTypes.QUERY_USER_ACTION, TransportQueryUserAction.class), new ActionHandler<>(PutUserAction.INSTANCE, TransportPutUserAction.class), new ActionHandler<>(DeleteUserAction.INSTANCE, TransportDeleteUserAction.class), new ActionHandler<>(GetRolesAction.INSTANCE, TransportGetRolesAction.class), new ActionHandler<>(PutRoleAction.INSTANCE, TransportPutRoleAction.class), new ActionHandler<>(DeleteRoleAction.INSTANCE, TransportDeleteRoleAction.class), new ActionHandler<>(TransportChangePasswordAction.TYPE, TransportChangePasswordAction.class), new ActionHandler<>(AuthenticateAction.INSTANCE, TransportAuthenticateAction.class), new ActionHandler<>(TransportSetEnabledAction.TYPE, TransportSetEnabledAction.class), new ActionHandler<>(HasPrivilegesAction.INSTANCE, TransportHasPrivilegesAction.class), new ActionHandler<>(GetUserPrivilegesAction.INSTANCE, TransportGetUserPrivilegesAction.class), new ActionHandler<>(GetRoleMappingsAction.INSTANCE, TransportGetRoleMappingsAction.class), new ActionHandler<>(PutRoleMappingAction.INSTANCE, TransportPutRoleMappingAction.class), new ActionHandler<>(DeleteRoleMappingAction.INSTANCE, TransportDeleteRoleMappingAction.class), 
new ActionHandler<>(CreateTokenAction.INSTANCE, TransportCreateTokenAction.class), new ActionHandler<>(InvalidateTokenAction.INSTANCE, TransportInvalidateTokenAction.class), new ActionHandler<>(GetCertificateInfoAction.INSTANCE, TransportGetCertificateInfoAction.class), new ActionHandler<>(RefreshTokenAction.INSTANCE, TransportRefreshTokenAction.class), new ActionHandler<>(SamlPrepareAuthenticationAction.INSTANCE, TransportSamlPrepareAuthenticationAction.class), new ActionHandler<>(SamlAuthenticateAction.INSTANCE, TransportSamlAuthenticateAction.class), new ActionHandler<>(SamlLogoutAction.INSTANCE, TransportSamlLogoutAction.class), new ActionHandler<>(SamlInvalidateSessionAction.INSTANCE, TransportSamlInvalidateSessionAction.class), new ActionHandler<>(TransportSamlCompleteLogoutAction.TYPE, TransportSamlCompleteLogoutAction.class), new ActionHandler<>(SamlSpMetadataAction.INSTANCE, TransportSamlSpMetadataAction.class), new ActionHandler<>(OpenIdConnectPrepareAuthenticationAction.INSTANCE, TransportOpenIdConnectPrepareAuthenticationAction.class), new ActionHandler<>(OpenIdConnectAuthenticateAction.INSTANCE, TransportOpenIdConnectAuthenticateAction.class), new ActionHandler<>(OpenIdConnectLogoutAction.INSTANCE, TransportOpenIdConnectLogoutAction.class), new ActionHandler<>(GetBuiltinPrivilegesAction.INSTANCE, TransportGetBuiltinPrivilegesAction.class), new ActionHandler<>(GetPrivilegesAction.INSTANCE, TransportGetPrivilegesAction.class), new ActionHandler<>(PutPrivilegesAction.INSTANCE, TransportPutPrivilegesAction.class), new ActionHandler<>(DeletePrivilegesAction.INSTANCE, TransportDeletePrivilegesAction.class), new ActionHandler<>(CreateApiKeyAction.INSTANCE, TransportCreateApiKeyAction.class), new ActionHandler<>(CreateCrossClusterApiKeyAction.INSTANCE, TransportCreateCrossClusterApiKeyAction.class), new ActionHandler<>(GrantApiKeyAction.INSTANCE, TransportGrantApiKeyAction.class), new ActionHandler<>(InvalidateApiKeyAction.INSTANCE, 
TransportInvalidateApiKeyAction.class), new ActionHandler<>(GetApiKeyAction.INSTANCE, TransportGetApiKeyAction.class), new ActionHandler<>(QueryApiKeyAction.INSTANCE, TransportQueryApiKeyAction.class), new ActionHandler<>(UpdateApiKeyAction.INSTANCE, TransportUpdateApiKeyAction.class), new ActionHandler<>(BulkUpdateApiKeyAction.INSTANCE, TransportBulkUpdateApiKeyAction.class), new ActionHandler<>(UpdateCrossClusterApiKeyAction.INSTANCE, TransportUpdateCrossClusterApiKeyAction.class), new ActionHandler<>(DelegatePkiAuthenticationAction.INSTANCE, TransportDelegatePkiAuthenticationAction.class), new ActionHandler<>(CreateServiceAccountTokenAction.INSTANCE, TransportCreateServiceAccountTokenAction.class), new ActionHandler<>(DeleteServiceAccountTokenAction.INSTANCE, TransportDeleteServiceAccountTokenAction.class), new ActionHandler<>(GetServiceAccountCredentialsAction.INSTANCE, TransportGetServiceAccountCredentialsAction.class), new ActionHandler<>(GetServiceAccountNodesCredentialsAction.INSTANCE, TransportGetServiceAccountNodesCredentialsAction.class), new ActionHandler<>(GetServiceAccountAction.INSTANCE, TransportGetServiceAccountAction.class), new ActionHandler<>(KibanaEnrollmentAction.INSTANCE, TransportKibanaEnrollmentAction.class), new ActionHandler<>(NodeEnrollmentAction.INSTANCE, TransportNodeEnrollmentAction.class), new ActionHandler<>(ProfileHasPrivilegesAction.INSTANCE, TransportProfileHasPrivilegesAction.class), new ActionHandler<>(GetProfilesAction.INSTANCE, TransportGetProfilesAction.class), new ActionHandler<>(ActivateProfileAction.INSTANCE, TransportActivateProfileAction.class), new ActionHandler<>(UpdateProfileDataAction.INSTANCE, TransportUpdateProfileDataAction.class), new ActionHandler<>(SuggestProfilesAction.INSTANCE, TransportSuggestProfilesAction.class), new ActionHandler<>(SetProfileEnabledAction.INSTANCE, TransportSetProfileEnabledAction.class), new ActionHandler<>(GetSecuritySettingsAction.INSTANCE, TransportGetSecuritySettingsAction.class), 
new ActionHandler<>(UpdateSecuritySettingsAction.INSTANCE, TransportUpdateSecuritySettingsAction.class), new ActionHandler<>(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION, TransportReloadRemoteClusterCredentialsAction.class), usageAction, infoAction ).filter(Objects::nonNull).toList(); } @Override public List<ActionFilter> getActionFilters() { if (enabled == false) { return emptyList(); } return singletonList(securityActionFilter.get()); } @Override public List<RestHandler> getRestHandlers( Settings settings, NamedWriteableRegistry namedWriteableRegistry, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, Supplier<DiscoveryNodes> nodesInCluster, Predicate<NodeFeature> clusterSupportsFeature ) { if (enabled == false) { return emptyList(); } return Stream.<RestHandler>of( new RestAuthenticateAction(settings, securityContext.get(), getLicenseState()), new RestClearRealmCacheAction(settings, getLicenseState()), new RestClearRolesCacheAction(settings, getLicenseState()), new RestClearPrivilegesCacheAction(settings, getLicenseState()), new RestClearApiKeyCacheAction(settings, getLicenseState()), new RestClearServiceAccountTokenStoreCacheAction(settings, getLicenseState()), new RestGetUsersAction(settings, getLicenseState()), new RestQueryUserAction(settings, getLicenseState()), new RestPutUserAction(settings, getLicenseState()), new RestDeleteUserAction(settings, getLicenseState()), new RestGetRolesAction(settings, getLicenseState()), new RestPutRoleAction(settings, getLicenseState(), putRoleRequestBuilderFactory.get()), new RestDeleteRoleAction(settings, getLicenseState()), new RestChangePasswordAction(settings, securityContext.get(), getLicenseState()), new RestSetEnabledAction(settings, getLicenseState()), new RestHasPrivilegesAction(settings, securityContext.get(), getLicenseState(), 
hasPrivilegesRequestBuilderFactory.get()), new RestGetUserPrivilegesAction(settings, securityContext.get(), getLicenseState()), new RestGetRoleMappingsAction(settings, getLicenseState()), new RestPutRoleMappingAction(settings, getLicenseState()), new RestDeleteRoleMappingAction(settings, getLicenseState()), new RestGetTokenAction(settings, getLicenseState()), new RestInvalidateTokenAction(settings, getLicenseState()), new RestGetCertificateInfoAction(), new RestSamlPrepareAuthenticationAction(settings, getLicenseState()), new RestSamlAuthenticateAction(settings, getLicenseState()), new RestSamlLogoutAction(settings, getLicenseState()), new RestSamlInvalidateSessionAction(settings, getLicenseState()), new RestSamlCompleteLogoutAction(settings, getLicenseState()), new RestSamlSpMetadataAction(settings, getLicenseState()), new RestOpenIdConnectPrepareAuthenticationAction(settings, getLicenseState()), new RestOpenIdConnectAuthenticateAction(settings, getLicenseState()), new RestOpenIdConnectLogoutAction(settings, getLicenseState()), new RestGetBuiltinPrivilegesAction(settings, getLicenseState(), getBuiltinPrivilegesResponseTranslator.get()), new RestGetPrivilegesAction(settings, getLicenseState()), new RestPutPrivilegesAction(settings, getLicenseState()), new RestDeletePrivilegesAction(settings, getLicenseState()), new RestCreateApiKeyAction(settings, getLicenseState(), createApiKeyRequestBuilderFactory.get()), new RestCreateCrossClusterApiKeyAction(settings, getLicenseState()), new RestUpdateApiKeyAction(settings, getLicenseState(), updateApiKeyRequestTranslator.get()), new RestBulkUpdateApiKeyAction(settings, getLicenseState(), bulkUpdateApiKeyRequestTranslator.get()), new RestUpdateCrossClusterApiKeyAction(settings, getLicenseState()), new RestGrantApiKeyAction(settings, getLicenseState(), grantApiKeyRequestTranslator.get()), new RestInvalidateApiKeyAction(settings, getLicenseState()), new RestGetApiKeyAction(settings, getLicenseState()), new 
RestQueryApiKeyAction(settings, getLicenseState()), new RestDelegatePkiAuthenticationAction(settings, getLicenseState()), new RestCreateServiceAccountTokenAction(settings, getLicenseState()), new RestDeleteServiceAccountTokenAction(settings, getLicenseState()), new RestGetServiceAccountCredentialsAction(settings, getLicenseState()), new RestGetServiceAccountAction(settings, getLicenseState()), new RestKibanaEnrollAction(settings, getLicenseState()), new RestNodeEnrollmentAction(settings, getLicenseState()), new RestProfileHasPrivilegesAction(settings, getLicenseState()), new RestGetProfilesAction(settings, getLicenseState()), new RestActivateProfileAction(settings, getLicenseState()), new RestUpdateProfileDataAction(settings, getLicenseState()), new RestSuggestProfilesAction(settings, getLicenseState()), new RestEnableProfileAction(settings, getLicenseState()), new RestDisableProfileAction(settings, getLicenseState()), new RestGetSecuritySettingsAction(settings, getLicenseState()), new RestUpdateSecuritySettingsAction(settings, getLicenseState()) ).filter(Objects::nonNull).toList(); } @Override public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) { return Map.of(SetSecurityUserProcessor.TYPE, new SetSecurityUserProcessor.Factory(securityContext::get, settings)); } @Override public void onNodeStarted() { this.nodeStartedListenable.onResponse(null); } /** * Realm settings were changed in 7.0. This method validates that the settings in use on this node match the new style of setting. * In 6.x a realm config would be * <pre> * xpack.security.authc.realms.file1.type: file * xpack.security.authc.realms.file1.order: 0 * </pre> * In 7.x this realm should be * <pre> * xpack.security.authc.realms.file.file1.order: 0 * </pre> * If confronted with an old style config, the ES Settings validation would simply fail with an error such as * <em>unknown setting [xpack.security.authc.realms.file1.order]</em>. 
This validation method provides an error that is easier to * understand and take action on. */ static void validateRealmSettings(Settings settings) { final Set<String> badRealmSettings = settings.keySet().stream().filter(k -> k.startsWith(RealmSettings.PREFIX)).filter(key -> { final String suffix = key.substring(RealmSettings.PREFIX.length()); // suffix-part, only contains a single '.' return suffix.indexOf('.') == suffix.lastIndexOf('.'); }).collect(Collectors.toSet()); if (badRealmSettings.isEmpty() == false) { String sampleRealmSetting = RealmSettings.realmSettingPrefix(new RealmConfig.RealmIdentifier("file", "my_file")) + "order"; throw new IllegalArgumentException( "Incorrect realm settings found. " + "Realm settings have been changed to include the type as part of the setting key.\n" + "For example '" + sampleRealmSetting + "'\n" + "Found invalid config: " + Strings.collectionToDelimitedString(badRealmSettings, ", ") + "\n" + "Please see the breaking changes documentation." ); } } static void validateForFips(Settings settings) { final List<String> validationErrors = new ArrayList<>(); Settings keystoreTypeSettings = settings.filter(k -> k.endsWith("keystore.type")) .filter(k -> settings.get(k).equalsIgnoreCase("jks")); if (keystoreTypeSettings.isEmpty() == false) { validationErrors.add( "JKS Keystores cannot be used in a FIPS 140 compliant JVM. Please " + "revisit [" + keystoreTypeSettings.toDelimitedString(',') + "] settings" ); } Settings keystorePathSettings = settings.filter(k -> k.endsWith("keystore.path")) .filter(k -> settings.hasValue(k.replace(".path", ".type")) == false) .filter(k -> KeyStoreUtil.inferKeyStoreType(settings.get(k)).equals("jks")); if (keystorePathSettings.isEmpty() == false) { validationErrors.add( "JKS Keystores cannot be used in a FIPS 140 compliant JVM. 
Please " + "revisit [" + keystorePathSettings.toDelimitedString(',') + "] settings" ); } final String selectedAlgorithm = XPackSettings.PASSWORD_HASHING_ALGORITHM.get(settings); if (selectedAlgorithm.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) { validationErrors.add( "Only PBKDF2 is allowed for stored credential hashing in a FIPS 140 JVM. Please set the " + "appropriate value for [ " + XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey() + " ] setting." ); } Stream.of(ApiKeyService.PASSWORD_HASHING_ALGORITHM, XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM).forEach((setting) -> { final var storedHashAlgo = setting.get(settings); if (storedHashAlgo.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) { // log instead of validation error for backwards compatibility logger.warn( "Only PBKDF2 is allowed for stored credential hashing in a FIPS 140 JVM. " + "Please set the appropriate value for [{}] setting.", setting.getKey() ); } }); final var cacheHashAlgoSettings = settings.filter(k -> k.endsWith(".cache.hash_algo")); cacheHashAlgoSettings.keySet().forEach((key) -> { final var setting = cacheHashAlgoSettings.get(key); assert setting != null; final var hashAlgoName = setting.toLowerCase(Locale.ROOT); if (hashAlgoName.equals("ssha256") == false && hashAlgoName.startsWith("pbkdf2") == false) { logger.warn( "[{}] is not recommended for in-memory credential hashing in a FIPS 140 JVM. 
" + "The recommended hasher for [{}] is SSHA256.", setting, key ); } }); Set<String> foundProviders = new HashSet<>(); for (Provider provider : java.security.Security.getProviders()) { foundProviders.add(provider.getName().toLowerCase(Locale.ROOT)); if (logger.isTraceEnabled()) { logger.trace("Security Provider: " + provider.getName() + ", Version: " + provider.getVersionStr()); provider.entrySet().forEach(entry -> { logger.trace("\t" + entry.getKey()); }); } } final List<String> requiredProviders = XPackSettings.FIPS_REQUIRED_PROVIDERS.get(settings); logger.info("JVM Security Providers: " + foundProviders); if (requiredProviders != null && requiredProviders.isEmpty() == false) { List<String> unsatisfiedProviders = requiredProviders.stream() .map(s -> s.toLowerCase(Locale.ROOT)) .filter(element -> foundProviders.contains(element) == false) .toList(); if (unsatisfiedProviders.isEmpty() == false) { String errorMessage = "Could not find required FIPS security provider: " + unsatisfiedProviders; logger.error(errorMessage); validationErrors.add(errorMessage); } } if (validationErrors.isEmpty() == false) { final StringBuilder sb = new StringBuilder(); sb.append("Validation for FIPS 140 mode failed: \n"); int index = 0; for (String error : validationErrors) { sb.append(++index).append(": ").append(error).append(";\n"); } throw new IllegalArgumentException(sb.toString()); } } @Override public List<TransportInterceptor> getTransportInterceptors(NamedWriteableRegistry namedWriteableRegistry, ThreadContext threadContext) { if (enabled == false) { // don't register anything if we are not enabled return Collections.emptyList(); } return Collections.singletonList(new TransportInterceptor() { @Override public <T extends TransportRequest> TransportRequestHandler<T> interceptHandler( String action, Executor executor, boolean forceExecution, TransportRequestHandler<T> actualHandler ) { assert securityInterceptor.get() != null; return 
                securityInterceptor.get().interceptHandler(action, executor, forceExecution, actualHandler);
            }

            @Override
            public AsyncSender interceptSender(AsyncSender sender) {
                // securityInterceptor is assigned in createComponents; it must be present before transport traffic flows
                assert securityInterceptor.get() != null;
                return securityInterceptor.get().interceptSender(sender);
            }
        });
    }

    /**
     * Registers the Netty4-based security server transport under {@code SecurityField.NAME4},
     * wiring in the SSL service, IP filter and cross-cluster access authentication service.
     * Returns an empty map when the plugin is disabled.
     */
    @Override
    public Map<String, Supplier<Transport>> getTransports(
        Settings settings,
        ThreadPool threadPool,
        PageCacheRecycler pageCacheRecycler,
        CircuitBreakerService circuitBreakerService,
        NamedWriteableRegistry namedWriteableRegistry,
        NetworkService networkService
    ) {
        if (enabled == false) { // don't register anything if we are not enabled
            return Collections.emptyMap();
        }
        IPFilter ipFilter = this.ipFilter.get();
        return Map.of(
            // security based on Netty 4
            SecurityField.NAME4,
            () -> {
                // keep a handle to the created transport (transportReference) for later use by the plugin
                transportReference.set(
                    new SecurityNetty4ServerTransport(
                        settings,
                        TransportVersion.current(),
                        threadPool,
                        networkService,
                        pageCacheRecycler,
                        namedWriteableRegistry,
                        circuitBreakerService,
                        ipFilter,
                        getSslService(),
                        getNettySharedGroupFactory(settings),
                        crossClusterAccessAuthcService.get()
                    )
                );
                return transportReference.get();
            }
        );
    }

    /**
     * Registers the security HTTP server transport under {@code SecurityField.NAME4}.
     * Returns an empty map when the plugin is disabled.
     */
    @Override
    public Map<String, Supplier<HttpServerTransport>> getHttpTransports(
        Settings settings,
        ThreadPool threadPool,
        BigArrays bigArrays,
        PageCacheRecycler pageCacheRecycler,
        CircuitBreakerService circuitBreakerService,
        NamedXContentRegistry xContentRegistry,
        NetworkService networkService,
        HttpServerTransport.Dispatcher dispatcher,
        BiConsumer<HttpPreRequest, ThreadContext> perRequestThreadContext,
        ClusterSettings clusterSettings,
        Tracer tracer
    ) {
        if (enabled == false) { // don't register anything if we are not enabled
            return Collections.emptyMap();
        }
        final IPFilter ipFilter = this.ipFilter.get();
        // Predicate allowing the channel handler to consult the IP filter before accepting a connection
        final AcceptChannelHandler.AcceptPredicate acceptPredicate = new AcceptChannelHandler.AcceptPredicate() {
            @Override
            public void setBoundAddress(BoundTransportAddress boundHttpTransportAddress) {
                ipFilter.setBoundHttpTransportAddress(boundHttpTransportAddress);
            }

            @Override
            public boolean
test(String profile, InetSocketAddress peerAddress) { return ipFilter.accept(profile, peerAddress); } }; Map<String, Supplier<HttpServerTransport>> httpTransports = new HashMap<>(); httpTransports.put(SecurityField.NAME4, () -> { final boolean ssl = HTTP_SSL_ENABLED.get(settings); final SSLService sslService = getSslService(); final SslConfiguration sslConfiguration; final BiConsumer<Channel, ThreadContext> populateClientCertificate; if (ssl) { sslConfiguration = sslService.getHttpTransportSSLConfiguration(); if (SSLService.isConfigurationValidForServerUsage(sslConfiguration) == false) { throw new IllegalArgumentException( "a key must be provided to run as a server. the key should be configured using the " + "[xpack.security.http.ssl.key] or [xpack.security.http.ssl.keystore.path] setting" ); } if (SSLService.isSSLClientAuthEnabled(sslConfiguration)) { populateClientCertificate = (channel, threadContext) -> extractClientCertificates(logger, threadContext, channel); } else { populateClientCertificate = (channel, threadContext) -> {}; } } else { sslConfiguration = null; populateClientCertificate = (channel, threadContext) -> {}; } final AuthenticationService authenticationService = this.authcService.get(); final ThreadContext threadContext = this.threadContext.get(); return getHttpServerTransportWithHeadersValidator( settings, networkService, threadPool, xContentRegistry, dispatcher, clusterSettings, getNettySharedGroupFactory(settings), tracer, new TLSConfig(sslConfiguration, sslService::createSSLEngine), acceptPredicate, (httpRequest, channel, listener) -> { HttpPreRequest httpPreRequest = HttpHeadersAuthenticatorUtils.asHttpPreRequest(httpRequest); // step 1: Populate the thread context with credentials and any other HTTP request header values (eg run-as) that the // authentication process looks for while doing its duty. 
perRequestThreadContext.accept(httpPreRequest, threadContext); populateClientCertificate.accept(channel, threadContext); RemoteHostHeader.process(channel, threadContext); // step 2: Run authentication on the now properly prepared thread-context. // This inspects and modifies the thread context. authenticationService.authenticate(httpPreRequest, listener.delegateFailureAndWrap((l, ignored) -> l.onResponse(null))); }, (httpRequest, channel, listener) -> { // allow unauthenticated OPTIONS request through // this includes CORS preflight, and regular OPTIONS that return permitted methods for a given path // But still populate the thread context with the usual request headers (as for any other request that is dispatched) HttpPreRequest httpPreRequest = HttpHeadersAuthenticatorUtils.asHttpPreRequest(httpRequest); perRequestThreadContext.accept(httpPreRequest, threadContext); populateClientCertificate.accept(channel, threadContext); RemoteHostHeader.process(channel, threadContext); listener.onResponse(null); } ); }); return httpTransports; } // "public" so it can be used in tests public static Netty4HttpServerTransport getHttpServerTransportWithHeadersValidator( Settings settings, NetworkService networkService, ThreadPool threadPool, NamedXContentRegistry xContentRegistry, HttpServerTransport.Dispatcher dispatcher, ClusterSettings clusterSettings, SharedGroupFactory sharedGroupFactory, Tracer tracer, TLSConfig tlsConfig, @Nullable AcceptChannelHandler.AcceptPredicate acceptPredicate, HttpValidator httpValidator, HttpValidator httpOptionsValidator ) { return getHttpServerTransportWithHeadersValidator( settings, networkService, threadPool, xContentRegistry, dispatcher, clusterSettings, sharedGroupFactory, tracer, tlsConfig, acceptPredicate, (httpRequest, channel, listener) -> { if (httpRequest.method() == HttpMethod.OPTIONS) { if (HttpUtil.getContentLength(httpRequest, -1L) > 1 || HttpUtil.isTransferEncodingChunked(httpRequest)) { // OPTIONS requests with a body are not 
supported listener.onFailure( new ElasticsearchStatusException( "OPTIONS requests with a payload body are not supported", RestStatus.BAD_REQUEST ) ); } else { httpOptionsValidator.validate(httpRequest, channel, listener); } } else { httpValidator.validate(httpRequest, channel, listener); } } ); } // "public" so it can be used in tests public static Netty4HttpServerTransport getHttpServerTransportWithHeadersValidator( Settings settings, NetworkService networkService, ThreadPool threadPool, NamedXContentRegistry xContentRegistry, HttpServerTransport.Dispatcher dispatcher, ClusterSettings clusterSettings, SharedGroupFactory sharedGroupFactory, Tracer tracer, TLSConfig tlsConfig, @Nullable AcceptChannelHandler.AcceptPredicate acceptPredicate, HttpValidator httpValidator ) { return new Netty4HttpServerTransport( settings, networkService, threadPool, xContentRegistry, dispatcher, clusterSettings, sharedGroupFactory, tracer, tlsConfig, acceptPredicate, Objects.requireNonNull(httpValidator) ) { @Override protected void populatePerRequestThreadContext(RestRequest restRequest, ThreadContext threadContext) { ThreadContext.StoredContext authenticationThreadContext = HttpHeadersAuthenticatorUtils.extractAuthenticationContext( restRequest.getHttpRequest() ); if (authenticationThreadContext != null) { authenticationThreadContext.restore(); } else { // this is an unexpected internal error condition where {@code Netty4HttpHeaderValidator} does not work correctly throw new ElasticsearchSecurityException("Request is not authenticated"); } } }; } @Override public RestInterceptor getRestHandlerInterceptor(ThreadContext threadContext) { return new SecurityRestFilter( enabled, threadContext, secondayAuthc.get(), auditTrailService.get(), operatorPrivilegesService.get() ); } @Override public List<ExecutorBuilder<?>> getExecutorBuilders(final Settings settings) { if (enabled) { final int allocatedProcessors = EsExecutors.allocatedProcessors(settings); return List.of( new 
FixedExecutorBuilder( settings, TokenService.THREAD_POOL_NAME, 1, 1000, "xpack.security.authc.token.thread_pool", EsExecutors.TaskTrackingConfig.DO_NOT_TRACK ), new FixedExecutorBuilder( settings, SECURITY_CRYPTO_THREAD_POOL_NAME, (allocatedProcessors + 1) / 2, 1000, "xpack.security.crypto.thread_pool", EsExecutors.TaskTrackingConfig.DO_NOT_TRACK ) ); } return Collections.emptyList(); } @Override public UnaryOperator<Map<String, IndexTemplateMetadata>> getIndexTemplateMetadataUpgrader() { return templates -> { // .security index is not managed by using templates anymore templates.remove("security_audit_log"); // .security is a system index now. deleting another legacy template that's not used anymore templates.remove("security-index-template"); return templates; }; } @Override public Function<String, FieldPredicate> getFieldFilter() { if (enabled) { return index -> { XPackLicenseState licenseState = getLicenseState(); IndicesAccessControl indicesAccessControl = threadContext.get() .getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); if (indicesAccessControl == null) { return FieldPredicate.ACCEPT_ALL; } assert indicesAccessControl.isGranted(); IndicesAccessControl.IndexAccessControl indexPermissions = indicesAccessControl.getIndexPermissions(index); if (indexPermissions == null) { return FieldPredicate.ACCEPT_ALL; } FieldPermissions fieldPermissions = indexPermissions.getFieldPermissions(); if (fieldPermissions.hasFieldLevelSecurity() == false) { return FieldPredicate.ACCEPT_ALL; } if (FIELD_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState) == false) { // check license last, once we know FLS is actually used return FieldPredicate.ACCEPT_ALL; } return fieldPermissions.fieldPredicate(); }; } return MapperPlugin.super.getFieldFilter(); } @Override public BiConsumer<DiscoveryNode, ClusterState> getJoinValidator() { if (enabled) { return new ValidateLicenseForFIPS(XPackSettings.FIPS_MODE_ENABLED.get(settings), getLicenseService()); } return null; } 
/**
 * Reloads secure settings for all reloadable security components. Exceptions from individual
 * components are collected and rethrown together as suppressed exceptions so one failing
 * component does not prevent the others from reloading. When security is disabled, only
 * verifies that no remote-cluster credentials are configured on disk.
 */
@Override
public void reload(Settings settings) throws Exception {
    if (enabled) {
        final List<Exception> reloadExceptions = new ArrayList<>();
        try {
            reloadRemoteClusterCredentials(settings);
        } catch (Exception ex) {
            reloadExceptions.add(ex);
        }
        this.getReloadableSecurityComponents().forEach(component -> {
            try {
                component.reload(settings);
            } catch (Exception ex) {
                reloadExceptions.add(ex);
            }
        });
        if (false == reloadExceptions.isEmpty()) {
            final var combinedException = new ElasticsearchException(
                "secure settings reload failed for one or more security components"
            );
            reloadExceptions.forEach(combinedException::addSuppressed);
            throw combinedException;
        }
    } else {
        ensureNoRemoteClusterCredentialsOnDisabledSecurity(settings);
    }
}

/**
 * This method uses a transport action internally to access classes that are injectable but not part of the plugin contract.
 * See {@link TransportReloadRemoteClusterCredentialsAction} for more context.
 */
private void reloadRemoteClusterCredentials(Settings settingsWithKeystore) {
    // Using `settings` instead of `settingsWithKeystore` is deliberate: we are not interested in secure settings here
    if (DiscoveryNode.isStateless(settings)) {
        // Stateless does not support remote cluster operations. Skip.
        return;
    }
    // execute synchronously: the reload contract expects completion (or failure) before returning
    final PlainActionFuture<ActionResponse.Empty> future = new PlainActionFuture<>();
    getClient().execute(
        ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION,
        new TransportReloadRemoteClusterCredentialsAction.Request(settingsWithKeystore),
        future
    );
    future.actionGet();
}

/**
 * Builds a flat map of authentication details (user, realm, run-as, auth type, API key) for
 * inclusion in slow-log entries. Returns an empty map when there is no security context or no
 * authentication on it.
 */
public Map<String, String> getAuthContextForSlowLog() {
    if (this.securityContext.get() != null && this.securityContext.get().getAuthentication() != null) {
        Authentication authentication = this.securityContext.get().getAuthentication();
        Subject authenticatingSubject = authentication.getAuthenticatingSubject();
        // NOTE(review): "effetctiveSubject" is a typo'd local variable name (effective) — rename candidate
        Subject effetctiveSubject = authentication.getEffectiveSubject();
        Map<String, String> authContext = new HashMap<>();
        if (authenticatingSubject.getUser() != null) {
            authContext.put("user.name", authenticatingSubject.getUser().principal());
            authContext.put("user.realm", authenticatingSubject.getRealm().getName());
            if (authenticatingSubject.getUser().fullName() != null) {
                authContext.put("user.full_name", authenticatingSubject.getUser().fullName());
            }
        }
        // Only include effective user if different from authenticating user (run-as)
        if (effetctiveSubject.getUser() != null && effetctiveSubject.equals(authenticatingSubject) == false) {
            authContext.put("user.effective.name", effetctiveSubject.getUser().principal());
            authContext.put("user.effective.realm", effetctiveSubject.getRealm().getName());
            if (effetctiveSubject.getUser().fullName() != null) {
                authContext.put("user.effective.full_name", effetctiveSubject.getUser().fullName());
            }
        }
        authContext.put("auth.type", authentication.getAuthenticationType().name());
        if (authentication.isApiKey()) {
            // NOTE(review): assumes API key id/name metadata entries are present and non-null for
            // API-key authentications — a missing entry would NPE on toString(); confirm upstream guarantee
            authContext.put("apikey.id", authenticatingSubject.getMetadata().get(AuthenticationField.API_KEY_ID_KEY).toString());
            authContext.put("apikey.name", authenticatingSubject.getMetadata().get(AuthenticationField.API_KEY_NAME_KEY).toString());
        }
        return authContext;
    }
    return Map.of();
}

/**
 * Join validator that rejects FIPS mode under license operation modes where it is not allowed
 * (only Platinum or Trial licenses permit it, per the thrown message).
 */
static final class ValidateLicenseForFIPS implements BiConsumer<DiscoveryNode, ClusterState> {
    private final boolean inFipsMode;
    private final LicenseService licenseService;

    ValidateLicenseForFIPS(boolean inFipsMode, LicenseService licenseService) {
        this.inFipsMode = inFipsMode;
        this.licenseService = licenseService;
    }

    @Override
    public void accept(DiscoveryNode node, ClusterState state) {
        if (inFipsMode) {
            License license;
            // prefer the license recorded in cluster-state metadata when available
            if (licenseService instanceof ClusterStateLicenseService clusterStateLicenseService) {
                license = clusterStateLicenseService.getLicense(state.metadata());
            } else {
                license = licenseService.getLicense();
            }
            if (license != null && XPackLicenseState.isFipsAllowedForOperationMode(license.operationMode()) == false) {
                throw new IllegalStateException(
                    "FIPS mode cannot be used with a ["
                        + license.operationMode()
                        + "] license. It is only allowed with a Platinum or Trial license."
                );
            }
        }
    }
}

/**
 * Loads SPI-provided security extensions plus a set of singleton extension points, each of
 * which may have at most one implementation.
 */
@Override
public void loadExtensions(ExtensionLoader loader) {
    securityExtensions.addAll(loader.loadExtensions(SecurityExtension.class));
    loadSingletonExtensionAndSetOnce(loader, operatorOnlyRegistry, OperatorOnlyRegistry.class);
    loadSingletonExtensionAndSetOnce(loader, putRoleRequestBuilderFactory, PutRoleRequestBuilderFactory.class);
    loadSingletonExtensionAndSetOnce(loader, getBuiltinPrivilegesResponseTranslator, GetBuiltinPrivilegesResponseTranslator.class);
    loadSingletonExtensionAndSetOnce(loader, updateApiKeyRequestTranslator, UpdateApiKeyRequestTranslator.class);
    loadSingletonExtensionAndSetOnce(loader, bulkUpdateApiKeyRequestTranslator, BulkUpdateApiKeyRequestTranslator.class);
    loadSingletonExtensionAndSetOnce(loader, createApiKeyRequestBuilderFactory, CreateApiKeyRequestBuilderFactory.class);
    loadSingletonExtensionAndSetOnce(loader, hasPrivilegesRequestBuilderFactory, HasPrivilegesRequestBuilderFactory.class);
    loadSingletonExtensionAndSetOnce(loader, authorizationDenialMessages, AuthorizationDenialMessages.class);
    loadSingletonExtensionAndSetOnce(loader, reservedRoleNameCheckerFactory, ReservedRoleNameChecker.Factory.class);
    loadSingletonExtensionAndSetOnce(loader, grantApiKeyRequestTranslator, RestGrantApiKeyAction.RequestTranslator.class);
    loadSingletonExtensionAndSetOnce(loader, fileRoleValidator, FileRoleValidator.class);
    loadSingletonExtensionAndSetOnce(loader, secondaryAuthActions, SecondaryAuthActions.class);
}

/**
 * Loads at most one implementation of {@code clazz} into {@code setOnce}; zero implementations
 * falls back to the default (logged at debug), more than one is an error.
 */
private <T> void loadSingletonExtensionAndSetOnce(ExtensionLoader loader, SetOnce<T> setOnce, Class<T> clazz) {
    final List<T> loaded = loader.loadExtensions(clazz);
    if (loaded.size() > 1) {
        throw new IllegalStateException(clazz + " may not have multiple implementations");
    } else if (loaded.size() == 1) {
        final T singleLoaded = loaded.get(0);
        setOnce.set(singleLoaded);
        logger.debug("Loaded implementation [{}] for interface [{}]", singleLoaded.getClass().getCanonicalName(), clazz);
    } else {
        logger.debug("Will fall back on default implementation for interface [{}]", clazz);
    }
}

/**
 * Lazily creates the shared Netty group factory; synchronized so only one instance is ever
 * created, and asserts that later callers pass the same settings.
 */
private synchronized SharedGroupFactory getNettySharedGroupFactory(Settings settings) {
    if (sharedGroupFactory.get() != null) {
        assert sharedGroupFactory.get().getSettings().equals(settings) : "Different settings than originally provided";
        return sharedGroupFactory.get();
    } else {
        sharedGroupFactory.set(new SharedGroupFactory(settings));
        return sharedGroupFactory.get();
    }
}

@Override
public Collection<SystemIndexDescriptor> getSystemIndexDescriptors(Settings settings) {
    return systemIndices.getSystemIndexDescriptors();
}

@Override
public String getFeatureName() {
    return "security";
}

@Override
public String getFeatureDescription() {
    return "Manages configuration for Security features, such as users and roles";
}

/**
 * Supplies the request-cache key differentiator used for DLS/FLS, or {@code null} when
 * security is disabled.
 */
@Override
public CheckedBiConsumer<ShardSearchRequest, StreamOutput, IOException> getRequestCacheKeyDifferentiator() {
    if (enabled == false) {
        return null;
    }
    return new DlsFlsRequestCacheDifferentiator(getLicenseState(), securityContext, scriptServiceReference);
}

List<ReservedClusterStateHandler<?>> reservedClusterStateHandlers() {
    // If security is disabled we never call the plugin createComponents
    if (enabled == false) {
        return Collections.emptyList();
    }
    return List.of(reservedRoleMappingAction.get());
}

// visible for testing
OperatorPrivileges.OperatorPrivilegesService getOperatorPrivilegesService() {
    return operatorPrivilegesService.get();
}
}
mhl-b/elasticsearch
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
971
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; import java.util.Objects; /** Similar to Point - implement locally to avoid depending on GWT. */ public class Dimension { public final int width; public final int height; public Dimension(int width, int height) { this.width = width; this.height = height; } public int getWidth() { return width; } public int getHeight() { return height; } @Override public boolean equals(Object o) { if (!(o instanceof Dimension)) { return false; } Dimension other = (Dimension) o; return other.width == width && other.height == height; } @Override public int hashCode() { return Objects.hash(width, height); } @Override public String toString() { return String.format("(%d, %d)", width, height); } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/Dimension.java
972
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; import java.util.Objects; public class Rectangle { public final int x; public final int y; public final int height; public final int width; public Rectangle(int x, int y, int height, int width) { this.x = x; this.y = y; this.height = height; this.width = width; } public Rectangle(Point p, Dimension d) { x = p.x; y = p.y; height = d.height; width = d.width; } public int getX() { return x; } public int getY() { return y; } public int getHeight() { return height; } public int getWidth() { return width; } public Point getPoint() { return new Point(x, y); } public Dimension getDimension() { return new Dimension(width, height); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Rectangle rectangle = (Rectangle) o; return x == rectangle.x && y == rectangle.y && height == rectangle.height && width == rectangle.width; } @Override public int hashCode() { return Objects.hash(x, y, height, width); } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/Rectangle.java
974
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.json; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; class Types { private Types() { // Utility class } static Class<?> narrow(Type type) { if (type instanceof Class) { return (Class<?>) type; } if (type instanceof ParameterizedType) { return narrow(((ParameterizedType) type).getRawType()); } throw new JsonException("Unable to narrow " + type.getClass()); } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/json/Types.java
975
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; /** Represents the type of new browser window that may be created. */ public enum WindowType { WINDOW("window"), TAB("tab"), ; private final String text; WindowType(String text) { this.text = text; } @Override public String toString() { return String.valueOf(text); } public static WindowType fromString(String text) { if (text != null) { for (WindowType b : WindowType.values()) { if (text.equalsIgnoreCase(b.text)) { return b; } } } return null; } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/WindowType.java
976
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.bidi; import java.util.Map; import java.util.function.Function; import org.openqa.selenium.internal.Require; public class Event<X> { private final String method; private final Function<Map<String, Object>, X> mapper; public Event(String method, Function<Map<String, Object>, X> mapper) { this.method = Require.nonNull("Event method", method); this.mapper = Require.nonNull("Result mapper", mapper); } public String getMethod() { return method; } public Function<Map<String, Object>, X> getMapper() { return mapper; } @Override public String toString() { return method; } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/bidi/Event.java
977
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; import org.openqa.selenium.print.PrintOptions; public interface PrintsPage { Pdf print(PrintOptions printOptions) throws WebDriverException; }
SeleniumHQ/selenium
java/src/org/openqa/selenium/PrintsPage.java
978
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; /** * Marker interface for credentials used for authenticating a browser to a site, typically via * {@link UsernameAndPassword} and Basic or Digest authentication. */ public interface Credentials {}
SeleniumHQ/selenium
java/src/org/openqa/selenium/Credentials.java
979
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.docker; import org.openqa.selenium.internal.Require; public class Port { private final String protocol; private final int port; private Port(String protocol, int port) { this.protocol = Require.nonNull("Protocol", protocol); this.port = port; } public static Port tcp(int port) { return new Port("tcp", port); } public String getProtocol() { return protocol; } public int getPort() { return port; } public String toString() { return port + "/" + protocol; } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/docker/Port.java
980
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; /** * This interface indicates that the implementing class knows about the driver that contains it and * can export it. */ @FunctionalInterface public interface WrapsDriver { /** * @return The driver that contains this element. */ WebDriver getWrappedDriver(); }
SeleniumHQ/selenium
java/src/org/openqa/selenium/WrapsDriver.java
981
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.logging; import java.util.Set; import org.openqa.selenium.Beta; /** Interface for providing logs. */ @Beta public interface Logs { /** * Fetches available log entries for the given log type. * * <p>Note that log buffers are reset after each call, meaning that available log entries * correspond to those entries not yet returned for a given log type. In practice, this means that * this call will return the available log entries since the last call, or from the start of the * session. * * <p>For more info on enabling logging, look at {@link LoggingPreferences}. * * @param logType The log type. * @return Available log entries for the specified log type. */ LogEntries get(String logType); /** * Queries for available log types. * * @return A set of available log types. */ Set<String> getAvailableLogTypes(); }
SeleniumHQ/selenium
java/src/org/openqa/selenium/logging/Logs.java
982
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.docker; import java.util.Set; import org.openqa.selenium.docker.internal.ImageSummary; import org.openqa.selenium.internal.Require; import org.openqa.selenium.json.Json; public class Image { private final ImageSummary summary; public Image(ImageSummary summary) { this.summary = Require.nonNull("Container image summary", summary); } public String getName() { return summary.getRepoTags().stream() .findFirst() .orElseThrow(() -> new DockerException("Unable to find name")); } public ImageId getId() { return summary.getId(); } public Set<String> getTags() { return summary.getRepoTags(); } @Override public String toString() { new Json().toJson(summary); return "Image{" + "summary=" + summary + '}'; } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/docker/Image.java
983
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; import java.util.Set; /** * Some implementations of WebDriver, notably those that support native testing, need the ability to * switch between the native and web-based contexts. This can be achieved by using this interface. */ @Deprecated public interface ContextAware { /** * Switch the focus of future commands for this driver to the context with the given name. * * @param name The name of the context as returned by {@link #getContextHandles()}. * @return This driver focused on the given window. * @throws NoSuchContextException If the context cannot be found. */ WebDriver context(String name); /** * Return a set of context handles which can be used to iterate over all contexts of this * WebDriver instance. * * @return A set of context handles which can be used to iterate over available contexts. */ Set<String> getContextHandles(); /** * Return an opaque handle to this context that uniquely identifies it within this driver * instance. This can be used to switch to this context at a later date. * * @return The current context handle. */ String getContext(); }
SeleniumHQ/selenium
java/src/org/openqa/selenium/ContextAware.java
984
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.json; /** Used to specify the pending JSON element type. */ public enum JsonType { /** Boolean value */ BOOLEAN, /** property name */ NAME, /** {@code null} value */ NULL, /** numeric value */ NUMBER, /** start of object */ START_MAP, /** end of object */ END_MAP, /** start of array */ START_COLLECTION, /** end of array */ END_COLLECTION, /** string value */ STRING, /** end of input */ END }
SeleniumHQ/selenium
java/src/org/openqa/selenium/json/JsonType.java
985
/*
 * Copyright 2002-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.stereotype;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.core.annotation.AliasFor;

/**
 * Indicates that an annotated class is a "Service", originally defined by Domain-Driven
 * Design (Evans, 2003) as "an operation offered as an interface that stands alone in the
 * model, with no encapsulated state."
 *
 * <p>May also indicate that a class is a "Business Service Facade" (in the Core J2EE
 * patterns sense), or something similar. This annotation is a general-purpose stereotype
 * and individual teams may narrow their semantics and use as appropriate.
 *
 * <p>This annotation serves as a specialization of {@link Component @Component},
 * allowing for implementation classes to be autodetected through classpath scanning.
 *
 * @author Juergen Hoeller
 * @since 2.5
 * @see Component
 * @see Repository
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Component
public @interface Service {

	/**
	 * Alias for {@link Component#value}.
	 * @return the suggested component (bean) name, if any; empty otherwise
	 */
	@AliasFor(annotation = Component.class)
	String value() default "";

}
spring-projects/spring-framework
spring-context/src/main/java/org/springframework/stereotype/Service.java
986
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.print; import java.util.HashMap; import java.util.Map; public class PageSize { private final double height; private final double width; public PageSize() { // Initialize with defaults. A4 paper size defaults in cms. this.height = 27.94; this.width = 21.59; } public PageSize(double height, double width) { this.height = height; this.width = width; } public double getHeight() { return height; } public double getWidth() { return width; } public Map<String, Object> toMap() { final Map<String, Object> options = new HashMap<>(7); options.put("height", getHeight()); options.put("width", getWidth()); return options; } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/print/PageSize.java
987
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.cli; import java.io.PrintStream; import java.util.Set; import org.openqa.selenium.grid.config.Role; public interface CliCommand { String getName(); String getDescription(); /** * Allows a command to indicate that certain aspects are configurable. Any roles that are exposed * here will be matched against flag objects which implement {@link * org.openqa.selenium.grid.config.HasRoles} to allow configuration via command line flags. */ Set<Role> getConfigurableRoles(); /** * Allows the set of objects used for finding command-line flags to be augmented with default * implementations. */ Set<Object> getFlagObjects(); Executable configure(PrintStream out, PrintStream err, String... args); default boolean isShown() { return true; } interface Executable { void run(); } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/cli/CliCommand.java
988
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.edge; import static org.openqa.selenium.remote.Browser.EDGE; import com.google.auto.service.AutoService; import java.util.Map; import java.util.function.Predicate; import org.openqa.selenium.Capabilities; import org.openqa.selenium.remote.AdditionalHttpCommands; import org.openqa.selenium.remote.AugmenterProvider; import org.openqa.selenium.remote.CommandInfo; import org.openqa.selenium.remote.http.HttpMethod; @SuppressWarnings({"rawtypes", "RedundantSuppression"}) @AutoService({AdditionalHttpCommands.class, AugmenterProvider.class}) public class AddHasCdp extends org.openqa.selenium.chromium.AddHasCdp { private static final Map<String, CommandInfo> COMMANDS = Map.of(EXECUTE_CDP, new CommandInfo("session/:sessionId/ms/cdp/execute", HttpMethod.POST)); @Override public Map<String, CommandInfo> getAdditionalCommands() { return COMMANDS; } @Override public Predicate<Capabilities> isApplicable() { return EDGE::is; } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/edge/AddHasCdp.java
989
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.html5; /** Represents the physical location of the browser. */ @Deprecated public class Location { private final double latitude; private final double longitude; private final double altitude; public Location(double latitude, double longitude, double altitude) { this.latitude = latitude; this.longitude = longitude; this.altitude = altitude; } public double getLatitude() { return latitude; } public double getLongitude() { return longitude; } public double getAltitude() { return altitude; } @Override public String toString() { return String.format( "Latitude: %s, Longitude: %s, Altitude: %s", latitude, longitude, altitude); } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/html5/Location.java
990
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.json; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; public abstract class TypeToken<T> { private final Type type; public TypeToken() { // This code is taken from Guava's TypeToken class. Type superclass = getClass().getGenericSuperclass(); if (!(superclass instanceof ParameterizedType)) { throw new IllegalStateException(String.format("%s isn't parameterized", superclass)); } type = ((ParameterizedType) superclass).getActualTypeArguments()[0]; } public Type getType() { return type; } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/json/TypeToken.java
991
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.internal; import java.lang.management.ManagementFactory; import java.util.logging.Level; /** Used to provide information about whether Selenium is running under debug mode. */ public class Debug { private static final boolean IS_DEBUG; static { boolean debugFlag = ManagementFactory.getRuntimeMXBean().getInputArguments().stream() .anyMatch(str -> str.contains("-agentlib:jdwp")); boolean simpleProperty = Boolean.getBoolean("selenium.debug"); boolean longerProperty = Boolean.getBoolean("selenium.webdriver.verbose"); IS_DEBUG = debugFlag || simpleProperty || longerProperty; } private Debug() { // Utility class } public static boolean isDebugging() { return IS_DEBUG; } public static Level getDebugLogLevel() { return isDebugging() ? Level.INFO : Level.FINE; } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/internal/Debug.java
992
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.devtools; public class Reply {}
SeleniumHQ/selenium
java/src/org/openqa/selenium/devtools/Reply.java
993
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.docker; import java.util.Objects; import org.openqa.selenium.internal.Require; public class ImageId { private final String id; public ImageId(String id) { this.id = Require.nonNull("Image id", id); } @Override public String toString() { return id; } @Override public boolean equals(Object o) { if (!(o instanceof ImageId)) { return false; } ImageId that = (ImageId) o; return Objects.equals(this.id, that.id); } @Override public int hashCode() { return Objects.hash(id); } private String toJson() { return id; } private static ImageId fromJson(String raw) { return new ImageId(raw); } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/docker/ImageId.java
994
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.openqa.selenium;

import java.io.IOException;
import java.nio.file.Path;
import java.util.List;

/** Indicates that a driver supports downloading remote files. */
public interface HasDownloads {

  /**
   * Requires downloads to be enabled.
   *
   * <p>TODO: Create an example in the documentation and provide a link to it.
   *
   * @param capabilities the capabilities object
   * @throws WebDriverException if capability to enable downloads is not set
   */
  default void requireDownloadsEnabled(Capabilities capabilities) {
    // Guard clause: fail fast unless the session was started with downloads enabled.
    if (capabilities.is("se:downloadsEnabled") == false) {
      throw new WebDriverException(
          "You must enable downloads in order to work with downloadable files.");
    }
  }

  /**
   * Gets the downloadable files.
   *
   * @return a list of downloadable files for each key
   */
  List<String> getDownloadableFiles();

  /**
   * Downloads a file to a given location.
   *
   * @param fileName the name of the file to be downloaded
   * @param targetLocation the location where the file will be downloaded to
   * @throws IOException if an I/O error occurs while downloading the file
   */
  void downloadFile(String fileName, Path targetLocation) throws IOException;

  /** Deletes the downloadable files. */
  void deleteDownloadableFiles();
}
SeleniumHQ/selenium
java/src/org/openqa/selenium/HasDownloads.java
995
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.chromium; import java.util.Map; import org.openqa.selenium.Beta; /** Used by classes to indicate that they can execute Command DevTools commands. */ @Beta public interface HasCdp { /** * Execute a Chrome DevTools Protocol command and get returned result. The command and command * args should follow <a href="https://chromedevtools.github.io/devtools-protocol/">chrome * devtools protocol domains/commands</a>. * * <p>It is strongly encouraged to use {@link org.openqa.selenium.devtools.DevTools} API instead * of this * * @param commandName the command to execute with Chrome Dev Tools. * @param parameters any information needed to execute the Dev Tools command. * @return the name and value of the response. */ Map<String, Object> executeCdpCommand(String commandName, Map<String, Object> parameters); }
SeleniumHQ/selenium
java/src/org/openqa/selenium/chromium/HasCdp.java
996
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.events; import java.io.Closeable; import org.openqa.selenium.status.HasReadyState; public interface EventBus extends Closeable, HasReadyState { void addListener(EventListener<?> listener); void fire(Event event); void close(); }
SeleniumHQ/selenium
java/src/org/openqa/selenium/events/EventBus.java
997
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.grid; public class ActualMain {}
SeleniumHQ/selenium
java/src/org/openqa/selenium/grid/ActualMain.java
998
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.grid.node; import static java.net.HttpURLConnection.HTTP_INTERNAL_ERROR; import static java.net.HttpURLConnection.HTTP_OK; import java.io.UncheckedIOException; import java.util.Objects; import org.openqa.selenium.json.Json; import org.openqa.selenium.remote.http.HttpHandler; import org.openqa.selenium.remote.http.HttpRequest; import org.openqa.selenium.remote.http.HttpResponse; public class Drain implements HttpHandler { private final Node node; private final Json json; public Drain(Node node, Json json) { this.node = Objects.requireNonNull(node); this.json = Objects.requireNonNull(json); } @Override public HttpResponse execute(HttpRequest req) throws UncheckedIOException { this.node.drain(); HttpResponse response = new HttpResponse(); if (this.node.isDraining()) { response.setStatus(HTTP_OK); } else { response.setStatus(HTTP_INTERNAL_ERROR); } return response; } }
SeleniumHQ/selenium
java/src/org/openqa/selenium/grid/node/Drain.java
999
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.support.ui; import java.util.function.Function; /** * A generic interface for waiting until a condition is true or not null. The condition may take a * single argument of type . * * @param <F> the argument to pass to any function called */ public interface Wait<F> { /** * Implementations should wait until the condition evaluates to a value that is neither null nor * false. Because of this contract, the return type must not be Void. * * <p>If the condition does not become true within a certain time (as defined by the implementing * class), this method will throw a non-specified {@link Throwable}. This is so that an * implementor may throw whatever is idiomatic for a given test infrastructure (e.g. JUnit4 would * throw {@link AssertionError}). * * @param <T> the return type of the method, which must not be Void * @param isTrue the parameter to pass to the {@link ExpectedCondition} * @return truthy value from the isTrue condition */ <T> T until(Function<? super F, T> isTrue); }
SeleniumHQ/selenium
java/src/org/openqa/selenium/support/ui/Wait.java
1,000
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.math; import java.io.Serializable; /** Encapsulates a <a href="http://en.wikipedia.org/wiki/Row-major_order#Column-major_order">column major</a> 4 by 4 matrix. Like * the {@link Vector3} class it allows the chaining of methods by returning a reference to itself. For example: * * <pre> * Matrix4 mat = new Matrix4().trn(position).mul(camera.combined); * </pre> * * @author [email protected] */ public class Matrix4 implements Serializable { private static final long serialVersionUID = -2717655254359579617L; /** XX: Typically the unrotated X component for scaling, also the cosine of the angle when rotated on the Y and/or Z axis. On * Vector3 multiplication this value is multiplied with the source X component and added to the target X component. */ public static final int M00 = 0; /** XY: Typically the negative sine of the angle when rotated on the Z axis. On Vector3 multiplication this value is multiplied * with the source Y component and added to the target X component. */ public static final int M01 = 4; /** XZ: Typically the sine of the angle when rotated on the Y axis. On Vector3 multiplication this value is multiplied with the * source Z component and added to the target X component. 
*/ public static final int M02 = 8; /** XW: Typically the translation of the X component. On Vector3 multiplication this value is added to the target X * component. */ public static final int M03 = 12; /** YX: Typically the sine of the angle when rotated on the Z axis. On Vector3 multiplication this value is multiplied with the * source X component and added to the target Y component. */ public static final int M10 = 1; /** YY: Typically the unrotated Y component for scaling, also the cosine of the angle when rotated on the X and/or Z axis. On * Vector3 multiplication this value is multiplied with the source Y component and added to the target Y component. */ public static final int M11 = 5; /** YZ: Typically the negative sine of the angle when rotated on the X axis. On Vector3 multiplication this value is multiplied * with the source Z component and added to the target Y component. */ public static final int M12 = 9; /** YW: Typically the translation of the Y component. On Vector3 multiplication this value is added to the target Y * component. */ public static final int M13 = 13; /** ZX: Typically the negative sine of the angle when rotated on the Y axis. On Vector3 multiplication this value is multiplied * with the source X component and added to the target Z component. */ public static final int M20 = 2; /** ZY: Typical the sine of the angle when rotated on the X axis. On Vector3 multiplication this value is multiplied with the * source Y component and added to the target Z component. */ public static final int M21 = 6; /** ZZ: Typically the unrotated Z component for scaling, also the cosine of the angle when rotated on the X and/or Y axis. On * Vector3 multiplication this value is multiplied with the source Z component and added to the target Z component. */ public static final int M22 = 10; /** ZW: Typically the translation of the Z component. On Vector3 multiplication this value is added to the target Z * component. 
*/ public static final int M23 = 14; /** WX: Typically the value zero. On Vector3 multiplication this value is ignored. */ public static final int M30 = 3; /** WY: Typically the value zero. On Vector3 multiplication this value is ignored. */ public static final int M31 = 7; /** WZ: Typically the value zero. On Vector3 multiplication this value is ignored. */ public static final int M32 = 11; /** WW: Typically the value one. On Vector3 multiplication this value is ignored. */ public static final int M33 = 15; static final Quaternion quat = new Quaternion(); static final Quaternion quat2 = new Quaternion(); static final Vector3 l_vez = new Vector3(); static final Vector3 l_vex = new Vector3(); static final Vector3 l_vey = new Vector3(); static final Vector3 tmpVec = new Vector3(); static final Matrix4 tmpMat = new Matrix4(); static final Vector3 right = new Vector3(); static final Vector3 tmpForward = new Vector3(); static final Vector3 tmpUp = new Vector3(); public final float val[] = new float[16]; /** Constructs an identity matrix */ public Matrix4 () { val[M00] = 1f; val[M11] = 1f; val[M22] = 1f; val[M33] = 1f; } /** Constructs a matrix from the given matrix. * @param matrix The matrix to copy. (This matrix is not modified) */ public Matrix4 (Matrix4 matrix) { set(matrix); } /** Constructs a matrix from the given float array. The array must have at least 16 elements; the first 16 will be copied. * @param values The float array to copy. Remember that this matrix is in * <a href="http://en.wikipedia.org/wiki/Row-major_order">column major</a> order. (The float array is not * modified) */ public Matrix4 (float[] values) { set(values); } /** Constructs a rotation matrix from the given {@link Quaternion}. * @param quaternion The quaternion to be copied. (The quaternion is not modified) */ public Matrix4 (Quaternion quaternion) { set(quaternion); } /** Construct a matrix from the given translation, rotation and scale. 
* @param position The translation * @param rotation The rotation, must be normalized * @param scale The scale */ public Matrix4 (Vector3 position, Quaternion rotation, Vector3 scale) { set(position, rotation, scale); } /** Sets the matrix to the given matrix. * @param matrix The matrix that is to be copied. (The given matrix is not modified) * @return This matrix for the purpose of chaining methods together. */ public Matrix4 set (Matrix4 matrix) { return set(matrix.val); } /** Sets the matrix to the given matrix as a float array. The float array must have at least 16 elements; the first 16 will be * copied. * * @param values The matrix, in float form, that is to be copied. Remember that this matrix is in * <a href="http://en.wikipedia.org/wiki/Row-major_order">column major</a> order. * @return This matrix for the purpose of chaining methods together. */ public Matrix4 set (float[] values) { System.arraycopy(values, 0, val, 0, val.length); return this; } /** Sets the matrix to a rotation matrix representing the quaternion. * @param quaternion The quaternion that is to be used to set this matrix. * @return This matrix for the purpose of chaining methods together. */ public Matrix4 set (Quaternion quaternion) { return set(quaternion.x, quaternion.y, quaternion.z, quaternion.w); } /** Sets the matrix to a rotation matrix representing the quaternion. * * @param quaternionX The X component of the quaternion that is to be used to set this matrix. * @param quaternionY The Y component of the quaternion that is to be used to set this matrix. * @param quaternionZ The Z component of the quaternion that is to be used to set this matrix. * @param quaternionW The W component of the quaternion that is to be used to set this matrix. * @return This matrix for the purpose of chaining methods together. 
 */
public Matrix4 set (float quaternionX, float quaternionY, float quaternionZ, float quaternionW) {
	// Rotation only: delegate to the full variant with a zero translation.
	return set(0f, 0f, 0f, quaternionX, quaternionY, quaternionZ, quaternionW);
}

/** Set this matrix to the specified translation and rotation.
 * @param position The translation
 * @param orientation The rotation, must be normalized
 * @return This matrix for chaining */
public Matrix4 set (Vector3 position, Quaternion orientation) {
	return set(position.x, position.y, position.z, orientation.x, orientation.y, orientation.z, orientation.w);
}

/** Sets the matrix to a rotation matrix representing the translation and quaternion.
 * @param translationX The X component of the translation that is to be used to set this matrix.
 * @param translationY The Y component of the translation that is to be used to set this matrix.
 * @param translationZ The Z component of the translation that is to be used to set this matrix.
 * @param quaternionX The X component of the quaternion that is to be used to set this matrix.
 * @param quaternionY The Y component of the quaternion that is to be used to set this matrix.
 * @param quaternionZ The Z component of the quaternion that is to be used to set this matrix.
 * @param quaternionW The W component of the quaternion that is to be used to set this matrix.
 * @return This matrix for the purpose of chaining methods together.
 */
public Matrix4 set (float translationX, float translationY, float translationZ, float quaternionX, float quaternionY,
	float quaternionZ, float quaternionW) {
	// Standard quaternion-to-rotation-matrix expansion; assumes the quaternion is normalized.
	final float xs = quaternionX * 2f, ys = quaternionY * 2f, zs = quaternionZ * 2f;
	final float wx = quaternionW * xs, wy = quaternionW * ys, wz = quaternionW * zs;
	final float xx = quaternionX * xs, xy = quaternionX * ys, xz = quaternionX * zs;
	final float yy = quaternionY * ys, yz = quaternionY * zs, zz = quaternionZ * zs;

	val[M00] = 1f - (yy + zz);
	val[M01] = xy - wz;
	val[M02] = xz + wy;
	val[M03] = translationX;

	val[M10] = xy + wz;
	val[M11] = 1f - (xx + zz);
	val[M12] = yz - wx;
	val[M13] = translationY;

	val[M20] = xz - wy;
	val[M21] = yz + wx;
	val[M22] = 1f - (xx + yy);
	val[M23] = translationZ;

	// Bottom row is (0, 0, 0, 1) for an affine transform.
	val[M30] = 0f;
	val[M31] = 0f;
	val[M32] = 0f;
	val[M33] = 1f;
	return this;
}

/** Set this matrix to the specified translation, rotation and scale.
 * @param position The translation
 * @param orientation The rotation, must be normalized
 * @param scale The scale
 * @return This matrix for chaining */
public Matrix4 set (Vector3 position, Quaternion orientation, Vector3 scale) {
	return set(position.x, position.y, position.z, orientation.x, orientation.y, orientation.z, orientation.w, scale.x,
		scale.y, scale.z);
}

/** Sets the matrix to a rotation matrix representing the translation and quaternion.
 * @param translationX The X component of the translation that is to be used to set this matrix.
 * @param translationY The Y component of the translation that is to be used to set this matrix.
 * @param translationZ The Z component of the translation that is to be used to set this matrix.
 * @param quaternionX The X component of the quaternion that is to be used to set this matrix.
 * @param quaternionY The Y component of the quaternion that is to be used to set this matrix.
 * @param quaternionZ The Z component of the quaternion that is to be used to set this matrix.
 * @param quaternionW The W component of the quaternion that is to be used to set this matrix.
 * @param scaleX The X component of the scaling that is to be used to set this matrix.
 * @param scaleY The Y component of the scaling that is to be used to set this matrix.
 * @param scaleZ The Z component of the scaling that is to be used to set this matrix.
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 set (float translationX, float translationY, float translationZ, float quaternionX, float quaternionY,
	float quaternionZ, float quaternionW, float scaleX, float scaleY, float scaleZ) {
	// Same quaternion expansion as the 7-argument set(), with each row of the
	// rotation part multiplied by the per-axis scale factor.
	final float xs = quaternionX * 2f, ys = quaternionY * 2f, zs = quaternionZ * 2f;
	final float wx = quaternionW * xs, wy = quaternionW * ys, wz = quaternionW * zs;
	final float xx = quaternionX * xs, xy = quaternionX * ys, xz = quaternionX * zs;
	final float yy = quaternionY * ys, yz = quaternionY * zs, zz = quaternionZ * zs;

	val[M00] = scaleX * (1.0f - (yy + zz));
	val[M01] = scaleY * (xy - wz);
	val[M02] = scaleZ * (xz + wy);
	val[M03] = translationX;

	val[M10] = scaleX * (xy + wz);
	val[M11] = scaleY * (1.0f - (xx + zz));
	val[M12] = scaleZ * (yz - wx);
	val[M13] = translationY;

	val[M20] = scaleX * (xz - wy);
	val[M21] = scaleY * (yz + wx);
	val[M22] = scaleZ * (1.0f - (xx + yy));
	val[M23] = translationZ;

	val[M30] = 0f;
	val[M31] = 0f;
	val[M32] = 0f;
	val[M33] = 1f;
	return this;
}

/** Sets the four columns of the matrix which correspond to the x-, y- and z-axis of the vector space this matrix creates as
 * well as the 4th column representing the translation of any point that is multiplied by this matrix.
 * @param xAxis The x-axis.
 * @param yAxis The y-axis.
 * @param zAxis The z-axis.
 * @param pos The translation vector.
 */
public Matrix4 set (Vector3 xAxis, Vector3 yAxis, Vector3 zAxis, Vector3 pos) {
	// xAxis -> M00..M02, yAxis -> M10..M12, zAxis -> M20..M22, pos -> M03/M13/M23.
	val[M00] = xAxis.x;
	val[M01] = xAxis.y;
	val[M02] = xAxis.z;
	val[M10] = yAxis.x;
	val[M11] = yAxis.y;
	val[M12] = yAxis.z;
	val[M20] = zAxis.x;
	val[M21] = zAxis.y;
	val[M22] = zAxis.z;
	val[M03] = pos.x;
	val[M13] = pos.y;
	val[M23] = pos.z;
	val[M30] = 0f;
	val[M31] = 0f;
	val[M32] = 0f;
	val[M33] = 1f;
	return this;
}

/** @return a copy of this matrix */
public Matrix4 cpy () {
	return new Matrix4(this);
}

/** Adds a translational component to the matrix in the 4th column. The other columns are untouched.
 * @param vector The translation vector to add to the current matrix. (This vector is not modified)
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 trn (Vector3 vector) {
	val[M03] += vector.x;
	val[M13] += vector.y;
	val[M23] += vector.z;
	return this;
}

/** Adds a translational component to the matrix in the 4th column. The other columns are untouched.
 * @param x The x-component of the translation vector.
 * @param y The y-component of the translation vector.
 * @param z The z-component of the translation vector.
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 trn (float x, float y, float z) {
	val[M03] += x;
	val[M13] += y;
	val[M23] += z;
	return this;
}

/** @return the backing float array (not a copy — mutations write through to this matrix) */
public float[] getValues () {
	return val;
}

/** Postmultiplies this matrix with the given matrix, storing the result in this matrix. For example:
 *
 * <pre>
 * A.mul(B) results in A := AB.
 * </pre>
 *
 * @param matrix The other matrix to multiply by.
 * @return This matrix for the purpose of chaining operations together. */
public Matrix4 mul (Matrix4 matrix) {
	// Delegates to the static/native mul(float[], float[]) which writes into the first argument.
	mul(val, matrix.val);
	return this;
}

/** Premultiplies this matrix with the given matrix, storing the result in this matrix. For example:
 *
 * <pre>
 * A.mulLeft(B) results in A := BA.
 * </pre>
 *
 * @param matrix The other matrix to multiply by.
 * @return This matrix for the purpose of chaining operations together. */
public Matrix4 mulLeft (Matrix4 matrix) {
	// tmpMat := matrix, then tmpMat := tmpMat * this, finally copy the result back into this.
	tmpMat.set(matrix);
	mul(tmpMat.val, val);
	return set(tmpMat);
}

/** Transposes the matrix.
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 tra () {
	// Save the upper triangle, then swap both triangles in place; the diagonal is untouched.
	float m01 = val[M01];
	float m02 = val[M02];
	float m03 = val[M03];
	float m12 = val[M12];
	float m13 = val[M13];
	float m23 = val[M23];
	val[M01] = val[M10];
	val[M02] = val[M20];
	val[M03] = val[M30];
	val[M10] = m01;
	val[M12] = val[M21];
	val[M13] = val[M31];
	val[M20] = m02;
	val[M21] = m12;
	val[M23] = val[M32];
	val[M30] = m03;
	val[M31] = m13;
	val[M32] = m23;
	return this;
}

/** Sets the matrix to an identity matrix.
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 idt () {
	val[M00] = 1f;
	val[M01] = 0f;
	val[M02] = 0f;
	val[M03] = 0f;
	val[M10] = 0f;
	val[M11] = 1f;
	val[M12] = 0f;
	val[M13] = 0f;
	val[M20] = 0f;
	val[M21] = 0f;
	val[M22] = 1f;
	val[M23] = 0f;
	val[M30] = 0f;
	val[M31] = 0f;
	val[M32] = 0f;
	val[M33] = 1f;
	return this;
}

/** Inverts the matrix. Stores the result in this matrix.
 * @return This matrix for the purpose of chaining methods together.
 * @throws RuntimeException if the matrix is singular (not invertible) */
public Matrix4 inv () {
	// Full 4x4 Laplace-expansion determinant; computed first so a singular
	// matrix can be rejected before any element is overwritten.
	float l_det = val[M30] * val[M21] * val[M12] * val[M03] - val[M20] * val[M31] * val[M12] * val[M03]
		- val[M30] * val[M11] * val[M22] * val[M03] + val[M10] * val[M31] * val[M22] * val[M03]
		+ val[M20] * val[M11] * val[M32] * val[M03] - val[M10] * val[M21] * val[M32] * val[M03]
		- val[M30] * val[M21] * val[M02] * val[M13] + val[M20] * val[M31] * val[M02] * val[M13]
		+ val[M30] * val[M01] * val[M22] * val[M13] - val[M00] * val[M31] * val[M22] * val[M13]
		- val[M20] * val[M01] * val[M32] * val[M13] + val[M00] * val[M21] * val[M32] * val[M13]
		+ val[M30] * val[M11] * val[M02] * val[M23] - val[M10] * val[M31] * val[M02] * val[M23]
		- val[M30] * val[M01] * val[M12] * val[M23] + val[M00] * val[M31] * val[M12] * val[M23]
		+ val[M10] * val[M01] * val[M32] * val[M23] - val[M00] * val[M11] * val[M32] * val[M23]
		- val[M20] * val[M11] * val[M02] * val[M33] + val[M10] * val[M21] * val[M02] * val[M33]
		+ val[M20] * val[M01] * val[M12] * val[M33] - val[M00] * val[M21] * val[M12] * val[M33]
		- val[M10] * val[M01] * val[M22] * val[M33] + val[M00] * val[M11] * val[M22] * val[M33];
	if (l_det == 0f) throw new RuntimeException("non-invertible matrix");
	// Cofactor (adjugate) entries, computed into locals so the reads below
	// always see the original matrix values.
	float m00 = val[M12] * val[M23] * val[M31] - val[M13] * val[M22] * val[M31] + val[M13] * val[M21] * val[M32]
		- val[M11] * val[M23] * val[M32] - val[M12] * val[M21] * val[M33] + val[M11] * val[M22] * val[M33];
	float m01 = val[M03] * val[M22] * val[M31] - val[M02] * val[M23] * val[M31] - val[M03] * val[M21] * val[M32]
		+ val[M01] * val[M23] * val[M32] + val[M02] * val[M21] * val[M33] - val[M01] * val[M22] * val[M33];
	float m02 = val[M02] * val[M13] * val[M31] - val[M03] * val[M12] * val[M31] + val[M03] * val[M11] * val[M32]
		- val[M01] * val[M13] * val[M32] - val[M02] * val[M11] * val[M33] + val[M01] * val[M12] * val[M33];
	float m03 = val[M03] * val[M12] * val[M21] - val[M02] * val[M13] * val[M21] - val[M03] * val[M11] * val[M22]
		+ val[M01] * val[M13] * val[M22] + val[M02] * val[M11] * val[M23] - val[M01] * val[M12] * val[M23];
	float m10 = val[M13] * val[M22] * val[M30] - val[M12] * val[M23] * val[M30] - val[M13] * val[M20] * val[M32]
		+ val[M10] * val[M23] * val[M32] + val[M12] * val[M20] * val[M33] - val[M10] * val[M22] * val[M33];
	float m11 = val[M02] * val[M23] * val[M30] - val[M03] * val[M22] * val[M30] + val[M03] * val[M20] * val[M32]
		- val[M00] * val[M23] * val[M32] - val[M02] * val[M20] * val[M33] + val[M00] * val[M22] * val[M33];
	float m12 = val[M03] * val[M12] * val[M30] - val[M02] * val[M13] * val[M30] - val[M03] * val[M10] * val[M32]
		+ val[M00] * val[M13] * val[M32] + val[M02] * val[M10] * val[M33] - val[M00] * val[M12] * val[M33];
	float m13 = val[M02] * val[M13] * val[M20] - val[M03] * val[M12] * val[M20] + val[M03] * val[M10] * val[M22]
		- val[M00] * val[M13] * val[M22] - val[M02] * val[M10] * val[M23] + val[M00] * val[M12] * val[M23];
	float m20 = val[M11] * val[M23] * val[M30] - val[M13] * val[M21] * val[M30] + val[M13] * val[M20] * val[M31]
		- val[M10] * val[M23] * val[M31] - val[M11] * val[M20] * val[M33] + val[M10] * val[M21] * val[M33];
	float m21 = val[M03] * val[M21] * val[M30] - val[M01] * val[M23] * val[M30] - val[M03] * val[M20] * val[M31]
		+ val[M00] * val[M23] * val[M31] + val[M01] * val[M20] * val[M33] - val[M00] * val[M21] * val[M33];
	float m22 = val[M01] * val[M13] * val[M30] - val[M03] * val[M11] * val[M30] + val[M03] * val[M10] * val[M31]
		- val[M00] * val[M13] * val[M31] - val[M01] * val[M10] * val[M33] + val[M00] * val[M11] * val[M33];
	float m23 = val[M03] * val[M11] * val[M20] - val[M01] * val[M13] * val[M20] - val[M03] * val[M10] * val[M21]
		+ val[M00] * val[M13] * val[M21] + val[M01] * val[M10] * val[M23] - val[M00] * val[M11] * val[M23];
	float m30 = val[M12] * val[M21] * val[M30] - val[M11] * val[M22] * val[M30] - val[M12] * val[M20] * val[M31]
		+ val[M10] * val[M22] * val[M31] + val[M11] * val[M20] * val[M32] - val[M10] * val[M21] * val[M32];
	float m31 = val[M01] * val[M22] * val[M30] - val[M02] * val[M21] * val[M30] + val[M02] * val[M20] * val[M31]
		- val[M00] * val[M22] * val[M31] - val[M01] * val[M20] * val[M32] + val[M00] * val[M21] * val[M32];
	float m32 = val[M02] * val[M11] * val[M30] - val[M01] * val[M12] * val[M30] - val[M02] * val[M10] * val[M31]
		+ val[M00] * val[M12] * val[M31] + val[M01] * val[M10] * val[M32] - val[M00] * val[M11] * val[M32];
	float m33 = val[M01] * val[M12] * val[M20] - val[M02] * val[M11] * val[M20] + val[M02] * val[M10] * val[M21]
		- val[M00] * val[M12] * val[M21] - val[M01] * val[M10] * val[M22] + val[M00] * val[M11] * val[M22];
	float inv_det = 1.0f / l_det;
	val[M00] = m00 * inv_det;
	val[M10] = m10 * inv_det;
	val[M20] = m20 * inv_det;
	val[M30] = m30 * inv_det;
	val[M01] = m01 * inv_det;
	val[M11] = m11 * inv_det;
	val[M21] = m21 * inv_det;
	val[M31] = m31 * inv_det;
	val[M02] = m02 * inv_det;
	val[M12] = m12 * inv_det;
	val[M22] = m22 * inv_det;
	val[M32] = m32 * inv_det;
	val[M03] = m03 * inv_det;
	val[M13] = m13 * inv_det;
	val[M23] = m23 * inv_det;
	val[M33] = m33 * inv_det;
	return this;
}

/** @return The determinant of this matrix */
public float det () {
	return val[M30] * val[M21] * val[M12] * val[M03] - val[M20] * val[M31] * val[M12] * val[M03]
		- val[M30] * val[M11] * val[M22] * val[M03] + val[M10] * val[M31] * val[M22] * val[M03]
		+ val[M20] * val[M11] * val[M32] * val[M03] - val[M10] * val[M21] * val[M32] * val[M03]
		- val[M30] * val[M21] * val[M02] * val[M13] + val[M20] * val[M31] * val[M02] * val[M13]
		+ val[M30] * val[M01] * val[M22] * val[M13] - val[M00] * val[M31] * val[M22] * val[M13]
		- val[M20] * val[M01] * val[M32] * val[M13] + val[M00] * val[M21] * val[M32] * val[M13]
		+ val[M30] * val[M11] * val[M02] * val[M23] - val[M10] * val[M31] * val[M02] * val[M23]
		- val[M30] * val[M01] * val[M12] * val[M23] + val[M00] * val[M31] * val[M12] * val[M23]
		+ val[M10] * val[M01] * val[M32] * val[M23] - val[M00] * val[M11] * val[M32] * val[M23]
		- val[M20] * val[M11] * val[M02] * val[M33] + val[M10] * val[M21] * val[M02] * val[M33]
		+ val[M20] * val[M01] * val[M12] * val[M33] - val[M00] * val[M21] * val[M12] * val[M33]
		- val[M10] * val[M01] * val[M22] * val[M33] + val[M00] * val[M11] * val[M22] * val[M33];
}

/** @return The determinant of the 3x3 upper left matrix */
public float det3x3 () {
	return val[M00] * val[M11] * val[M22] + val[M01] * val[M12] * val[M20] + val[M02] * val[M10] * val[M21]
		- val[M00] * val[M12] * val[M21] - val[M01] * val[M10] * val[M22] - val[M02] * val[M11] * val[M20];
}

/** Sets the matrix to a projection matrix with a near- and far plane, a field of view in degrees and an aspect ratio. Note
 * that the field of view specified is the angle in degrees for the height, the field of view for the width will be calculated
 * according to the aspect ratio.
 * @param near The near plane
 * @param far The far plane
 * @param fovy The field of view of the height in degrees
 * @param aspectRatio The "width over height" aspect ratio
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 setToProjection (float near, float far, float fovy, float aspectRatio) {
	idt();
	// Focal distance from the vertical field of view: 1 / tan(fovy / 2).
	float l_fd = (float)(1.0 / Math.tan((fovy * (Math.PI / 180)) / 2.0));
	float l_a1 = (far + near) / (near - far);
	float l_a2 = (2 * far * near) / (near - far);
	val[M00] = l_fd / aspectRatio;
	val[M10] = 0;
	val[M20] = 0;
	val[M30] = 0;
	val[M01] = 0;
	val[M11] = l_fd;
	val[M21] = 0;
	val[M31] = 0;
	val[M02] = 0;
	val[M12] = 0;
	val[M22] = l_a1;
	val[M32] = -1;
	val[M03] = 0;
	val[M13] = 0;
	val[M23] = l_a2;
	val[M33] = 0;

	return this;
}

/** Sets the matrix to a projection matrix with a near/far plane, and left, bottom, right and top specifying the points on the
 * near plane that are mapped to the lower left and upper right corners of the viewport. This allows to create projection
 * matrix with off-center vanishing point.
 * @param left
 * @param right
 * @param bottom
 * @param top
 * @param near The near plane
 * @param far The far plane
 * @return This matrix for the purpose of chaining methods together.
*/ public Matrix4 setToProjection (float left, float right, float bottom, float top, float near, float far) { float x = 2.0f * near / (right - left); float y = 2.0f * near / (top - bottom); float a = (right + left) / (right - left); float b = (top + bottom) / (top - bottom); float l_a1 = (far + near) / (near - far); float l_a2 = (2 * far * near) / (near - far); val[M00] = x; val[M10] = 0; val[M20] = 0; val[M30] = 0; val[M01] = 0; val[M11] = y; val[M21] = 0; val[M31] = 0; val[M02] = a; val[M12] = b; val[M22] = l_a1; val[M32] = -1; val[M03] = 0; val[M13] = 0; val[M23] = l_a2; val[M33] = 0; return this; } /** Sets this matrix to an orthographic projection matrix with the origin at (x,y) extending by width and height. The near * plane is set to 0, the far plane is set to 1. * @param x The x-coordinate of the origin * @param y The y-coordinate of the origin * @param width The width * @param height The height * @return This matrix for the purpose of chaining methods together. */ public Matrix4 setToOrtho2D (float x, float y, float width, float height) { setToOrtho(x, x + width, y, y + height, 0, 1); return this; } /** Sets this matrix to an orthographic projection matrix with the origin at (x,y) extending by width and height, having a near * and far plane. * @param x The x-coordinate of the origin * @param y The y-coordinate of the origin * @param width The width * @param height The height * @param near The near plane * @param far The far plane * @return This matrix for the purpose of chaining methods together. 
 */
public Matrix4 setToOrtho2D (float x, float y, float width, float height, float near, float far) {
	setToOrtho(x, x + width, y, y + height, near, far);
	return this;
}

/** Sets the matrix to an orthographic projection like glOrtho (http://www.opengl.org/sdk/docs/man/xhtml/glOrtho.xml) following
 * the OpenGL equivalent
 * @param left The left clipping plane
 * @param right The right clipping plane
 * @param bottom The bottom clipping plane
 * @param top The top clipping plane
 * @param near The near clipping plane
 * @param far The far clipping plane
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 setToOrtho (float left, float right, float bottom, float top, float near, float far) {
	// Scale each axis to the [-1, 1] clip range...
	float x_orth = 2 / (right - left);
	float y_orth = 2 / (top - bottom);
	float z_orth = -2 / (far - near);

	// ...and translate the volume's center to the origin.
	float tx = -(right + left) / (right - left);
	float ty = -(top + bottom) / (top - bottom);
	float tz = -(far + near) / (far - near);

	val[M00] = x_orth;
	val[M10] = 0;
	val[M20] = 0;
	val[M30] = 0;
	val[M01] = 0;
	val[M11] = y_orth;
	val[M21] = 0;
	val[M31] = 0;
	val[M02] = 0;
	val[M12] = 0;
	val[M22] = z_orth;
	val[M32] = 0;
	val[M03] = tx;
	val[M13] = ty;
	val[M23] = tz;
	val[M33] = 1;

	return this;
}

/** Sets the 4th column to the translation vector.
 * @param vector The translation vector
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 setTranslation (Vector3 vector) {
	val[M03] = vector.x;
	val[M13] = vector.y;
	val[M23] = vector.z;
	return this;
}

/** Sets the 4th column to the translation vector.
 * @param x The X coordinate of the translation vector
 * @param y The Y coordinate of the translation vector
 * @param z The Z coordinate of the translation vector
 * @return This matrix for the purpose of chaining methods together.
*/ public Matrix4 setTranslation (float x, float y, float z) { val[M03] = x; val[M13] = y; val[M23] = z; return this; } /** Sets this matrix to a translation matrix, overwriting it first by an identity matrix and then setting the 4th column to the * translation vector. * @param vector The translation vector * @return This matrix for the purpose of chaining methods together. */ public Matrix4 setToTranslation (Vector3 vector) { idt(); val[M03] = vector.x; val[M13] = vector.y; val[M23] = vector.z; return this; } /** Sets this matrix to a translation matrix, overwriting it first by an identity matrix and then setting the 4th column to the * translation vector. * @param x The x-component of the translation vector. * @param y The y-component of the translation vector. * @param z The z-component of the translation vector. * @return This matrix for the purpose of chaining methods together. */ public Matrix4 setToTranslation (float x, float y, float z) { idt(); val[M03] = x; val[M13] = y; val[M23] = z; return this; } /** Sets this matrix to a translation and scaling matrix by first overwriting it with an identity and then setting the * translation vector in the 4th column and the scaling vector in the diagonal. * @param translation The translation vector * @param scaling The scaling vector * @return This matrix for the purpose of chaining methods together. */ public Matrix4 setToTranslationAndScaling (Vector3 translation, Vector3 scaling) { idt(); val[M03] = translation.x; val[M13] = translation.y; val[M23] = translation.z; val[M00] = scaling.x; val[M11] = scaling.y; val[M22] = scaling.z; return this; } /** Sets this matrix to a translation and scaling matrix by first overwriting it with an identity and then setting the * translation vector in the 4th column and the scaling vector in the diagonal. 
 * @param translationX The x-component of the translation vector
 * @param translationY The y-component of the translation vector
 * @param translationZ The z-component of the translation vector
 * @param scalingX The x-component of the scaling vector
 * @param scalingY The y-component of the scaling vector
 * @param scalingZ The z-component of the scaling vector
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 setToTranslationAndScaling (float translationX, float translationY, float translationZ, float scalingX,
	float scalingY, float scalingZ) {
	idt();
	val[M03] = translationX;
	val[M13] = translationY;
	val[M23] = translationZ;
	val[M00] = scalingX;
	val[M11] = scalingY;
	val[M22] = scalingZ;
	return this;
}

/** Sets the matrix to a rotation matrix around the given axis.
 * @param axis The axis
 * @param degrees The angle in degrees
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 setToRotation (Vector3 axis, float degrees) {
	// Zero angle short-circuits to identity (avoids the quaternion round-trip).
	if (degrees == 0) {
		idt();
		return this;
	}
	return set(quat.set(axis, degrees));
}

/** Sets the matrix to a rotation matrix around the given axis.
 * @param axis The axis
 * @param radians The angle in radians
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 setToRotationRad (Vector3 axis, float radians) {
	if (radians == 0) {
		idt();
		return this;
	}
	return set(quat.setFromAxisRad(axis, radians));
}

/** Sets the matrix to a rotation matrix around the given axis.
 * @param axisX The x-component of the axis
 * @param axisY The y-component of the axis
 * @param axisZ The z-component of the axis
 * @param degrees The angle in degrees
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 setToRotation (float axisX, float axisY, float axisZ, float degrees) {
	if (degrees == 0) {
		idt();
		return this;
	}
	return set(quat.setFromAxis(axisX, axisY, axisZ, degrees));
}

/** Sets the matrix to a rotation matrix around the given axis.
 * @param axisX The x-component of the axis
 * @param axisY The y-component of the axis
 * @param axisZ The z-component of the axis
 * @param radians The angle in radians
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 setToRotationRad (float axisX, float axisY, float axisZ, float radians) {
	if (radians == 0) {
		idt();
		return this;
	}
	return set(quat.setFromAxisRad(axisX, axisY, axisZ, radians));
}

/** Set the matrix to a rotation matrix between two vectors.
 * @param v1 The base vector
 * @param v2 The target vector
 * @return This matrix for the purpose of chaining methods together */
public Matrix4 setToRotation (final Vector3 v1, final Vector3 v2) {
	return set(quat.setFromCross(v1, v2));
}

/** Set the matrix to a rotation matrix between two vectors.
 * @param x1 The base vectors x value
 * @param y1 The base vectors y value
 * @param z1 The base vectors z value
 * @param x2 The target vector x value
 * @param y2 The target vector y value
 * @param z2 The target vector z value
 * @return This matrix for the purpose of chaining methods together */
public Matrix4 setToRotation (final float x1, final float y1, final float z1, final float x2, final float y2, final float z2) {
	return set(quat.setFromCross(x1, y1, z1, x2, y2, z2));
}

/** Sets this matrix to a rotation matrix from the given euler angles.
 * @param yaw the yaw in degrees
 * @param pitch the pitch in degrees
 * @param roll the roll in degrees
 * @return This matrix */
public Matrix4 setFromEulerAngles (float yaw, float pitch, float roll) {
	quat.setEulerAngles(yaw, pitch, roll);
	return set(quat);
}

/** Sets this matrix to a rotation matrix from the given euler angles.
 * @param yaw the yaw in radians
 * @param pitch the pitch in radians
 * @param roll the roll in radians
 * @return This matrix */
public Matrix4 setFromEulerAnglesRad (float yaw, float pitch, float roll) {
	quat.setEulerAnglesRad(yaw, pitch, roll);
	return set(quat);
}

/** Sets this matrix to a scaling matrix
 * @param vector The scaling vector
 * @return This matrix for chaining. */
public Matrix4 setToScaling (Vector3 vector) {
	idt();
	val[M00] = vector.x;
	val[M11] = vector.y;
	val[M22] = vector.z;
	return this;
}

/** Sets this matrix to a scaling matrix
 * @param x The x-component of the scaling vector
 * @param y The y-component of the scaling vector
 * @param z The z-component of the scaling vector
 * @return This matrix for chaining. */
public Matrix4 setToScaling (float x, float y, float z) {
	idt();
	val[M00] = x;
	val[M11] = y;
	val[M22] = z;
	return this;
}

/** Sets the matrix to a look at matrix with a direction and an up vector. Multiply with a translation matrix to get a camera
 * model view matrix.
 * @param direction The direction vector
 * @param up The up vector
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 setToLookAt (Vector3 direction, Vector3 up) {
	// Build an orthonormal basis: z = normalized direction, x = direction x up, y = x x z.
	l_vez.set(direction).nor();
	l_vex.set(direction).crs(up).nor();
	l_vey.set(l_vex).crs(l_vez).nor();
	idt();
	val[M00] = l_vex.x;
	val[M01] = l_vex.y;
	val[M02] = l_vex.z;
	val[M10] = l_vey.x;
	val[M11] = l_vey.y;
	val[M12] = l_vey.z;
	// The z row is negated (OpenGL-style view matrix looks down -z).
	val[M20] = -l_vez.x;
	val[M21] = -l_vez.y;
	val[M22] = -l_vez.z;
	return this;
}

/** Sets this matrix to a look at matrix with the given position, target and up vector.
 * @param position the position
 * @param target the target
 * @param up the up vector
 * @return This matrix */
public Matrix4 setToLookAt (Vector3 position, Vector3 target, Vector3 up) {
	// View direction is target - position; the final multiply translates the eye to the origin.
	tmpVec.set(target).sub(position);
	setToLookAt(tmpVec, up);
	mul(tmpMat.setToTranslation(-position.x, -position.y, -position.z));

	return this;
}

/** Sets this matrix to a world (model) transform with the given position, forward direction and up vector. The basis vectors
 * are re-orthonormalized from forward and up; the z-axis column receives the negated forward direction.
 * @param position the translation
 * @param forward the forward direction (need not be normalized)
 * @param up the up vector (need not be normalized)
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 setToWorld (Vector3 position, Vector3 forward, Vector3 up) {
	tmpForward.set(forward).nor();
	right.set(tmpForward).crs(up).nor();
	tmpUp.set(right).crs(tmpForward).nor();

	set(right, tmpUp, tmpForward.scl(-1), position);
	return this;
}

/** Linearly interpolates between this matrix and the given matrix mixing by alpha
 * @param matrix the matrix
 * @param alpha the alpha value in the range [0,1]
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 lerp (Matrix4 matrix, float alpha) {
	// Component-wise lerp over all 16 values.
	for (int i = 0; i < 16; i++)
		val[i] = val[i] * (1 - alpha) + matrix.val[i] * alpha;
	return this;
}

/** Averages the given transform with this one and stores the result in this matrix. Translations and scales are lerped while
 * rotations are slerped.
 * @param other The other transform
 * @param w Weight of this transform; weight of the other transform is (1 - w)
 * @return This matrix for chaining */
public Matrix4 avg (Matrix4 other, float w) {
	// Decompose both transforms, blend each component, then recompose.
	getScale(tmpVec);
	other.getScale(tmpForward);
	getRotation(quat);
	other.getRotation(quat2);
	getTranslation(tmpUp);
	other.getTranslation(right);

	setToScaling(tmpVec.scl(w).add(tmpForward.scl(1 - w)));
	rotate(quat.slerp(quat2, 1 - w));
	setTranslation(tmpUp.scl(w).add(right.scl(1 - w)));
	return this;
}

/** Averages the given transforms and stores the result in this matrix. Translations and scales are lerped while rotations are
 * slerped. Does not destroy the data contained in t.
 * @param t List of transforms
 * @return This matrix for chaining */
public Matrix4 avg (Matrix4[] t) {
	// Equal weights: each transform contributes 1/t.length; rotations are blended
	// by multiplying the weighted (exp) quaternions and normalizing at the end.
	final float w = 1.0f / t.length;

	tmpVec.set(t[0].getScale(tmpUp).scl(w));
	quat.set(t[0].getRotation(quat2).exp(w));
	tmpForward.set(t[0].getTranslation(tmpUp).scl(w));

	for (int i = 1; i < t.length; i++) {
		tmpVec.add(t[i].getScale(tmpUp).scl(w));
		quat.mul(t[i].getRotation(quat2).exp(w));
		tmpForward.add(t[i].getTranslation(tmpUp).scl(w));
	}
	quat.nor();

	setToScaling(tmpVec);
	rotate(quat);
	setTranslation(tmpForward);
	return this;
}

/** Averages the given transforms with the given weights and stores the result in this matrix. Translations and scales are
 * lerped while rotations are slerped. Does not destroy the data contained in t or w; Sum of w_i must be equal to 1, or
 * unexpected results will occur.
 * @param t List of transforms
 * @param w List of weights
 * @return This matrix for chaining */
public Matrix4 avg (Matrix4[] t, float[] w) {
	tmpVec.set(t[0].getScale(tmpUp).scl(w[0]));
	quat.set(t[0].getRotation(quat2).exp(w[0]));
	tmpForward.set(t[0].getTranslation(tmpUp).scl(w[0]));

	for (int i = 1; i < t.length; i++) {
		tmpVec.add(t[i].getScale(tmpUp).scl(w[i]));
		quat.mul(t[i].getRotation(quat2).exp(w[i]));
		tmpForward.add(t[i].getTranslation(tmpUp).scl(w[i]));
	}
	quat.nor();

	setToScaling(tmpVec);
	rotate(quat);
	setTranslation(tmpForward);
	return this;
}

/** Sets this matrix to the given 3x3 matrix. The third column of this matrix is set to (0,0,1,0).
 * @param mat the matrix */
public Matrix4 set (Matrix3 mat) {
	// Raw-index mapping from the 3x3 (column-major) into this 4x4:
	// columns 0 and 1 come from the 3x3, column 2 is (0,0,1,0), column 3
	// carries the 3x3's third column as (tx, ty, 0, mat.val[8]).
	val[0] = mat.val[0];
	val[1] = mat.val[1];
	val[2] = mat.val[2];
	val[3] = 0;
	val[4] = mat.val[3];
	val[5] = mat.val[4];
	val[6] = mat.val[5];
	val[7] = 0;
	val[8] = 0;
	val[9] = 0;
	val[10] = 1;
	val[11] = 0;
	val[12] = mat.val[6];
	val[13] = mat.val[7];
	val[14] = 0;
	val[15] = mat.val[8];
	return this;
}

/** Sets this matrix to the given affine matrix.
 The values are mapped as follows:
 *
 * <pre>
 *      [  M00  M01   0   M02  ]
 *      [  M10  M11   0   M12  ]
 *      [   0    0    1    0   ]
 *      [   0    0    0    1   ]
 * </pre>
 *
 * @param affine the affine matrix
 * @return This matrix for chaining */
public Matrix4 set (Affine2 affine) {
	val[M00] = affine.m00;
	val[M10] = affine.m10;
	val[M20] = 0;
	val[M30] = 0;
	val[M01] = affine.m01;
	val[M11] = affine.m11;
	val[M21] = 0;
	val[M31] = 0;
	val[M02] = 0;
	val[M12] = 0;
	val[M22] = 1;
	val[M32] = 0;
	val[M03] = affine.m02;
	val[M13] = affine.m12;
	val[M23] = 0;
	val[M33] = 1;
	return this;
}

/** Assumes that this matrix is a 2D affine transformation, copying only the relevant components. The values are mapped as
 * follows:
 *
 * <pre>
 *      [  M00  M01   _   M02  ]
 *      [  M10  M11   _   M12  ]
 *      [   _    _    _    _   ]
 *      [   _    _    _    _   ]
 * </pre>
 *
 * @param affine the source matrix
 * @return This matrix for chaining */
public Matrix4 setAsAffine (Affine2 affine) {
	// Only the six 2D-affine components are written; everything else keeps its value.
	val[M00] = affine.m00;
	val[M10] = affine.m10;
	val[M01] = affine.m01;
	val[M11] = affine.m11;
	val[M03] = affine.m02;
	val[M13] = affine.m12;
	return this;
}

/** Assumes that both matrices are 2D affine transformations, copying only the relevant components.
 The copied values are:
 *
 * <pre>
 *      [  M00  M01   _   M03  ]
 *      [  M10  M11   _   M13  ]
 *      [   _    _    _    _   ]
 *      [   _    _    _    _   ]
 * </pre>
 *
 * @param mat the source matrix
 * @return This matrix for chaining */
public Matrix4 setAsAffine (Matrix4 mat) {
	val[M00] = mat.val[M00];
	val[M10] = mat.val[M10];
	val[M01] = mat.val[M01];
	val[M11] = mat.val[M11];
	val[M03] = mat.val[M03];
	val[M13] = mat.val[M13];
	return this;
}

/** Multiplies the diagonal scale components (M00, M11, M22) by the given vector; all other components are untouched.
 * @param scale the per-axis scale factors
 * @return This matrix for chaining */
public Matrix4 scl (Vector3 scale) {
	val[M00] *= scale.x;
	val[M11] *= scale.y;
	val[M22] *= scale.z;
	return this;
}

/** Multiplies the diagonal scale components (M00, M11, M22) by the given factors; all other components are untouched.
 * @param x the x-axis scale factor
 * @param y the y-axis scale factor
 * @param z the z-axis scale factor
 * @return This matrix for chaining */
public Matrix4 scl (float x, float y, float z) {
	val[M00] *= x;
	val[M11] *= y;
	val[M22] *= z;
	return this;
}

/** Multiplies the diagonal scale components (M00, M11, M22) by the given factor; all other components are untouched.
 * @param scale the uniform scale factor
 * @return This matrix for chaining */
public Matrix4 scl (float scale) {
	val[M00] *= scale;
	val[M11] *= scale;
	val[M22] *= scale;
	return this;
}

/** Copies the translation (4th column) into the given vector.
 * @param position the vector to receive the translation components
 * @return the provided vector for chaining */
public Vector3 getTranslation (Vector3 position) {
	position.x = val[M03];
	position.y = val[M13];
	position.z = val[M23];
	return position;
}

/** Gets the rotation of this matrix.
 * @param rotation The {@link Quaternion} to receive the rotation
 * @param normalizeAxes True to normalize the axes, necessary when the matrix might also include scaling.
 * @return The provided {@link Quaternion} for chaining. */
public Quaternion getRotation (Quaternion rotation, boolean normalizeAxes) {
	return rotation.setFromMatrix(normalizeAxes, this);
}

/** Gets the rotation of this matrix.
 * @param rotation The {@link Quaternion} to receive the rotation
 * @return The provided {@link Quaternion} for chaining.
*/ public Quaternion getRotation (Quaternion rotation) { return rotation.setFromMatrix(this); } /** @return the squared scale factor on the X axis */ public float getScaleXSquared () { return val[M00] * val[M00] + val[M01] * val[M01] + val[M02] * val[M02]; } /** @return the squared scale factor on the Y axis */ public float getScaleYSquared () { return val[M10] * val[M10] + val[M11] * val[M11] + val[M12] * val[M12]; } /** @return the squared scale factor on the Z axis */ public float getScaleZSquared () { return val[M20] * val[M20] + val[M21] * val[M21] + val[M22] * val[M22]; } /** @return the scale factor on the X axis (non-negative) */ public float getScaleX () { return (MathUtils.isZero(val[M01]) && MathUtils.isZero(val[M02])) ? Math.abs(val[M00]) : (float)Math.sqrt(getScaleXSquared()); } /** @return the scale factor on the Y axis (non-negative) */ public float getScaleY () { return (MathUtils.isZero(val[M10]) && MathUtils.isZero(val[M12])) ? Math.abs(val[M11]) : (float)Math.sqrt(getScaleYSquared()); } /** @return the scale factor on the X axis (non-negative) */ public float getScaleZ () { return (MathUtils.isZero(val[M20]) && MathUtils.isZero(val[M21])) ? Math.abs(val[M22]) : (float)Math.sqrt(getScaleZSquared()); } /** @param scale The vector which will receive the (non-negative) scale components on each axis. * @return The provided vector for chaining. */ public Vector3 getScale (Vector3 scale) { return scale.set(getScaleX(), getScaleY(), getScaleZ()); } /** removes the translational part and transposes the matrix. 
 * Note: this inverts and transposes in place, so the matrix itself is modified. */
public Matrix4 toNormalMatrix () {
	val[M03] = 0;
	val[M13] = 0;
	val[M23] = 0;
	return inv().tra();
}

/** @return a human-readable row-by-row dump of the 16 components, one bracketed row per line. */
public String toString () {
	return "[" + val[M00] + "|" + val[M01] + "|" + val[M02] + "|" + val[M03] + "]\n" //
		+ "[" + val[M10] + "|" + val[M11] + "|" + val[M12] + "|" + val[M13] + "]\n" //
		+ "[" + val[M20] + "|" + val[M21] + "|" + val[M22] + "|" + val[M23] + "]\n" //
		+ "[" + val[M30] + "|" + val[M31] + "|" + val[M32] + "|" + val[M33] + "]\n";
}

// NOTE: the comments below are FUNCTIONAL — gdx-jnigen extracts the /*JNI ... */ block and the comment bodies
// after each native method as the actual C implementation. Do not edit them as documentation.
// @off
/*JNI
#include <memory.h>
#include <stdio.h>
#include <string.h>

#define M00 0
#define M01 4
#define M02 8
#define M03 12
#define M10 1
#define M11 5
#define M12 9
#define M13 13
#define M20 2
#define M21 6
#define M22 10
#define M23 14
#define M30 3
#define M31 7
#define M32 11
#define M33 15

static inline void matrix4_mul(float* mata, float* matb) {
	float tmp[16];
	tmp[M00] = mata[M00] * matb[M00] + mata[M01] * matb[M10] + mata[M02] * matb[M20] + mata[M03] * matb[M30];
	tmp[M01] = mata[M00] * matb[M01] + mata[M01] * matb[M11] + mata[M02] * matb[M21] + mata[M03] * matb[M31];
	tmp[M02] = mata[M00] * matb[M02] + mata[M01] * matb[M12] + mata[M02] * matb[M22] + mata[M03] * matb[M32];
	tmp[M03] = mata[M00] * matb[M03] + mata[M01] * matb[M13] + mata[M02] * matb[M23] + mata[M03] * matb[M33];
	tmp[M10] = mata[M10] * matb[M00] + mata[M11] * matb[M10] + mata[M12] * matb[M20] + mata[M13] * matb[M30];
	tmp[M11] = mata[M10] * matb[M01] + mata[M11] * matb[M11] + mata[M12] * matb[M21] + mata[M13] * matb[M31];
	tmp[M12] = mata[M10] * matb[M02] + mata[M11] * matb[M12] + mata[M12] * matb[M22] + mata[M13] * matb[M32];
	tmp[M13] = mata[M10] * matb[M03] + mata[M11] * matb[M13] + mata[M12] * matb[M23] + mata[M13] * matb[M33];
	tmp[M20] = mata[M20] * matb[M00] + mata[M21] * matb[M10] + mata[M22] * matb[M20] + mata[M23] * matb[M30];
	tmp[M21] = mata[M20] * matb[M01] + mata[M21] * matb[M11] + mata[M22] * matb[M21] + mata[M23] * matb[M31];
	tmp[M22] = mata[M20] * matb[M02] + mata[M21] * matb[M12] + mata[M22] * matb[M22] + mata[M23] * matb[M32];
	tmp[M23] = mata[M20] * matb[M03] + mata[M21] * matb[M13] + mata[M22] * matb[M23] + mata[M23] * matb[M33];
	tmp[M30] = mata[M30] * matb[M00] + mata[M31] * matb[M10] + mata[M32] * matb[M20] + mata[M33] * matb[M30];
	tmp[M31] = mata[M30] * matb[M01] + mata[M31] * matb[M11] + mata[M32] * matb[M21] + mata[M33] * matb[M31];
	tmp[M32] = mata[M30] * matb[M02] + mata[M31] * matb[M12] + mata[M32] * matb[M22] + mata[M33] * matb[M32];
	tmp[M33] = mata[M30] * matb[M03] + mata[M31] * matb[M13] + mata[M32] * matb[M23] + mata[M33] * matb[M33];
	memcpy(mata, tmp, sizeof(float) *  16);
}

static inline void matrix4_mulVec(float* mat, float* vec) {
	float x = vec[0] * mat[M00] + vec[1] * mat[M01] + vec[2] * mat[M02] + mat[M03];
	float y = vec[0] * mat[M10] + vec[1] * mat[M11] + vec[2] * mat[M12] + mat[M13];
	float z = vec[0] * mat[M20] + vec[1] * mat[M21] + vec[2] * mat[M22] + mat[M23];
	vec[0] = x;
	vec[1] = y;
	vec[2] = z;
}

static inline void matrix4_proj(float* mat, float* vec) {
	float inv_w = 1.0f / (vec[0] * mat[M30] + vec[1] * mat[M31] + vec[2] * mat[M32] + mat[M33]);
	float x = (vec[0] * mat[M00] + vec[1] * mat[M01] + vec[2] * mat[M02] + mat[M03]) * inv_w;
	float y = (vec[0] * mat[M10] + vec[1] * mat[M11] + vec[2] * mat[M12] + mat[M13]) * inv_w;
	float z = (vec[0] * mat[M20] + vec[1] * mat[M21] + vec[2] * mat[M22] + mat[M23]) * inv_w;
	vec[0] = x;
	vec[1] = y;
	vec[2] = z;
}

static inline void matrix4_rot(float* mat, float* vec) {
	float x = vec[0] * mat[M00] + vec[1] * mat[M01] + vec[2] * mat[M02];
	float y = vec[0] * mat[M10] + vec[1] * mat[M11] + vec[2] * mat[M12];
	float z = vec[0] * mat[M20] + vec[1] * mat[M21] + vec[2] * mat[M22];
	vec[0] = x;
	vec[1] = y;
	vec[2] = z;
}
*/

/** Multiplies the vectors with the given matrix. The matrix array is assumed to hold a 4x4 column major matrix as you can get
 * from {@link Matrix4#val}. The vectors array is assumed to hold 3-component vectors. Offset specifies the offset into the
 * array where the x-component of the first vector is located. The numVecs parameter specifies the number of vectors stored in
 * the vectors array. The stride parameter specifies the number of floats between subsequent vectors and must be >= 3. This is
 * the same as {@link Vector3#mul(Matrix4)} applied to multiple vectors.
 *
 * @param mat the matrix
 * @param vecs the vectors
 * @param offset the offset into the vectors array
 * @param numVecs the number of vectors
 * @param stride the stride between vectors in floats */
public static native void mulVec (float[] mat, float[] vecs, int offset, int numVecs, int stride) /*-{ }-*/; /*
	float* vecPtr = vecs + offset;
	for(int i = 0; i < numVecs; i++) {
		matrix4_mulVec(mat, vecPtr);
		vecPtr += stride;
	}
*/

/** Multiplies the vectors with the given matrix, performing a division by w. The matrix array is assumed to hold a 4x4 column
 * major matrix as you can get from {@link Matrix4#val}. The vectors array is assumed to hold 3-component vectors. Offset
 * specifies the offset into the array where the x-component of the first vector is located. The numVecs parameter specifies
 * the number of vectors stored in the vectors array. The stride parameter specifies the number of floats between subsequent
 * vectors and must be >= 3. This is the same as {@link Vector3#prj(Matrix4)} applied to multiple vectors.
 *
 * @param mat the matrix
 * @param vecs the vectors
 * @param offset the offset into the vectors array
 * @param numVecs the number of vectors
 * @param stride the stride between vectors in floats */
public static native void prj (float[] mat, float[] vecs, int offset, int numVecs, int stride) /*-{ }-*/; /*
	float* vecPtr = vecs + offset;
	for(int i = 0; i < numVecs; i++) {
		matrix4_proj(mat, vecPtr);
		vecPtr += stride;
	}
*/

/** Multiplies the vectors with the top most 3x3 sub-matrix of the given matrix. The matrix array is assumed to hold a 4x4
 * column major matrix as you can get from {@link Matrix4#val}. The vectors array is assumed to hold 3-component vectors.
 * Offset specifies the offset into the array where the x-component of the first vector is located. The numVecs parameter
 * specifies the number of vectors stored in the vectors array. The stride parameter specifies the number of floats between
 * subsequent vectors and must be >= 3. This is the same as {@link Vector3#rot(Matrix4)} applied to multiple vectors.
 *
 * @param mat the matrix
 * @param vecs the vectors
 * @param offset the offset into the vectors array
 * @param numVecs the number of vectors
 * @param stride the stride between vectors in floats */
public static native void rot (float[] mat, float[] vecs, int offset, int numVecs, int stride) /*-{ }-*/; /*
	float* vecPtr = vecs + offset;
	for(int i = 0; i < numVecs; i++) {
		matrix4_rot(mat, vecPtr);
		vecPtr += stride;
	}
*/
// @on

/** Multiplies the matrix mata with matrix matb, storing the result in mata. The arrays are assumed to hold 4x4 column major
 * matrices as you can get from {@link Matrix4#val}. This is the same as {@link Matrix4#mul(Matrix4)}.
 *
 * @param mata the first matrix.
 * @param matb the second matrix.
 */
public static void mul (float[] mata, float[] matb) {
	// Compute all 16 products into locals first so mata can safely be one of the operands.
	float m00 = mata[M00] * matb[M00] + mata[M01] * matb[M10] + mata[M02] * matb[M20] + mata[M03] * matb[M30];
	float m01 = mata[M00] * matb[M01] + mata[M01] * matb[M11] + mata[M02] * matb[M21] + mata[M03] * matb[M31];
	float m02 = mata[M00] * matb[M02] + mata[M01] * matb[M12] + mata[M02] * matb[M22] + mata[M03] * matb[M32];
	float m03 = mata[M00] * matb[M03] + mata[M01] * matb[M13] + mata[M02] * matb[M23] + mata[M03] * matb[M33];
	float m10 = mata[M10] * matb[M00] + mata[M11] * matb[M10] + mata[M12] * matb[M20] + mata[M13] * matb[M30];
	float m11 = mata[M10] * matb[M01] + mata[M11] * matb[M11] + mata[M12] * matb[M21] + mata[M13] * matb[M31];
	float m12 = mata[M10] * matb[M02] + mata[M11] * matb[M12] + mata[M12] * matb[M22] + mata[M13] * matb[M32];
	float m13 = mata[M10] * matb[M03] + mata[M11] * matb[M13] + mata[M12] * matb[M23] + mata[M13] * matb[M33];
	float m20 = mata[M20] * matb[M00] + mata[M21] * matb[M10] + mata[M22] * matb[M20] + mata[M23] * matb[M30];
	float m21 = mata[M20] * matb[M01] + mata[M21] * matb[M11] + mata[M22] * matb[M21] + mata[M23] * matb[M31];
	float m22 = mata[M20] * matb[M02] + mata[M21] * matb[M12] + mata[M22] * matb[M22] + mata[M23] * matb[M32];
	float m23 = mata[M20] * matb[M03] + mata[M21] * matb[M13] + mata[M22] * matb[M23] + mata[M23] * matb[M33];
	float m30 = mata[M30] * matb[M00] + mata[M31] * matb[M10] + mata[M32] * matb[M20] + mata[M33] * matb[M30];
	float m31 = mata[M30] * matb[M01] + mata[M31] * matb[M11] + mata[M32] * matb[M21] + mata[M33] * matb[M31];
	float m32 = mata[M30] * matb[M02] + mata[M31] * matb[M12] + mata[M32] * matb[M22] + mata[M33] * matb[M32];
	float m33 = mata[M30] * matb[M03] + mata[M31] * matb[M13] + mata[M32] * matb[M23] + mata[M33] * matb[M33];
	mata[M00] = m00;
	mata[M10] = m10;
	mata[M20] = m20;
	mata[M30] = m30;
	mata[M01] = m01;
	mata[M11] = m11;
	mata[M21] = m21;
	mata[M31] = m31;
	mata[M02] = m02;
	mata[M12] = m12;
	mata[M22] = m22;
	mata[M32] = m32;
	mata[M03] = m03;
	mata[M13] = m13;
	mata[M23] = m23;
	mata[M33] = m33;
}

/** Multiplies the vector with the given matrix. The matrix array is assumed to hold a 4x4 column major matrix as you can get
 * from {@link Matrix4#val}. The vector array is assumed to hold a 3-component vector, with x being the first element, y being
 * the second and z being the last component. The result is stored in the vector array. This is the same as
 * {@link Vector3#mul(Matrix4)}.
 * @param mat the matrix
 * @param vec the vector. */
public static void mulVec (float[] mat, float[] vec) {
	// Full affine transform: rotation/scale plus the translation column (w assumed 1).
	float x = vec[0] * mat[M00] + vec[1] * mat[M01] + vec[2] * mat[M02] + mat[M03];
	float y = vec[0] * mat[M10] + vec[1] * mat[M11] + vec[2] * mat[M12] + mat[M13];
	float z = vec[0] * mat[M20] + vec[1] * mat[M21] + vec[2] * mat[M22] + mat[M23];
	vec[0] = x;
	vec[1] = y;
	vec[2] = z;
}

/** Multiplies the vector with the given matrix, performing a division by w. The matrix array is assumed to hold a 4x4 column
 * major matrix as you can get from {@link Matrix4#val}. The vector array is assumed to hold a 3-component vector, with x being
 * the first element, y being the second and z being the last component. The result is stored in the vector array. This is the
 * same as {@link Vector3#prj(Matrix4)}.
 * @param mat the matrix
 * @param vec the vector. */
public static void prj (float[] mat, float[] vec) {
	// Perspective divide: compute w from the bottom row, then scale the transformed x/y/z by 1/w.
	float inv_w = 1.0f / (vec[0] * mat[M30] + vec[1] * mat[M31] + vec[2] * mat[M32] + mat[M33]);
	float x = (vec[0] * mat[M00] + vec[1] * mat[M01] + vec[2] * mat[M02] + mat[M03]) * inv_w;
	float y = (vec[0] * mat[M10] + vec[1] * mat[M11] + vec[2] * mat[M12] + mat[M13]) * inv_w;
	float z = (vec[0] * mat[M20] + vec[1] * mat[M21] + vec[2] * mat[M22] + mat[M23]) * inv_w;
	vec[0] = x;
	vec[1] = y;
	vec[2] = z;
}

/** Multiplies the vector with the top most 3x3 sub-matrix of the given matrix. The matrix array is assumed to hold a 4x4
 * column major matrix as you can get from {@link Matrix4#val}.
The vector array is assumed to hold a 3-component vector, with
 * x being the first element, y being the second and z being the last component. The result is stored in the vector array. This
 * is the same as {@link Vector3#rot(Matrix4)}.
 * @param mat the matrix
 * @param vec the vector. */
public static void rot (float[] mat, float[] vec) {
	// Rotation/scale only: the translation column (M03/M13/M23) is intentionally ignored.
	float x = vec[0] * mat[M00] + vec[1] * mat[M01] + vec[2] * mat[M02];
	float y = vec[0] * mat[M10] + vec[1] * mat[M11] + vec[2] * mat[M12];
	float z = vec[0] * mat[M20] + vec[1] * mat[M21] + vec[2] * mat[M22];
	vec[0] = x;
	vec[1] = y;
	vec[2] = z;
}

/** Computes the inverse of the given matrix. The matrix array is assumed to hold a 4x4 column major matrix as you can get from
 * {@link Matrix4#val}. The inverse is written back into {@code values} in place; on failure the array is left unchanged.
 * @param values the matrix values.
 * @return false in case the inverse could not be calculated, true otherwise. */
public static boolean inv (float[] values) {
	// Classic adjugate/determinant inverse: bail out early on a singular matrix.
	float l_det = det(values);
	if (l_det == 0) return false;
	// Each mXY below is a cofactor of the transposed position (adjugate entry).
	float m00 = values[M12] * values[M23] * values[M31] - values[M13] * values[M22] * values[M31]
		+ values[M13] * values[M21] * values[M32] - values[M11] * values[M23] * values[M32]
		- values[M12] * values[M21] * values[M33] + values[M11] * values[M22] * values[M33];
	float m01 = values[M03] * values[M22] * values[M31] - values[M02] * values[M23] * values[M31]
		- values[M03] * values[M21] * values[M32] + values[M01] * values[M23] * values[M32]
		+ values[M02] * values[M21] * values[M33] - values[M01] * values[M22] * values[M33];
	float m02 = values[M02] * values[M13] * values[M31] - values[M03] * values[M12] * values[M31]
		+ values[M03] * values[M11] * values[M32] - values[M01] * values[M13] * values[M32]
		- values[M02] * values[M11] * values[M33] + values[M01] * values[M12] * values[M33];
	float m03 = values[M03] * values[M12] * values[M21] - values[M02] * values[M13] * values[M21]
		- values[M03] * values[M11] * values[M22] + values[M01] * values[M13] * values[M22]
		+ values[M02] * values[M11] * values[M23] - values[M01] * values[M12] * values[M23];
	float m10 = values[M13] * values[M22] * values[M30] - values[M12] * values[M23] * values[M30]
		- values[M13] * values[M20] * values[M32] + values[M10] * values[M23] * values[M32]
		+ values[M12] * values[M20] * values[M33] - values[M10] * values[M22] * values[M33];
	float m11 = values[M02] * values[M23] * values[M30] - values[M03] * values[M22] * values[M30]
		+ values[M03] * values[M20] * values[M32] - values[M00] * values[M23] * values[M32]
		- values[M02] * values[M20] * values[M33] + values[M00] * values[M22] * values[M33];
	float m12 = values[M03] * values[M12] * values[M30] - values[M02] * values[M13] * values[M30]
		- values[M03] * values[M10] * values[M32] + values[M00] * values[M13] * values[M32]
		+ values[M02] * values[M10] * values[M33] - values[M00] * values[M12] * values[M33];
	float m13 = values[M02] * values[M13] * values[M20] - values[M03] * values[M12] * values[M20]
		+ values[M03] * values[M10] * values[M22] - values[M00] * values[M13] * values[M22]
		- values[M02] * values[M10] * values[M23] + values[M00] * values[M12] * values[M23];
	float m20 = values[M11] * values[M23] * values[M30] - values[M13] * values[M21] * values[M30]
		+ values[M13] * values[M20] * values[M31] - values[M10] * values[M23] * values[M31]
		- values[M11] * values[M20] * values[M33] + values[M10] * values[M21] * values[M33];
	float m21 = values[M03] * values[M21] * values[M30] - values[M01] * values[M23] * values[M30]
		- values[M03] * values[M20] * values[M31] + values[M00] * values[M23] * values[M31]
		+ values[M01] * values[M20] * values[M33] - values[M00] * values[M21] * values[M33];
	float m22 = values[M01] * values[M13] * values[M30] - values[M03] * values[M11] * values[M30]
		+ values[M03] * values[M10] * values[M31] - values[M00] * values[M13] * values[M31]
		- values[M01] * values[M10] * values[M33] + values[M00] * values[M11] * values[M33];
	float m23 = values[M03] * values[M11] * values[M20] - values[M01] * values[M13] * values[M20]
		- values[M03] * values[M10] * values[M21] + values[M00] * values[M13] * values[M21]
		+ values[M01] * values[M10] * values[M23] - values[M00] * values[M11] * values[M23];
	float m30 = values[M12] * values[M21] * values[M30] - values[M11] * values[M22] * values[M30]
		- values[M12] * values[M20] * values[M31] + values[M10] * values[M22] * values[M31]
		+ values[M11] * values[M20] * values[M32] - values[M10] * values[M21] * values[M32];
	float m31 = values[M01] * values[M22] * values[M30] - values[M02] * values[M21] * values[M30]
		+ values[M02] * values[M20] * values[M31] - values[M00] * values[M22] * values[M31]
		- values[M01] * values[M20] * values[M32] + values[M00] * values[M21] * values[M32];
	float m32 = values[M02] * values[M11] * values[M30] - values[M01] * values[M12] * values[M30]
		- values[M02] * values[M10] * values[M31] + values[M00] * values[M12] * values[M31]
		+ values[M01] * values[M10] * values[M32] - values[M00] * values[M11] * values[M32];
	float m33 = values[M01] * values[M12] * values[M20] - values[M02] * values[M11] * values[M20]
		+ values[M02] * values[M10] * values[M21] - values[M00] * values[M12] * values[M21]
		- values[M01] * values[M10] * values[M22] + values[M00] * values[M11] * values[M22];
	float inv_det = 1.0f / l_det;
	values[M00] = m00 * inv_det;
	values[M10] = m10 * inv_det;
	values[M20] = m20 * inv_det;
	values[M30] = m30 * inv_det;
	values[M01] = m01 * inv_det;
	values[M11] = m11 * inv_det;
	values[M21] = m21 * inv_det;
	values[M31] = m31 * inv_det;
	values[M02] = m02 * inv_det;
	values[M12] = m12 * inv_det;
	values[M22] = m22 * inv_det;
	values[M32] = m32 * inv_det;
	values[M03] = m03 * inv_det;
	values[M13] = m13 * inv_det;
	values[M23] = m23 * inv_det;
	values[M33] = m33 * inv_det;
	return true;
}

/** Computes the determinant of the given matrix. The matrix array is assumed to hold a 4x4 column major matrix as you can get
 * from {@link Matrix4#val}.
 * @param values the matrix values.
 * @return the determinant. */
public static float det (float[] values) {
	// Direct cofactor expansion of the full 4x4 determinant (24 signed triple products).
	return values[M30] * values[M21] * values[M12] * values[M03] - values[M20] * values[M31] * values[M12] * values[M03]
		- values[M30] * values[M11] * values[M22] * values[M03] + values[M10] * values[M31] * values[M22] * values[M03]
		+ values[M20] * values[M11] * values[M32] * values[M03] - values[M10] * values[M21] * values[M32] * values[M03]
		- values[M30] * values[M21] * values[M02] * values[M13] + values[M20] * values[M31] * values[M02] * values[M13]
		+ values[M30] * values[M01] * values[M22] * values[M13] - values[M00] * values[M31] * values[M22] * values[M13]
		- values[M20] * values[M01] * values[M32] * values[M13] + values[M00] * values[M21] * values[M32] * values[M13]
		+ values[M30] * values[M11] * values[M02] * values[M23] - values[M10] * values[M31] * values[M02] * values[M23]
		- values[M30] * values[M01] * values[M12] * values[M23] + values[M00] * values[M31] * values[M12] * values[M23]
		+ values[M10] * values[M01] * values[M32] * values[M23] - values[M00] * values[M11] * values[M32] * values[M23]
		- values[M20] * values[M11] * values[M02] * values[M33] + values[M10] * values[M21] * values[M02] * values[M33]
		+ values[M20] * values[M01] * values[M12] * values[M33] - values[M00] * values[M21] * values[M12] * values[M33]
		- values[M10] * values[M01] * values[M22] * values[M33] + values[M00] * values[M11] * values[M22] * values[M33];
}

/** Postmultiplies this matrix by a translation matrix. Postmultiplication is also used by OpenGL ES'
 * glTranslate/glRotate/glScale
 * @param translation the translation vector
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 translate (Vector3 translation) {
	return translate(translation.x, translation.y, translation.z);
}

/** Postmultiplies this matrix by a translation matrix. Postmultiplication is also used by OpenGL ES' 1.x
 * glTranslate/glRotate/glScale.
 * @param x Translation in the x-axis.
 * @param y Translation in the y-axis.
 * @param z Translation in the z-axis.
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 translate (float x, float y, float z) {
	// Postmultiply by a translation: only the fourth column changes.
	val[M03] += val[M00] * x + val[M01] * y + val[M02] * z;
	val[M13] += val[M10] * x + val[M11] * y + val[M12] * z;
	val[M23] += val[M20] * x + val[M21] * y + val[M22] * z;
	val[M33] += val[M30] * x + val[M31] * y + val[M32] * z;
	return this;
}

/** Postmultiplies this matrix with a (counter-clockwise) rotation matrix. Postmultiplication is also used by OpenGL ES' 1.x
 * glTranslate/glRotate/glScale.
 * @param axis The vector axis to rotate around.
 * @param degrees The angle in degrees.
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 rotate (Vector3 axis, float degrees) {
	if (degrees == 0) return this;
	quat.set(axis, degrees);
	return rotate(quat);
}

/** Postmultiplies this matrix with a (counter-clockwise) rotation matrix. Postmultiplication is also used by OpenGL ES' 1.x
 * glTranslate/glRotate/glScale.
 * @param axis The vector axis to rotate around.
 * @param radians The angle in radians.
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 rotateRad (Vector3 axis, float radians) {
	if (radians == 0) return this;
	quat.setFromAxisRad(axis, radians);
	return rotate(quat);
}

/** Postmultiplies this matrix with a (counter-clockwise) rotation matrix. Postmultiplication is also used by OpenGL ES' 1.x
 * glTranslate/glRotate/glScale
 * @param axisX The x-axis component of the vector to rotate around.
 * @param axisY The y-axis component of the vector to rotate around.
 * @param axisZ The z-axis component of the vector to rotate around.
 * @param degrees The angle in degrees
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 rotate (float axisX, float axisY, float axisZ, float degrees) {
	if (degrees == 0) return this;
	quat.setFromAxis(axisX, axisY, axisZ, degrees);
	return rotate(quat);
}

/** Postmultiplies this matrix with a (counter-clockwise) rotation matrix. Postmultiplication is also used by OpenGL ES' 1.x
 * glTranslate/glRotate/glScale
 * @param axisX The x-axis component of the vector to rotate around.
 * @param axisY The y-axis component of the vector to rotate around.
 * @param axisZ The z-axis component of the vector to rotate around.
 * @param radians The angle in radians
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 rotateRad (float axisX, float axisY, float axisZ, float radians) {
	if (radians == 0) return this;
	quat.setFromAxisRad(axisX, axisY, axisZ, radians);
	return rotate(quat);
}

/** Postmultiplies this matrix with a (counter-clockwise) rotation matrix. Postmultiplication is also used by OpenGL ES' 1.x
 * glTranslate/glRotate/glScale.
 * @param rotation the rotation quaternion
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 rotate (Quaternion rotation) {
	float x = rotation.x, y = rotation.y, z = rotation.z, w = rotation.w;
	float xx = x * x;
	float xy = x * y;
	float xz = x * z;
	float xw = x * w;
	float yy = y * y;
	float yz = y * z;
	float yw = y * w;
	float zz = z * z;
	float zw = z * w;
	// Set matrix from quaternion
	float r00 = 1 - 2 * (yy + zz);
	float r01 = 2 * (xy - zw);
	float r02 = 2 * (xz + yw);
	float r10 = 2 * (xy + zw);
	float r11 = 1 - 2 * (xx + zz);
	float r12 = 2 * (yz - xw);
	float r20 = 2 * (xz - yw);
	float r21 = 2 * (yz + xw);
	float r22 = 1 - 2 * (xx + yy);
	// Postmultiply: the translation column (M03/M13/M23/M33) is unaffected by a pure rotation.
	float m00 = val[M00] * r00 + val[M01] * r10 + val[M02] * r20;
	float m01 = val[M00] * r01 + val[M01] * r11 + val[M02] * r21;
	float m02 = val[M00] * r02 + val[M01] * r12 + val[M02] * r22;
	float m10 = val[M10] * r00 + val[M11] * r10 + val[M12] * r20;
	float m11 = val[M10] * r01 + val[M11] * r11 + val[M12] * r21;
	float m12 = val[M10] * r02 + val[M11] * r12 + val[M12] * r22;
	float m20 = val[M20] * r00 + val[M21] * r10 + val[M22] * r20;
	float m21 = val[M20] * r01 + val[M21] * r11 + val[M22] * r21;
	float m22 = val[M20] * r02 + val[M21] * r12 + val[M22] * r22;
	float m30 = val[M30] * r00 + val[M31] * r10 + val[M32] * r20;
	float m31 = val[M30] * r01 + val[M31] * r11 + val[M32] * r21;
	float m32 = val[M30] * r02 + val[M31] * r12 + val[M32] * r22;
	val[M00] = m00;
	val[M10] = m10;
	val[M20] = m20;
	val[M30] = m30;
	val[M01] = m01;
	val[M11] = m11;
	val[M21] = m21;
	val[M31] = m31;
	val[M02] = m02;
	val[M12] = m12;
	val[M22] = m22;
	val[M32] = m32;
	return this;
}

/** Postmultiplies this matrix by the rotation between two vectors.
 * @param v1 The base vector
 * @param v2 The target vector
 * @return This matrix for the purpose of chaining methods together */
public Matrix4 rotate (final Vector3 v1, final Vector3 v2) {
	return rotate(quat.setFromCross(v1, v2));
}

/** Post-multiplies this matrix by a rotation toward a direction.
 * @param direction direction to rotate toward
 * @param up up vector
 * @return This matrix for chaining */
public Matrix4 rotateTowardDirection (final Vector3 direction, final Vector3 up) {
	// Build an orthonormal basis (l_vex, l_vey, l_vez) from direction/up, then postmultiply with it.
	l_vez.set(direction).nor();
	l_vex.set(direction).crs(up).nor();
	l_vey.set(l_vex).crs(l_vez).nor();
	float m00 = val[M00] * l_vex.x + val[M01] * l_vex.y + val[M02] * l_vex.z;
	float m01 = val[M00] * l_vey.x + val[M01] * l_vey.y + val[M02] * l_vey.z;
	float m02 = val[M00] * -l_vez.x + val[M01] * -l_vez.y + val[M02] * -l_vez.z;
	float m10 = val[M10] * l_vex.x + val[M11] * l_vex.y + val[M12] * l_vex.z;
	float m11 = val[M10] * l_vey.x + val[M11] * l_vey.y + val[M12] * l_vey.z;
	float m12 = val[M10] * -l_vez.x + val[M11] * -l_vez.y + val[M12] * -l_vez.z;
	float m20 = val[M20] * l_vex.x + val[M21] * l_vex.y + val[M22] * l_vex.z;
	float m21 = val[M20] * l_vey.x + val[M21] * l_vey.y + val[M22] * l_vey.z;
	float m22 = val[M20] * -l_vez.x + val[M21] * -l_vez.y + val[M22] * -l_vez.z;
	float m30 = val[M30] * l_vex.x + val[M31] * l_vex.y + val[M32] * l_vex.z;
	float m31 = val[M30] * l_vey.x + val[M31] * l_vey.y + val[M32] * l_vey.z;
	float m32 = val[M30] * -l_vez.x + val[M31] * -l_vez.y + val[M32] * -l_vez.z;
	val[M00] = m00;
	val[M10] = m10;
	val[M20] = m20;
	val[M30] = m30;
	val[M01] = m01;
	val[M11] = m11;
	val[M21] = m21;
	val[M31] = m31;
	val[M02] = m02;
	val[M12] = m12;
	val[M22] = m22;
	val[M32] = m32;
	return this;
}

/** Post-multiplies this matrix by a rotation toward a target.
 * @param target the target to rotate to
 * @param up the up vector
 * @return This matrix for chaining */
public Matrix4 rotateTowardTarget (final Vector3 target, final Vector3 up) {
	// Direction is from this matrix's current translation toward the target point.
	tmpVec.set(target.x - val[M03], target.y - val[M13], target.z - val[M23]);
	return rotateTowardDirection(tmpVec, up);
}

/** Postmultiplies this matrix with a scale matrix. Postmultiplication is also used by OpenGL ES' 1.x
 * glTranslate/glRotate/glScale.
 * @param scaleX The scale in the x-axis.
 * @param scaleY The scale in the y-axis.
 * @param scaleZ The scale in the z-axis.
 * @return This matrix for the purpose of chaining methods together. */
public Matrix4 scale (float scaleX, float scaleY, float scaleZ) {
	// Postmultiply by a scale: each of the first three columns is scaled by its factor.
	val[M00] *= scaleX;
	val[M01] *= scaleY;
	val[M02] *= scaleZ;
	val[M10] *= scaleX;
	val[M11] *= scaleY;
	val[M12] *= scaleZ;
	val[M20] *= scaleX;
	val[M21] *= scaleY;
	val[M22] *= scaleZ;
	val[M30] *= scaleX;
	val[M31] *= scaleY;
	val[M32] *= scaleZ;
	return this;
}

/** Copies the 4x3 upper-left sub-matrix into float array. The destination array is supposed to be a column major matrix.
 * @param dst the destination matrix */
public void extract4x3Matrix (float[] dst) {
	dst[0] = val[M00];
	dst[1] = val[M10];
	dst[2] = val[M20];
	dst[3] = val[M01];
	dst[4] = val[M11];
	dst[5] = val[M21];
	dst[6] = val[M02];
	dst[7] = val[M12];
	dst[8] = val[M22];
	dst[9] = val[M03];
	dst[10] = val[M13];
	dst[11] = val[M23];
}

/** @return True if this matrix has any rotation or scaling, false otherwise */
public boolean hasRotationOrScaling () {
	// True unless the upper-left 3x3 block is exactly identity (within MathUtils tolerance).
	return !(MathUtils.isEqual(val[M00], 1) && MathUtils.isEqual(val[M11], 1) && MathUtils.isEqual(val[M22], 1)
		&& MathUtils.isZero(val[M01]) && MathUtils.isZero(val[M02]) && MathUtils.isZero(val[M10]) && MathUtils.isZero(val[M12])
		&& MathUtils.isZero(val[M20]) && MathUtils.isZero(val[M21]));
}
}
tommyettinger/libgdx
gdx/src/com/badlogic/gdx/math/Matrix4.java