Dataset schema (column, dtype, observed range or cardinality):

code                    string    lengths 46 to 37.2k
language                string    9 distinct values
AST_depth               int64     3 to 30
alphanumeric_fraction   float64   0.2 to 0.91
max_line_length         int64     13 to 399
avg_line_length         float64   5.67 to 140
num_lines               int64     7 to 299
original_docstring      string    lengths 22 to 42.6k
source                  string    2 distinct values

Each record below lists its fields in this column order, one field per line: the code snippet (displayed flattened onto a single line), its language, AST_depth, alphanumeric_fraction, max_line_length, avg_line_length, num_lines, the original docstring, and finally the source kind ("function" or "class").
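To make the schema concrete, here is a minimal, hypothetical sketch (Python, standard library only) of one record and of how the numeric columns could plausibly be derived from the code string. The field names come from the schema above; the sample snippet, the derive_metrics helper, and the exact metric definitions (e.g. that alphanumeric_fraction is the share of alphanumeric characters among all characters, or that num_lines is a raw line count) are assumptions for illustration, not definitions taken from this dataset. Note that the line-based metrics refer to the original multi-line snippet, even though the code cells below are shown flattened; AST_depth is presumably the depth of the snippet's parse tree and is not derived here.

def derive_metrics(code: str) -> dict:
    """Plausible (assumed) derivations of the numeric columns from a code string."""
    lines = code.splitlines() or [""]
    return {
        "num_lines": len(lines),                                          # assumed: raw line count
        "max_line_length": max(len(line) for line in lines),              # longest line, in characters
        "avg_line_length": sum(len(line) for line in lines) / len(lines), # mean characters per line
        # assumed: alphanumeric characters divided by all characters
        "alphanumeric_fraction": sum(ch.isalnum() for ch in code) / max(len(code), 1),
    }

# A hypothetical record with the same fields as the schema above.
record = {
    "code": "def add(a, b):\n    return a + b\n",
    "language": "python",
    "AST_depth": 3,                        # not derived here; presumably parse-tree depth
    "original_docstring": "Add two numbers.",
    "source": "function",                  # observed values in this dump: "function" or "class"
}
record.update(derive_metrics(record["code"]))
print(record["num_lines"], record["max_line_length"], round(record["alphanumeric_fraction"], 3))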
public void addToPrepearedStatement(PreparedStatement statement, int idx) throws SQLException { if (this.isNull()) { statement.setNull(idx, java.sql.Types.VARCHAR); return; } statement.setString(idx, this.textValue); }
java
11
0.739316
73
28.375
8
/*** * delegated to this class to avoid switch-case in our db-driver (dbHandle) * * @param statement * statement to which this value is to be added * @param idx * index at which to add this value * @throws SQLException */
function
public bool TryMakeLocalPart(TokenEnumerator enumerator, out string localPart) { if (TryMake(enumerator, TryMakeDotString, out localPart)) { return true; } return false; }
c#
8
0.545817
78
30.5
8
/// <summary> /// Try to make the local part of the path. /// </summary> /// <param name="enumerator">The enumerator to make the local part from.</param> /// <param name="localPart">The local part that was made, or undefined if it was not made.</param> /// <returns>true if the local part was made, false if not.</returns> /// <remarks><![CDATA[Dot-string / Quoted-string]]></remarks>
function
def emcc_binary(name, srcs = [], linkopts = [], html_shell = None, **kwargs): includehtml = False linkopts = list(linkopts) srcs = list(srcs) if name.endswith(".html"): basename = name[:-5] includehtml = True elif name.endswith(".js"): basename = name[:-3] outputs = [] outputs.append(basename + ".js") outputs.append(basename + ".wasm") if includehtml and not html_shell: outputs.append(basename + ".html") tarfile = name + ".tar" if html_shell: tarfile = basename + ".js.tar" native.cc_binary( name = tarfile, srcs = srcs, linkopts = linkopts, restricted_to = ["//tools:web"], **kwargs ) emcc_expand_files( name = basename, html_shell = html_shell, tarfile = tarfile, restricted_to = ["//tools:web"], )
python
11
0.547945
77
28.233333
30
Produces a deployable set of WebAssembly files. Depending on the settings, the exact format of the output varies. The output will be a a .js, .wasm, and optional .html file, all sharing the same basename. The .js file is the script that should be included in any webpage, and will handle calling the code compiled to the .wasm file. The optional .html file uses some existing template html file and adds the necessary <script> statement to import the .js script. This html file will be generated if the name of the rule ends with ".html"; if the html_shell argument is specified, then the provided html file is used to generate the output html. The only change made to the template html is to replace any instances of "{{{ SCRIPT }}}" with the appropriate <script> tags. This is consistent with how the "--shell-file" flag works in emscripten. However, we can't use the builtin flag with the script in its current form, because that would require making an html file an input to a cc_library rule, which bazel gets obnoxious about. This macro also defines a rule with a name equal to the basename of the name argument (e.g., if name = "foo.html", basename = "foo"). This rule is the rule that actually outputs the required files. Internally, this rule works by: 1) Generating a tarball that contains the .js and .wasm files, using a cc_binary that calls the emscripten compiler. 2) Extracting said tarball. 3) [if necessary] Generating the output html from the html shell template.
function
def find_max_patterns(patterns): max_pattern = None max_value = -1 for p in patterns: if max_pattern is None or max_value < patterns[p]: max_pattern = p max_value = patterns[p] return max_pattern
python
10
0.592593
58
29.5
8
This function simply returns the class value with the highest count of patterns, and is used in conjunction with get_num_patterns
function
def construct_kernel(ifg_dates, master_date): slc_dates = sorted(list(set([date for pairs in ifg_dates for date in pairs]))) date_lookup = {date: idx for (idx, date) in enumerate(slc_dates)} kernel = np.zeros((len(ifg_dates), len(slc_dates))) for idx, (ifg_master, ifg_slave) in enumerate(ifg_dates): kernel[idx, date_lookup[ifg_master]] = -1 kernel[idx, date_lookup[ifg_slave]] = 1 kernel[:, date_lookup[master_date]] = 0 return kernel
python
13
0.654088
82
52.111111
9
Constructs the kernel matrix for an unwrapped phase inverse problem. Arguments --------- ifg_dates : list(2-tuple(date)) A list of 2-tuples containing the (master, slave) dates for each interferogram used in the inversion. master_date : date The date of the master interferogram. The kernel will be constructed so the inversion produces results relative to it. Returns ------- A 2D `ndarray`.
function
private void createRandomGroupsForRoster( String providerID, int unit ) { String groupTitle = rosterGroupSplit ? rosterGroupTitleGroup: rosterGroupTitleUser; if( StringUtils.isNotBlank( groupTitle ) && StringUtils.isNotBlank( providerID ) ) { Set<String> userSet = new HashSet<>(); List<AuthzGroup> realms = authzGroupService.getAuthzGroups( providerID, null ); for( AuthzGroup realm : realms ) { if( providerID.equals( realm.getProviderGroupId() ) ) { Set<Member> members = realm.getMembers(); for( Member member : members ) { userSet.add( member.getUserId() ); } } } createRandomGroups( rosterGroupSplit, new ArrayList<>( userSet ), groupTitle, unit ); } }
java
16
0.541215
97
42.952381
21
/** * SAK-29373 - Create random groups of users from the given roster. * This could be random number of users in specified number of groups, * or vice versa. * * @param providerID the provider ID selected * @param unit the number of groups or users per group requested */
function
def configuration(request): form = make_config_form() if request.method == 'POST': if request.form.get('enable_editor'): request.session['ace_on'] = True elif request.form.get('disable_editor'): request.session['ace_on'] = False elif form.validate(request.form): form.apply() flash(_(u'Configuration updated successfully.'), 'configure') return redirect_to('admin/configuration') else: flash(_(u'Could not save the configuration because the ' u'configuration is invalid.'), 'error') return render_admin_response('admin/configuration.html', 'options.configuration', form=form.as_widget(), editor_enabled= request.session.get('ace_on', False))
python
15
0.550228
73
47.722222
18
Advanced configuration editor. This is useful for development or if a plugin doesn't ship an editor for the configuration values. Because all the values are not further checked it could easily be that Zine is left in an unusable state if a variable is set to something bad. Because of this the editor shows a warning and must be enabled by hand.
function
def network_country(mcc, mnc): try: c = networks[mcc] if isinstance(c, str): return c return c[mnc] except: raise InvalidNetwork('Invalid MCC {} MNC {}'.format(mcc, mnc))
python
12
0.545045
70
26.875
8
Get the country matching the MCC and MNC. In a few edgecases the MCC is not sufficient to identify the country, since some countries share MCC. However it's not often the case, so you could just specify MCC Returns an ISO-3166-1 alpha-2 code.
function
void init() { if (!initialized.getAndSet(true)) { if (scheduler != null) { Runnable updater = () -> { try { processAccountJsonFile(); } catch (Exception e) { logger.error("Exception occurred when loading JSON account data", e); accountServiceMetrics.fetchRemoteAccountErrorCount.inc(); } }; int initialDelay = new Random().nextInt(accountConfig.updaterPollingIntervalMs + 1); scheduler.scheduleAtFixedRate(updater, initialDelay, accountConfig.updaterPollingIntervalMs, TimeUnit.MILLISECONDS); logger.info( "Background account updater will fetch accounts from remote starting {} ms from now and repeat with interval={} ms", initialDelay, accountConfig.updaterPollingIntervalMs); } else { processAccountJsonFile(); } } }
java
18
0.625
128
39.772727
22
/** * Initializes this instance. Does all work which requires handing out references to this instance itself which can't * be done during normal instance construction without letting a reference to an incomplete constructed instance escape. */
function
public static boolean checkError() { boolean found = false; int error_code = GLES20.GL_NO_ERROR; do { /* If more than one flag has recorded an error, glGetError returns and clears an arbitrary error flag value. Thus, glGetError should always be called in a loop, until it returns GL_NO_ERROR, if all error flags are to be reset. */ error_code = GLES20.glGetError(); if(error_code != GLES20.GL_NO_ERROR) { found = true; Log.e(TAG, "Error: " + GLU.gluErrorString(error_code)); } } while(error_code != GLES20.GL_NO_ERROR); return found; }
java
13
0.67474
88
27.95
20
/** * As of version 4.2 Android offers an option called "Enable OpenGL traces" in the phone's * developer options. If you set this to "Call stack on glGetError" you'll see error logs if * OpenGL error happens. * * @return true if found, otherwise false. */
function
public boolean tryUpdate(String configVersionPath, String latestVersion) { this.updateAble = true; this.version = getConfig().getString(configVersionPath, "0.0"); if (!version.equals(latestVersion)) { try { updateComments(); getConfig().set(configVersionPath, latestVersion); saveConfig(registeredMemory != null); this.version = latestVersion; } catch (IOException e) { e.printStackTrace(); return false; } return true; } return false; }
java
11
0.5376
74
35.823529
17
/** * Mark the config as UpdateAble config * * @param configVersionPath The config version yaml path * @param latestVersion The latest version of the config * @return true if successfully updated, false otherwise */
function
public class HumanReadable { public static Map operatorDisplayMap = new HashMap(); public static Map ceDisplayMap = new HashMap(); public static Map actionDisplayMap = new HashMap(); public static final String[] CONDITIONAL_ELEMENTS = new String[]{"not", "exists", "or"}; static { operatorDisplayMap.put( "==", "is equal to" ); operatorDisplayMap.put( "!=", "is not equal to" ); operatorDisplayMap.put( "<", "is less than" ); operatorDisplayMap.put( "<=", "less than or equal to" ); operatorDisplayMap.put( ">", "greater than" ); operatorDisplayMap.put( ">=", "greater than or equal to" ); operatorDisplayMap.put( "soundslike", "sounds like" ); operatorDisplayMap.put( "|| ==", "or equal to" ); operatorDisplayMap.put( "|| !=", "or not equal to" ); operatorDisplayMap.put( "&& !=", "and not equal to" ); operatorDisplayMap.put( "&& >", "and greater than" ); operatorDisplayMap.put( "&& <", "and less than" ); operatorDisplayMap.put( "|| >", "or greater than" ); operatorDisplayMap.put( "|| <", "or less than" ); operatorDisplayMap.put( "|| >=", "or greater than (or equal to)" ); operatorDisplayMap.put( "|| <=", "or less than (or equal to)" ); operatorDisplayMap.put( "&& >=", "and greater than (or equal to)" ); operatorDisplayMap.put( "&& <=", "or less than (or equal to)" ); operatorDisplayMap.put( "&& contains", "and contains" ); operatorDisplayMap.put( "|| contains", "or contains" ); operatorDisplayMap.put( "&& matches", "and matches" ); operatorDisplayMap.put( "|| matches", "or matches" ); operatorDisplayMap.put( "|| excludes", "or excludes" ); operatorDisplayMap.put( "&& excludes", "and excludes" ); ceDisplayMap.put( "not", "There is no" ); ceDisplayMap.put( "exists", "There exists" ); ceDisplayMap.put( "or", "Any of" ); actionDisplayMap.put( "assert", "Insert" ); actionDisplayMap.put( "assertLogical", "Logically insert" ); actionDisplayMap.put( "retract", "Retract" ); actionDisplayMap.put( "set", "Set" ); actionDisplayMap.put( "modify", "Modify" ); } public static String getActionDisplayName(String action) { return lookup( action, actionDisplayMap ); } public static String getOperatorDisplayName(String op) { return lookup( op, operatorDisplayMap ); } public static String getCEDisplayName(String ce) { return lookup( ce, ceDisplayMap ); } private static String lookup(String ce, Map map) { if ( map.containsKey( ce ) ) { return (String) map.get( ce ); } else { return ce; } } /** * get operator by its display name * * @param op * operator display name * @return operator */ public static String getOperatorName(String op) { Set keys = operatorDisplayMap.keySet(); for ( Iterator iter = keys.iterator(); iter.hasNext(); ) { String key = (String) iter.next(); if ( op.equals( operatorDisplayMap.get( key ) ) ) { return key; } } throw new RuntimeException( "No operator display name '" + op + "' was found." ); } }
java
13
0.435606
92
35.201613
124
/** * This contains some simple mappings between operators, conditional elements * and the human readable equivalent. * * Yes, I am making the presumption that programmers are not human, but I think * they (we) are cool with that. * * @author Michael Neale */
class
@PostMapping(value = "/pin") @PinRateLimit @ApiOperation(value = "Generate a verification PIN and send it to the given phone number") @ApiResponses(value = { @ApiResponse(code = 201, message = "PIN created and request sent to the SMS service"), @ApiResponse(code = 429, message = SwaggerConstants.RATE_LIMIT_EXCEEDED_MESSAGE, response = ErrorDTO.class) }) @ApiImplicitParams({ @ApiImplicitParam(name = "clientId", value = "Client ID", paramType = "header", example = SwaggerConstants.CLIENT_ID, required = true) }) @ResponseStatus(HttpStatus.CREATED) public void createPin(final @Valid @RequestBody PinGenerationDTO pinGenerationDTO) { log.info("POST request for /pin. Phone number: {}", pinGenerationDTO.getPhoneNumber()); pinService.generatePin(pinGenerationDTO.getPhoneNumber()); }
java
11
0.692661
146
57.2
15
/** * POST /pin endpoint. Submits a PIN generation request for a passed phone number. * Rate limit defined by the {@link PinRateLimit} annotation. * * @param pinGenerationDTO the phone number to generate a PIN for */
function
func TestDescriptionLength(t *testing.T) { var errs []error for _, g := range cfg.Groups { description := g.Description len := utf8.RuneCountInString(description) if len > 300 { errs = append(errs, fmt.Errorf("Number of characters in description \"%s\" for group name \"%s\" "+ "should not exceed 300; is: %d", description, g.Name, len)) } } if len(errs) > 0 { for _, err := range errs { t.Error(err) } } }
go
15
0.621005
83
24.823529
17
// TestDescriptionLength tests that the number of characters in the // google groups description does not exceed 300. // // This validation is needed because gcloud allows apps:description // with length no greater than 300
function
def isosteric_enthalpy_raw(pressures, temperatures): if len(pressures[0]) != len(temperatures): raise ParameterError( "There are a different number of pressure points than temperature points") pressures = numpy.asarray(pressures) temperatures = numpy.asarray(temperatures) inv_t = 1 / temperatures iso_enth = [] slopes = [] correlations = [] for pressure in pressures: slope, intercept, corr_coef, p, stderr = stats.linregress( inv_t, numpy.log(pressure)) iso_enth.append(-const.gas_constant * slope / 1000) slopes.append(slope) correlations.append(corr_coef) return iso_enth, slopes, correlations
python
12
0.659971
86
40.058824
17
Calculate the isosteric enthalpy of adsorption using several isotherms recorded at different temperatures on the same material. This is a 'bare-bones' function to calculate isosteric enthalpy which is designed as a low-level alternative to the main function. Designed for advanced use, its parameters have to be manually specified. Parameters ---------- pressure : array of arrays A two dimensional array of pressures for each isotherm, in bar. For example, if using two isotherms to calculate the isosteric enthalpy:: [[l1_iso1, l1_iso2], [l2_iso1, l2_iso2], [l3_iso1, l3_iso2], ...] temperatures : array Temperatures of the isotherms are taken, kelvin. Returns ------- iso_enth : array Calculated isosteric enthalpy. slopes : array Slopes fitted for each point. correlations : array The correlation of the straight line of each fit.
function
public static PlayerReinforcement createPlayerReinforcement(Player player, Group g, Block block, ReinforcementType type, ItemStack reinfMat) { if (player == null || g == null || block == null || type == null) { Citadel.getInstance().getLogger().log(Level.WARNING, "Utility createPlayerReinforcement called with null: {0},{1},{2},{3}", new Object[] {player, g, block, type}); return null; } if (g.isDisciplined()) { player.sendMessage(ChatColor.RED + "This group is disciplined."); if (CitadelConfigManager.shouldLogInternal()) { Citadel.getInstance().getLogger().log(Level.WARNING, "Request to create reinforcement for disciplined group " + g.getName()); } return null; } if (NonReinforceableType.isNonReinforceable(block.getType())){ player.sendMessage(ChatColor.RED + "That block cannot be reinforced."); if (CitadelConfigManager.shouldLogInternal()) { Citadel.getInstance().getLogger().log(Level.WARNING, "Request to create reinforcement for unreinforceable block " + block.getType()); } return null; } if (!NameAPI.getGroupManager().hasAccess(g.getName(), player.getUniqueId(), PermissionType.getPermission("REINFORCE"))) { player.sendMessage(ChatColor.RED + "You don't have permission to reinforce on this group"); return null; } final PlayerInventory inv = player.getInventory(); final int invSize = inv.getSize(); final ItemStack itemType = type.getItemStack(); List<Integer> slots = new ArrayList<Integer>(type.getRequiredAmount()); int requirementscheck = type.getRequiredAmount(); if (requirementscheck <= 0) { Citadel.Log("Reinforcement requirements too low for " + itemType.getType().name()); return null; } if (reinfMat != null && itemType.isSimilar(reinfMat)){ requirementscheck++; } int requirements = requirementscheck; boolean consumeOffhand = false; try { ItemStack offSlot = inv.getItemInOffHand(); if (offSlot != null && offSlot.isSimilar(itemType)) { requirementscheck -= offSlot.getAmount(); consumeOffhand = true; } for (int slot = 0; slot < invSize && requirements > 0; ++slot) { final ItemStack slotItem = inv.getItem(slot); if (slotItem == null) { continue; } if (!slotItem.isSimilar(itemType)) { continue; } requirementscheck -= slotItem.getAmount(); slots.add(slot); } } catch (Exception ex) { } if (requirementscheck > 0) { return null; } PlayerReinforcement rein = new PlayerReinforcement(block.getLocation(), type.getHitPoints(), getIntFormofMaturation(System.currentTimeMillis(),type.getItemStack()), getIntFormofAcidMaturation(System.currentTimeMillis(),type.getItemStack()), g, type.getItemStack()); ReinforcementCreationEvent event = new ReinforcementCreationEvent(rein, block, player); Bukkit.getPluginManager().callEvent(event); if (event.isCancelled()) { throw new ReinforcemnetFortificationCancelException(); } if (CitadelConfigManager.shouldLogReinforcement()) { StringBuffer slb = new StringBuffer(); if (player != null) { slb.append("Player ").append(player.getName()).append(" [").append(player.getUniqueId()) .append("]"); } slb.append("reinforced a ").append(block.getType()).append(" with a ") .append(rein.getMaterial()).append(" reinforcement at ") .append(rein.getLocation()); Citadel.Log(slb.toString()); } if (type.getMaterial().isBlock()){ if (slots.size()>1){ if (inv.getItemInHand().isSimilar(itemType) && PlayerState.get(player).getMode() == ReinforcementMode.REINFORCEMENT_FORTIFICATION && slots.get(0) != inv.getHeldItemSlot()){ requirements--; } } } if (consumeOffhand) { ItemStack is = inv.getItemInOffHand(); int stackSize = is.getAmount(); int deduction = Math.min(stackSize, requirements); if (deduction < stackSize) { is.setAmount(stackSize - deduction); } else { inv.setItemInOffHand(null); } requirements -= deduction; } for (final int slot : slots) { if (requirements <= 0) { break; } final ItemStack slotItem = inv.getItem(slot); final int stackSize = slotItem.getAmount(); final int deduction = Math.min(stackSize, requirements); if (deduction < stackSize) { slotItem.setAmount(stackSize - deduction); } else { inv.clear(slot); } requirements -= deduction; } if (requirements != 0) { Citadel.Log(String.format( "Reinforcement material out of sync %d vs %d", requirements, type.getRequiredAmount())); } player.updateInventory(); rm.saveInitialReinforcement(rein); playReinforcementEffect(rein); return rein; }
java
15
0.579414
188
43.232
125
/** * Creates a PlayerReinforcement or returns null if if player doesn't have * the required requirements. * @param The Player who created the reinforcement. * @param The Group this reinforcement belongs too. * @param The Block this reinforcement is occurring on. * @param The ReinforcementType that is being reinforced on the block. * @param The ItemStack type of the block being placed (if CTF, null if CTR) * @return The PlayerReinforcement that comes from these parameters or null if certain checks failed. * @throws ReinforcemnetFortificationCancelException */
function
public static RocksPageStore open(RocksPageStoreOptions pageStoreOptions) throws IOException { Preconditions.checkArgument(pageStoreOptions.getMaxPageSize() > 0); RocksDB.loadLibrary(); Options rocksOptions = new Options() .setCreateIfMissing(true) .setWriteBufferSize(pageStoreOptions.getWriteBufferSize()) .setCompressionType(pageStoreOptions.getCompressionType()); RocksDB db = null; try { db = RocksDB.open(rocksOptions, pageStoreOptions.getRootDir()); byte[] confData = db.get(CONF_KEY); Cache.PRocksPageStoreOptions pOptions = pageStoreOptions.toProto(); if (confData != null) { Cache.PRocksPageStoreOptions persistedOptions = Cache.PRocksPageStoreOptions.parseFrom(confData); if (!persistedOptions.equals(pOptions)) { db.close(); throw new IOException("Inconsistent configuration for RocksPageStore"); } } db.put(CONF_KEY, pOptions.toByteArray()); } catch (RocksDBException e) { if (db != null) { db.close(); } rocksOptions.close(); throw new IOException("Couldn't open rocksDB database", e); } return new RocksPageStore(pageStoreOptions, db, rocksOptions); }
java
13
0.679487
94
40.633333
30
/** * @param pageStoreOptions options for the rocks page store * @return a new instance of {@link PageStore} backed by RocksDB * @throws IOException if I/O error happens */
function
public final boolean isCompatibleWith(AbstractSchemaItem that) { if (that == null) throw new IllegalArgumentException("null that"); if (this.storageId != that.storageId) return false; if (this.getClass() != that.getClass()) return false; return this.isCompatibleWithInternal(that); }
java
8
0.619718
64
38.555556
9
/** * Determine whether this instance is compatible with the given instance for use with the core API. * Two instances are compatible if they are identical in all respects except for object and field names * (to also include object and field names in the comparison, use {@link #equals equals()}). * The core API uses storage IDs, not names, to identify objects and fields. * * @param that other schema object * @return true if this and {@code that} are compatible * @throws IllegalArgumentException if {@code that} is null */
function
public void Decrypt(string filePath, string saveDir = "", bool deleteOriginal = false) { FileStream targetFile = OpenTargetFile(filePath); FileStream tempFile = CreateTempFile(); var writer = new BinaryWriter(tempFile); var reader = new BinaryReader(targetFile); if (reader.ReadInt32() != BitConverter.ToInt32(ToByte(_secureFileType), 0)) { throw new Exception("Not SECF FileType!"); } string encryptionMethod = ToString(reader.ReadBytes(3)); byte[] secureHeader = reader.ReadBytes(_secureHeaderSize); _cypher.Decrypt(ref secureHeader, 0); long fileLength = BitConverter.ToInt64(secureHeader, 0); string fileName = FromFixSizedByte(secureHeader, 8); string fileExtension = FromFixSizedByte(secureHeader, 48); string validation = ToString(secureHeader, 64); if (validation != _decryptionValidation) { throw new Exception("Incorrect Key!"); } TimeSpan totalTime = new TimeSpan(); var stopWatch = new Stopwatch(); stopWatch.Start(); while (targetFile.Length - targetFile.Position > 1) { byte[] state = reader.ReadBytes((int)Math.Min(targetFile.Length - targetFile.Position, _chunkSize)); _cypher.Decrypt(ref state, 0); writer.Write(state); totalTime = totalTime.Add(stopWatch.Elapsed); OnChunkUpdate(targetFile.Position, targetFile.Length, state.Length, totalTime, stopWatch.Elapsed, ChunkEventArgs.ProcessType.Decryption); stopWatch.Restart(); } tempFile.SetLength(fileLength); stopWatch.Stop(); OnProcessCompleted(targetFile.Length, totalTime, ChunkEventArgs.ProcessType.Decryption); Save(Path.Combine(saveDir, fileName + fileExtension), tempFile); DeleteTempFile(tempFile); CloseTargetFile(targetFile, deleteOriginal); }
c#
19
0.597008
153
52.5
40
/// <summary> /// Decrypts the file, which was provided to this instance. /// </summary> /// <param name="filePath">Path to the file</param> /// <param name="saveDir">Directory, where the decrypted file is saved</param> /// <param name="deleteOriginal">Determines if the file of filePath is deleted</param>
function
def sparse_balanced_crossentropy(logits, labels): epsilon = tf.constant(np.finfo(np.float32).tiny) num_classes = tf.cast(tf.shape(logits)[-1], tf.int32) probs = tf.nn.softmax(logits) probs += tf.cast(tf.less(probs, epsilon), tf.float32) * epsilon log = -1. * tf.log(probs) onehot_labels = tf.one_hot(labels, num_classes) class_frequencies = tf.stop_gradient(tf.bincount( labels, minlength=num_classes, dtype=tf.float32)) weights = (1. / (class_frequencies + tf.constant(1e-8))) weights *= (tf.cast(tf.reduce_prod(tf.shape(labels)), tf.float32) / tf.cast(num_classes, tf.float32)) new_shape = (([1, ] * len(labels.get_shape().as_list())) + [logits.get_shape().as_list()[-1]]) weights = tf.reshape(weights, new_shape) loss = tf.reduce_mean(tf.reduce_sum(onehot_labels * log * weights, axis=-1)) return loss
python
15
0.657407
105
56.666667
15
Calculates a class frequency balanced crossentropy loss from sparse labels. Args: logits (tf.Tensor): logits prediction for which to calculate crossentropy error labels (tf.Tensor): sparse labels used for crossentropy error calculation Returns: tf.Tensor: Tensor scalar representing the mean loss
function
public static void loadXmlClassLoaderResource(Class clazz, String path) throws Exception { if (path.indexOf('/') < 0) { loadXmlPackageResource(clazz, path); } else { loadXmlClassLoaderResource(clazz.getClassLoader(), path); } }
java
10
0.62724
90
39
7
/** * Loads the XML resource from the classloader, from the package of the specified class * if the class appears relative, or from the root of the classloader if it contains a slash * @param clazz the class whose package should be used to qualify the path * @param path the package-relative path of the XML resource * @throws Exception */
function
function formatDisplay(seconds, hideZero = false) { const format = (val) => `0${Math.floor(val)}`.slice(-2); const s = Math.abs(seconds); const hours = Math.floor((s / 3600) % 24); const minutes = Math.floor((s % 3600) / 60); if (hideZero && hours < 1) return [minutes, s % 60].map(format).join(':'); else return [hours, minutes, s % 60].map(format).join(':'); }
javascript
12
0.617647
76
45.875
8
/** * another go at simpler string formatting (counters) * @description Converts seconds to string representing time * @param {number} seconds - time in seconds * @param {boolean} [hideZero] - whether to show hours in case its 00 * @returns {string} String representing absolute time 00:12:02 */
function
public class MutableMessage<T> implements Message<T>, Serializable { private static final long serialVersionUID = -636635024258737500L; private final T payload; private final MutableMessageHeaders headers; public MutableMessage(T payload) { this(payload, (Map<String, Object>) null); } public MutableMessage(T payload, @Nullable Map<String, Object> headers) { this(payload, new MutableMessageHeaders(headers)); } protected MutableMessage(T payload, MutableMessageHeaders headers) { Assert.notNull(payload, "payload must not be null"); Assert.notNull(headers, "headers must not be null"); this.payload = payload; this.headers = headers; } @Override public MutableMessageHeaders getHeaders() { return this.headers; } @Override public T getPayload() { return this.payload; } Map<String, Object> getRawHeaders() { return this.headers.getRawHeaders(); } @Override public String toString() { StringBuilder sb = new StringBuilder(getClass().getSimpleName()); sb.append(" [payload="); if (this.payload instanceof byte[]) { sb.append("byte[").append(((byte[]) this.payload).length).append("]"); } else { sb.append(this.payload); } sb.append(", headers=").append(this.headers).append("]"); return sb.toString(); } @Override public int hashCode() { return this.headers.hashCode() * 23 + ObjectUtils.nullSafeHashCode(this.payload); // NOSONAR } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj instanceof MutableMessage<?>) { MutableMessage<?> other = (MutableMessage<?>) obj; UUID thisId = this.headers.getId(); UUID otherId = other.headers.getId(); return (ObjectUtils.nullSafeEquals(thisId, otherId) && this.headers.equals(other.headers) && this.payload.equals(other.payload)); } return false; } }
java
16
0.705114
94
24.541667
72
/** * An implementation of {@link Message} with a generic payload. Unlike * {@link org.springframework.messaging.support.GenericMessage}, * this message (or its headers) can be modified after creation. * Great care must be taken, when mutating messages, that some other element/thread is not * concurrently using the message. Also note that any in-memory stores (such as * {@link org.springframework.integration.store.SimpleMessageStore}) * may have a reference to the message and changes will be * reflected there too. * * <p> * <b>IMPORTANT: Mutable messages may share state (such as message headers); such messages * should never be exposed to other components or undesirable side-effects may result.</b> * <p> * <b>It is generally recommended that messages transferred between components should * always be immutable unless great care is taken with their use.</b> * * @author Gary Russell * @author Artem Bilan * @author Stuart Williams * @author David Turanski * * @since 4.0 * */
class
class AutoEncoder: """ CNN AutoEncoder Build Deep Convolutional AutoEncoder The user can defined the depth of the Network, The code latant dimension, learning rate, number of filters and kernels size """ def __init__(self, source, kernels, filters, latant_dim=2, epochs=100, lr=1e-3): """ initilize the AutoEncoder Object :param source: tuple of two ints, the shape of given input :param kernels: list of D ints, the kernel sizes (when D referred to Network depth) :param filters: int, number of filters to product each layer :param latant_dim: int, the encoded latant dimension (default: 2) :param epochs: int, number of epoch for training (default: 100) :param lr: float, learning rate (default: .001) """ self.latant_dim = latant_dim self.kernels = [(k, k) for k in kernels] self.filters = filters self.source = (*source,) + (1,) self.epochs = epochs self.lr = lr self.loss = [] self.model = None self.reshp = self.reshape_dim() self.encoder = self.build_encoder() self.decoder = self.build_decoder() def reshape_dim(self): """ calculate the dimension of the last encoded layer :return: last layer shape """ shape = self.source for k, _ in self.kernels: klen = k - 1 shape = (int((shape[0]-klen)/POOL), int((shape[1]-klen)/POOL), shape[2]*self.filters) return shape def build_encoder(self): """ build Deep Convolutional Encoder In each step we add also conv layer and maxpooling layer In the Last step, we also add flatten & dense layer to convert the tensor into single representation vector :return: Encoder (with untrained weights) """ layers = [] depth = 1 for kernel in self.kernels: depth *= self.filters layers.append(conv(depth, kernel, activation=RELU)) layers.append(maxpool()) layers += [flatten(), dense(self.latant_dim, activation=RELU)] return Sequential(layers) def build_decoder(self): """ build Deep Convolutional Decoder In each step we add also transope conv layer and unsampling layer in the first step we add dense & reshape layers to convert the representation vector into tensor :return: Decoder (with untrained weights) """ layers = [source(shape=(1, self.latant_dim)), dense(np.prod(self.reshp), activation=RELU), reshape(target_shape=self.reshp)] depth = self.reshp[-1] for kernel in self.kernels[::-1]: depth /= self.filters layers.append(upsample()) layers.append(tconv(depth, kernel, activation=RELU)) return Sequential(layers) def fit(self, x, y): """ fitting the data through encoding-decoding proccess :param x: input matrices to encoding :param y: requied output from the network """ start = source(shape=self.source) model = Model(inputs=start, outputs=self.decoder(self.encoder(start))) model.compile(optimizer=adam(self.lr), loss=MSE) model.fit(x, y, epochs=self.epochs, shuffle=True) self.model = model self.loss += model.history.history[LOSS] return self def predict(self, x): """ predict the outputs of the network from given x :param x: given input for prediction :return: prediction """ return self.model.predict(x) def encode(self, x): """ encoding input x :param x: input x for encoding :return: encoded x in latant dimension """ return self.encoder(x) def decode(self, v): """ decoding input vector :param v: input vector for decoding from latant dimension :return: decoded matrix """ return self.decoder(v) def save(self,path): """ save the model parts as h5 files :param path: given file path to the saved model """ self.encoder.save(f'{path}\Encoder.h5') self.decoder.save(f'{path}\Decoder.h5') self.model.save(f'{path}\Autoencoder.h5') return self def load(self,path): """ load model parts h5 files from given path :param path: given file path to the saved model """ self.encoder = load_model(f'{path}\Encoder.h5') self.decoder = load_model(f'{path}\Decoder.h5') self.model = load_model(f'{path}\Autoencoder.h5') return self def plot_loss(self,size=10): """ plot the loss by the epochs """ plt.figure(figsize=(size, size*.6)) plt.title(f'Loss Value for AutoEncoder\nepoch number = {self.epochs}', fontsize=16) plt.plot(np.arange(len(self.loss)), self.loss, color='r', label=LOSS) plt.xlabel('number of epoch', fontsize=14) plt.ylabel('loss value', fontsize=14) plt.ylim(0, self.loss[0] * 1.05) plt.show()
python
16
0.572759
97
34.52381
147
CNN AutoEncoder Build Deep Convolutional AutoEncoder The user can defined the depth of the Network, The code latant dimension, learning rate, number of filters and kernels size
class
def report_to_dict(self, hls_config, output=False): hls_dir = hls_config.get_output_dir() if not os.path.exists(hls_dir): raise RuntimeError('Path {} does not exist. Exiting.'.format(hls_dir)) prj_dir = None top_func_name = None if os.path.isfile(hls_dir + '/build_prj.tcl'): prj_dir, top_func_name = self._parse_build_script(hls_dir + '/build_prj.tcl') if prj_dir is None or top_func_name is None: raise RuntimeError('Unable to read project data.') sln_dir = hls_dir + '/' + prj_dir if not os.path.exists(sln_dir): raise RuntimeError('Project {} does not exist. Make sure the project is built.'.format(prj_dir, hls_dir)) solutions = self._find_solutions(sln_dir) if len(solutions) > 1: print('WARNING: Found {} solution(s) in {}. Using the first solution.'.format(len(solutions), sln_dir)) report = {} sim_file = hls_dir + '/tb_data/csim_results.log' if os.path.isfile(sim_file): csim_results = [] with open(sim_file, 'r') as f: for line in f.readlines(): csim_results.append([float(r) for r in line.split()]) report['CSimResults'] = csim_results sim_file = hls_dir + '/tb_data/rtl_cosim_results.log' if os.path.isfile(sim_file): cosim_results = [] with open(sim_file, 'r') as f: for line in f.readlines(): cosim_results.append([float(r) for r in line.split()]) report['CosimResults'] = cosim_results syn_file = sln_dir + '/' + solutions[0] + '/syn/report/{}_csynth.xml'.format(top_func_name) if os.path.isfile(syn_file): root = ET.parse(syn_file).getroot() perf_node = root.find('./PerformanceEstimates') report['EstimatedClockPeriod'] = perf_node.find('./SummaryOfTimingAnalysis/EstimatedClockPeriod').text report['BestLatency'] = perf_node.find('./SummaryOfOverallLatency/Best-caseLatency').text report['WorstLatency'] = perf_node.find('./SummaryOfOverallLatency/Worst-caseLatency').text report['IntervalMin'] = perf_node.find('./SummaryOfOverallLatency/Interval-min').text report['IntervalMax'] = perf_node.find('./SummaryOfOverallLatency/Interval-max').text area_node = root.find('./AreaEstimates') report["Resources"] = {} report["AvailableResources"] = {} for child in area_node.find('./Resources'): report["Resources"][child.tag] = child.text for child in area_node.find('./AvailableResources'): report["AvailableResources"][child.tag] = child.text else: print('Synthesis report not found.') cosim_file = sln_dir + '/' + solutions[0] + '/sim/report/{}_cosim.rpt'.format(top_func_name) if os.path.isfile(cosim_file): with open(cosim_file, 'r') as f: for line in f.readlines(): if re.search('VHDL', line) or re.search('Verilog', line): result = line[1:].split() result = [res[:-1] if res[-1] == '|' else res for res in result] if result[1] == 'NA': continue else: report['CosimRTL'] = result[0] report['CosimStatus'] = result[1] report['CosimLatencyMin'] = result[2] report['CosimLatencyMax'] = result[4] report['CosimIntervalMin'] = result[5] report['CosimIntervalMax'] = result[7] if output: self.read_report(hls_dir) return report
python
18
0.543642
117
55.794118
68
Low level function to return the report as a dictionary. Users should generally not call this function directly but should use functions from the HLSModel. Args: dir (string): The directory where the project is found hls_config (HLSConfig): The project configuration output, optional: whether to pint a summary Returns: dict: the report dictionary Raises exceptions on errors
function
pub fn echo_syslog(enable: bool) -> Result<(), Error> { let state_ptr = unsafe { STATE }; if state_ptr.is_null() { return Err(Error::NeverInitialized); } let mut state = lock().map_err(|_| Error::Poisoned)?; state.syslog.enable(enable) }
rust
11
0.607547
57
32.25
8
/// Enables or disables echoing log messages to the syslog. /// /// The default behavior is **enabled**. /// /// If `enable` goes from `true` to `false`, the syslog connection is closed. The connection is /// reopened if `enable` is set to `true` after it became `false`. /// /// Returns an error if syslog was never initialized or the syslog connection failed to be /// established. /// /// # Arguments /// * `enable` - `true` to enable echoing to syslog, `false` to disable echoing to syslog.
function
@Override protected List< Number > calculateTickValues( final double length, final Object range ) { final List< Number > tickValues = new ArrayList<>(); if ( range != null ) { final Number lowerBound = ( ( Number[] ) range )[ 0 ]; final Number upperBound = ( ( Number[] ) range )[ 1 ]; final double lowerBoundLog10 = StrictMath.log10( lowerBound.doubleValue() ); final double upperBoundLog10 = StrictMath.log10( upperBound.doubleValue() ); for ( double i = lowerBoundLog10; i <= upperBoundLog10; i += 1.0d ) { for ( int j = 1; j <= 10; j++ ) { final double tickValue = j * StrictMath.pow( 10.0d, i ); tickValues.add( tickValue ); } } } return tickValues; }
java
14
0.536905
93
48.470588
17
/** * {@inheritDoc} * <p> * This method is used to calculate a list of all the data values for each * tick mark in range, represented by the second parameter. The formula is * the same as the one for the minor tick marks, but here we want to display * one tick for each power of 10. * <p> * NOTE: This calculates one tick value for every power of 10. */
function
def WADFlags(flags): class FlagsUnion(ctypes.Union): class Flags(ctypes.LittleEndianStructure): _fields_ = [(name, ctypes.c_uint16, size) for (name, size) in flags] _fields_ = [("flags", ctypes.c_uint16), ("_flags", Flags)] _anonymous_ = ("_flags",) return FlagsUnion
python
12
0.604502
80
43.571429
7
This is a helper function to generate flags which can be accessed either individually or as an entire 16-bit field. The flags argument is a list of (name, size) tuples, where size is in bits. See omg.mapedit for usage examples.
function
public class SentryEntityTargetSorter implements Comparator<Entity> { public final Pos center; public SentryEntityTargetSorter(Pos pos) { center = pos; } @Override public int compare(Entity entity, Entity entity2) { int distanceA = (int) Math.floor(center.distance(entity)); int distanceB = (int) Math.floor(center.distance(entity2)); int distanceCompare = Integer.compare(distanceA, distanceB); if (distanceCompare == 0) { float hpA = entity instanceof EntityLivingBase ? ((EntityLivingBase) entity).getHealth() : 0; float hpB = entity2 instanceof EntityLivingBase ? ((EntityLivingBase) entity2).getHealth() : 0; int hpCompare = Float.compare(hpA, hpB); if (hpCompare == 0) { int armorA = entity instanceof EntityLivingBase ? ((EntityLivingBase) entity).getTotalArmorValue() : 0; int armorB = entity2 instanceof EntityLivingBase ? ((EntityLivingBase) entity2).getTotalArmorValue() : 0; int armorCompare = Integer.compare(armorA, armorB); //TODO prioritise entity types (creeper > zombie) return armorCompare; } return hpCompare; } return distanceCompare; } }
java
15
0.619985
121
39.060606
33
/** * @see <a href="https://github.com/BuiltBrokenModding/VoltzEngine/blob/development/license.md">License</a> for what you can and can't do with the code. * Created by Dark(DarkGuardsman, Robert) on 4/9/2017. */
class
function checkConditionForNull(flagged, domainName, values, done) { logger.debug('iam in Null check'); let flag = flagged; if (values.length > 0) { if (domainName) { values.forEach((data) => { if (data.username && data.role) { flag += 1; } else { flag += 0; } }); logger.debug('flag', flag); done(null, flag); } else { done({ error: 'URI parameter cannot be empty.....' }); } } else { done({ error: 'Body data cannot be empty' }); } }
javascript
17
0.520599
67
24.47619
21
/* *POST and UPDATE Method *Condition check for null value of username and role for add and modify member details */
function
def remove_object(image, mask): assert image.shape[:2] == mask.shape out = np.copy(image) out = np.transpose(out, (1, 0, 2)) mask = np.transpose(mask) H, W, _ = out.shape mask = mask*1 idx = np.where(mask == 1) W_new = W - (np.amax(idx[1])-np.amin(idx[1])) for i in range(W - W_new): energy = energy_function(out) energy[idx[0],idx[1]] = -999 vcost, vpaths = compute_forward_cost(out,energy) end = np.argmin(vcost[-1]) seam_energy = vcost[-1, end] seam = backtrack_seam(vpaths, end) out = remove_seam(out,seam) mask = remove_seam(mask,seam) idx = np.where(mask == 1) print(out.shape) out = enlarge(out, W) out = np.transpose(out, (1, 0, 2)) print(out.shape,image.shape) assert out.shape == image.shape return out
python
11
0.56568
57
32.84
25
Remove the object present in the mask. Returns an output image with same shape as the input image, but without the object in the mask. Args: image: numpy array of shape (H, W, 3) mask: numpy boolean array of shape (H, W) Returns: out: numpy array of shape (H, W, 3)
function
@Slf4j public class HeartbeatController { /** Timer that handles the heartbeat tasks */ private final Timer timer; /** The task that does the periodic heartbeat send */ private final SendHeartbeatTask sendHeartbeatTask; /** * Create a new controller and launch the tasks * * @param timer vega context * @param topicName name of the topic the controller belongs to * @param sender instance that can send heartbeats * @param listener user listener for heartbeats events * @param parameters user parameters for the heartbeat mechanism */ public HeartbeatController(final Timer timer, final String topicName, final IHeartbeatSender sender, final IClientConnectionListener listener, final HeartbeatParameters parameters) { log.info("Activate sending Heartbeats to Topic [{}] with parameters [{}]" , topicName, parameters); // Store the timer this.timer = timer; // Start the heartbeats this.sendHeartbeatTask = new SendHeartbeatTask(topicName, parameters, sender, listener); this.timer.schedule(this.sendHeartbeatTask, 0, parameters.getHeartbeatRate()); } /** * Stop the heartbeat sending */ public void stop() { // Stop the tasks this.sendHeartbeatTask.cancel(); // Purge the timer this.timer.purge(); } }
java
10
0.731099
181
30.317073
41
/** * This class contains the CheckClientConnectedTask and HeartbeatTask in order to control * all the events related to heartbeats. It handle the start / stop of the different tasks * * @author avh * */
class
public void jar(final File path) throws IOException { final Manifest manifest = createManifest(); final FileOutputStream fos = new FileOutputStream(path); final JarOutputStream jos = new JarOutputStream(fos, manifest); for (ClassFile file : classes) { writeClassFile(jos, file); } jos.close(); }
java
8
0.597911
71
31
12
/** * This method writes this compiled program to a specified JAR file. * * <p> * If the JAR already exists, then it will be overwritten. * If the JAR does not exist, then it will be created. * </p> * * @param path is the path to the new jar file. */
function
def optimal(self, n_electrons_up, n_electrons_down, transform='bravyi-kitaev'): try: return self._precalc[(n_electrons_up, n_electrons_down)] except KeyError: pass warnings.warn("No pre-calculated initial for configuration (%d, %d)" %\ (n_electrons_up, n_electrons_down)) return None
python
8
0.590028
79
44.25
8
\ Lookup the pre-calculated optimal paramters Args: n_electrons_up(int): number of electrons with spin-up n_electrons_down(int): number of electrons with spin-down transform(str): for which fermion transform the initial applies Returns: optimum(tuple): array of parameters for the global minimum or None
function
@Expose public static String replaceSupplementary(final String value) { return InterpolationHelper.replaceSupplementary(value, new Replacer() { public String replace(String in) { StringBuilder uc = new StringBuilder(); for (char c : in.toCharArray()) { uc.append("\\\\u"); uc.append(Integer.toHexString(c).toUpperCase()); } return uc.toString(); } }); }
java
18
0.482569
77
31.117647
17
/** * <p> * Replaces characters outside the Basic Multilingual Plane with their name. * </p> * * @param value * The text to be matched * @return text with characters outside BMP replaced by their unicode * numbers or the given text unaltered */
function
def clean_data(df): categories = df.categories row = categories[0] category_colnames = re.sub("\d+", "", row) category_colnames = category_colnames.replace('-','') category_colnames = category_colnames.split(';') df[category_colnames] = df.categories.apply(get_categories_series) df.drop(columns=['categories'],inplace=True) return df
python
9
0.667568
70
40.222222
9
This function expands the categories feature into separate columns and their respective values and returns the dataframe INPUT: df: dataframe RETURN: df: cleaned dataframe
function
protected RespBase<Object> makeFailedResponse(AuthenticationToken token, String loginRedirectUrl, Throwable err) { String errmsg = !isNull(err) ? err.getMessage() : "Authentication failure"; RespBase<Object> resp = RespBase.create(sessionStatus()); resp.withCode(UNAUTHC).withMessage(errmsg); resp.forMap().put(config.getParam().getRedirectUrl(), loginRedirectUrl); resp.forMap().put(config.getParam().getApplication(), config.getServiceName()); resp.forMap().put(KEY_SERVICE_ROLE, KEY_SERVICE_ROLE_VALUE_IAMCLIENT); return resp; }
java
9
0.707276
114
64.777778
9
/** * Make login failed response message. * * @param token * e.g, {@link LogoutAuthenticationToken} * @param loginRedirectUrl * Login redirect URL * @param err * Exception object * @return * @see {@link com.wl4g.devops.iam.filter.AbstractIamAuthenticationFilter#makeFailedResponse()} */
function
public static bool TryRemoveHeader(this HttpContext httpContext, string headerName) { if (!httpContext.ResponseContainsHeader(headerName)) return true; try { httpContext.Response.Headers.Remove(headerName); return true; } catch (ArgumentException) { return false; } }
c#
11
0.520482
83
31
13
/// <summary> /// Used to remove a header (supplied via <see cref="headerName"/>) from the current /// <see cref="httpContext"/> /// </summary> /// <param name="httpContext">The current <see cref="HttpContext"/></param> /// <param name="headerName">The name of the HTTP header to remove</param> /// <returns></returns>
function
private static class FrameTimer { private static final int SAMPLES = 6; private static final long MAX_FRAME = 100; // Max time for one frame, to weed out spikes. private final long[] samples = new long[SAMPLES]; private int sampleIndex; public FrameTimer() { long currentTime = System.currentTimeMillis(); for (int i = SAMPLES - 1; i >= 0; i--) { samples[i] = currentTime - (SAMPLES - i) * TARGET_MILLIS_PER_FRAME; } } public long getTimeSinceLastFrame() { long currentTime = System.currentTimeMillis(); int id = sampleIndex - 1; if (id < 0) { id += SAMPLES; } long timeSinceLastSample = currentTime - samples[id]; // If the slice was too big, advance all the previous times by the diff. if (timeSinceLastSample > MAX_FRAME) { long diff = timeSinceLastSample - MAX_FRAME; for (int i = 0; i < SAMPLES; i++) { samples[i] += diff; } } long timeSinceOldestSample = currentTime - samples[sampleIndex]; samples[sampleIndex] = currentTime; sampleIndex = (sampleIndex + 1) % SAMPLES; return timeSinceOldestSample / (long) SAMPLES; } }
java
13
0.535792
97
33.6
40
/** * Uses averaging of the time between the past few frames to provide smooth * animation. */
class
def _get_quantization_type_for_static() -> Tuple[QuantType, QuantType]: arch = cpuinfo.get_cpu_info()["arch"].lower() if torch.cuda.is_available(): activation_type = weight_type = QuantType.QInt8 elif "x86" in arch: cpu_raw_data = cpuinfo.get_cpu_info()["brand_raw"].lower() if "intel" in cpu_raw_data and "xeon" in cpu_raw_data: activation_type = QuantType.QUInt8 weight_type = QuantType.QInt8 else: activation_type = weight_type = QuantType.QUInt8 else: activation_type = QuantType.QUInt8 weight_type = QuantType.QInt8 return activation_type, weight_type
python
13
0.633333
71
43.066667
15
Returns the quantization types for activations and weights, depending on the underlying hardware
function
Double_t StIstFastSimMaker::distortHit(const Double_t x, const Double_t res, const Double_t detLength) { if (fabs(x) > detLength) { LOG_WARN << "distortHit() - Generated hit is outside detector sensor plane" << endm; return x; } Double_t smeared_x; do { smeared_x = mRandom.Gaus(x, res); } while ( fabs(smeared_x) > detLength); return smeared_x; }
c++
9
0.639896
102
28.769231
13
/** * Calculates and returns new value for the local coordinate x by smearing it * acccording to a normal distribution N(mean, sigma) = N(x, res). The returned * value is constrained to be within the characteristic dimension detLength * provided by the user. */
function
def apply(self, klass, resolve_map={}): assert util.safe_issubclass(klass, ProtocolBase) resolved_property = make_property(self.property_name, {'type': resolve_map[self.refuri]}, resolve_map[self.refuri].__doc__) setattr(klass, self.property_name, resolved_property) klass.__propinfo__[self.property_name] = { "$ref": self.refuri, "type": resolve_map[self.refuri] }
python
11
0.524366
77
50.4
10
Attach this property to the provided class Args: klass: (ProtocolBase) The class wrapper to which this property should be attached. resolve_map: (dict) A map of URIs to resolved ProtocolBase objects.
function
public void apply(RegistrarImpl regImpl) { SvcReg oldReg = (SvcReg) regImpl.serviceByID.get(reg.item.serviceID); if (oldReg != null) regImpl.deleteService(oldReg, 0); regImpl.addService(reg); }
java
10
0.545788
73
38.142857
7
/** * Modifies the state of the Registrar by registering the service stored in the reg object. * Also needs to delete any existing service with the same serviceID; this can happen if a * service re-registers while an existing registration is in effect, because we don't log a * separate lease cancellation record for the existing registration in that case. * * @see RegistrarImpl.LocalLogHandler#applyUpdate */
function
public int push(int value) { ensureCapacity(nodeCount + 1); if (idAllocator == Integer.MAX_VALUE) { throw new IllegalStateException("unable to allocate more id"); } int id = idAllocator++; nodeCount++; nodes[nodeCount] = new Node(id, value); idToPosition.put(id, nodeCount); heapifyUp(nodeCount); return id; }
java
9
0.576826
74
32.166667
12
/** * Add a new node to the heap * @throws IllegalStateException when there is no new id available * @return Id of node */
function
function start(FB) { var id = meta("user_id"); if(id==""){ id="me"; } console.log('Welcome to profile.js!'); console.log('start has been called with FB object: ' + FB); FB.api("/me", function(response) { user = response; console.log('Doing this in profile.js, ' + response.name + '.'); if(id!="me"&&id!=user.id){ $("#status_div").hide(); } }); show_user_photos(id); var type = meta("type"); if(type=="about"){ getAbout(id); $("#about").show(); }else if(type=="feed"){ get_feed(id); }else{ $("#nav_tabs").show(); } }
javascript
16
0.566485
66
21
25
/* * start * * start is executed once the document is ready (equivalent to $('document').ready()), and * the FB object is loaded. All document functions should be defined in this document, * and functionality should start within this function. */
function
void* queue_from_stacks_dequeue(struct queue_from_stacks* qfs) { if (stack_isempty(qfs->s2)) { if (stack_isempty(qfs->s1)) { printf("Both are of the stacks are empty!\n"); } else if (!stack_isempty(qfs->s1)) { while (stack_isempty(qfs->s2)) { while (!stack_isempty(qfs->s1)) { void* val = stack_pop(qfs->s1); stack_push(qfs->s2, val); } } } else { while (stack_isempty(qfs->s1)) { while (!stack_isempty(qfs->s2)) { void* val = stack_pop(qfs->s2); stack_push(qfs->s1, val); } } } } return stack_pop(qfs->s2); }
c
19
0.576182
64
25
22
/* * This function should dequeue a value from a given queue-from-stacks and return the dequeued value. * * Parameter: * qfs - the queue-from-stacks from which a value is to be dequeued (may not be NULL) * * Return: * This function should return the value that was dequeued. */
function
func (lc *cachedLayerService) Fetch(dgst digest.Digest) (distribution.Layer, error) { ctxu.GetLogger(lc.ctx).Debugf("(*layerInfoCache).Fetch(%q)", dgst) now := time.Now() defer func() { ctxu.GetLoggerWithField(lc.ctx, "blob.fetch.duration", time.Since(now)). Infof("(*layerInfoCache).Fetch(%q)", dgst) }() atomic.AddUint64(&layerInfoCacheMetrics.Fetch.Requests, 1) available, err := lc.cache.Contains(lc.ctx, lc.repository.Name(), dgst) if err != nil { ctxu.GetLogger(lc.ctx).Errorf("error checking availability of %v@%v: %v", lc.repository.Name(), dgst, err) goto fallback } if available { meta, err := lc.cache.Meta(lc.ctx, dgst) if err != nil { ctxu.GetLogger(lc.ctx).Errorf("error fetching %v@%v from cache: %v", lc.repository.Name(), dgst, err) goto fallback } atomic.AddUint64(&layerInfoCacheMetrics.Fetch.Hits, 1) return newLayerReader(lc.driver, dgst, meta.Path, meta.Length) } fallback: atomic.AddUint64(&layerInfoCacheMetrics.Fetch.Misses, 1) layer, err := lc.LayerService.Fetch(dgst) if err != nil { return nil, err } if err := lc.cache.Add(lc.ctx, lc.repository.Name(), dgst); err != nil { ctxu.GetLogger(lc.ctx). Errorf("error caching repository relationship for %v@%v: %v", lc.repository.Name(), dgst, err) } if path, err := lc.resolveLayerPath(layer); err != nil { ctxu.GetLogger(lc.ctx). Errorf("error resolving path while caching %v@%v: %v", lc.repository.Name(), dgst, err) } else { if err := lc.cache.SetMeta(lc.ctx, dgst, cache.LayerMeta{Path: path, Length: layer.Length()}); err != nil { ctxu.GetLogger(lc.ctx).Errorf("error adding meta for %v@%v to cache: %v", lc.repository.Name(), dgst, err) } } return layer, err }
go
16
0.685849
109
39.571429
42
// Fetch checks for the availability of the layer in the repository via the // cache. If present, the metadata is resolved and the layer is returned. If // any operation fails, the layer is read directly from the upstream. The // results are cached, if possible.
function
public boolean checkBoundRa(List<Integer> tidSet , int lengthOfSequence){ boolean periodic = true; int length = tidSet.size(); int periods[] = new int[length+1]; int temp = 0; double avgPr = 0; int sum = 0; double sumDevi = 0; double stanDevi = 0; Collections.sort(tidSet); int firstPeriod = tidSet.get(0) - 0; if(firstPeriod<0 || firstPeriod>maxPeriodicity) periodic = false; else periods[0] = firstPeriod; for(int i=0; i<length-1; i++){ temp= tidSet.get(i+1) - tidSet.get(i); if (temp <0 || temp> maxPeriodicity){ periodic = false; break; } else{ periods[i+1] = temp; sum+=temp; } } int lastPeriod = lengthOfSequence-tidSet.get(length-1); if(lastPeriod<0 || lastPeriod>maxPeriodicity) periodic = false; else periods[length] = lastPeriod; if(periodic){ avgPr = sum/length; for(int j=0; j<periods.length; j++){ sumDevi += ((periods[j]-avgPr)*(periods[j]-avgPr)); } stanDevi = Math.sqrt(sumDevi/length); if(stanDevi > maxStandardDeviation) periodic = false; } return periodic; }
java
14
0.642066
73
24.833333
42
/**
 * Check if the frequent pattern is periodic. A periodic pattern must satisfy the MAX_PR threshold and
 * the MAX_STAN_DEV threshold.
 * @param tidSet
 * @param lengthOfSequence: the length of the currently processed sequence, used to calculate the last period of
 * this frequent pattern.
 * @return periodic : boolean result.
 */
function
public class IRCChannelTopicEvent extends IRCRawMessageEvent {
    /**
     * the channel that had its topic changed
     */
    public final IRCChannel channel;
    /**
     * the user that changed the topic
     */
    public final IRCUser user;
    /**
     * the new topic
     */
    public final String topic;

    /**
     * INTERNAL EVENT. YOU SHOULD NOT POST THIS YOURSELF.
     * @param connection the connection the channel is on
     * @param channel the channel whose topic changed
     */
    public IRCChannelTopicEvent(IRCConnection connection, IRCMessage rawMessage, IRCChannel channel, IRCUser user, String topic) {
        super(connection, rawMessage);
        this.channel = channel;
        this.user = user;
        this.topic = topic;
    }
}
java
8
0.718795
127
23.068966
29
/** * This event is published on the MinecraftForge.EVENTBUS bus whenever the topic of a channel changes. * It is also published once when joining a channel, if a topic is set. */
class
def prepare_transparent_pixels(rgba_array):
    if rgba_array.shape[INDEX_OF_THE_NUMBER_OF_CHANNELS_PER_PIXEL] != CORRECT_NUMBER_OF_CHANNELS_PER_PIXEL:
        error_info = "The passed image has the incorrect number of channels per pixel. " \
                     "The correct number is equal to {correct_number_of_channels_per_pixel}.".format(
                         correct_number_of_channels_per_pixel=CORRECT_NUMBER_OF_CHANNELS_PER_PIXEL)
        raise ValueError(error_info)
    transparent_pixels = []
    rows, cols, _ = np.where(
        rgba_array[:, :, [INDEX_OF_THE_VALUE_OF_ALPHA_CHANNEL]] < MINIMUM_VALUE_OF_THE_ALPHA_CHANNEL)
    for i in range(len(rows)):
        row_idx = int(rows[i])
        column_idx = int(cols[i])
        pixel_value = rgba_array[row_idx][column_idx].tolist()
        pixel_dictionary = {"row_idx": row_idx, "column_idx": column_idx, "value": pixel_value}
        transparent_pixels.append(pixel_dictionary)
    return transparent_pixels
python
11
0.58364
111
59.5
18
This function looks for the pixels whose alpha channel value is less than the value of 'MINIMUM_VALUE_OF_THE_ALPHA_CHANNEL'.
    :param rgba_array: an RGBA image converted into a numpy array (the array has the following shape (y, x, 4))
    :type rgba_array: numpy.ndarray (https://docs.scipy.org/doc/numpy/reference/generated/numpy.ndarray.html)
    :return: list of dictionaries. Each of these dictionaries has the following keys: 'row_idx', 'column_idx', 'value'.
             The key 'row_idx' represents the index of a pixel's row (integer - int).
             The key 'column_idx' represents the index of a pixel's column (integer - int).
             The key 'value' represents the RGBA color of a pixel (list which contains four integers).
function
[PipelineDisplayName(UpgradeConstants.Pipelines.Blocks.PatchEnvironmentJsonBlock)] public class PatchEnvironmentJsonBlock : PipelineBlock<Sitecore.Commerce.Core.FindEntityArgument, Sitecore.Commerce.Core.FindEntityArgument, CommercePipelineExecutionContext> { public override async Task<FindEntityArgument> Run(FindEntityArgument arg, CommercePipelineExecutionContext context) { Condition.Requires(arg).IsNotNull($"{this.Name}: The FindEntityArgument argument cannot be null or empty."); if (arg.EntityType != typeof(CommerceEnvironment)) { return arg; } try { if (arg.SerializedEntity != null) { arg.SerializedEntity = await this.MigrateEntity(arg.SerializedEntity, context.CommerceContext); } return arg; } catch (Exception ex) { context.CommerceContext.LogException(this.Name, ex); throw; } } private async Task<string> MigrateEntity(string entity, CommerceContext context) { try { JObject jObject = null; using (var reader = new StringReader(entity)) { using (var jsonReader = new JsonTextReader(reader) { DateParseHandling = DateParseHandling.DateTimeOffset }) { while (jsonReader.Read()) { if (jsonReader.TokenType == JsonToken.StartObject) { jObject = JObject.Load(jsonReader); UpgradeObsoleteTokens(jObject, context); } } } } return JsonConvert.SerializeObject(jObject); } catch (Exception ex) { var mes = ex.Message; await context.AddMessage( context.GetPolicy<KnownResultCodes>().Error, "FailedToMigrateEnvironment", new object[] { entity, ex }, $"Failed to migrate environment."); return entity; } } private static void UpgradeObsoleteTokens(JToken token, CommerceContext context) { JContainer container = token as JContainer; if (container == null) return; FindTokens(container, context); } private static void FindTokens(JToken containerToken, CommerceContext context) { if (containerToken.Type == JTokenType.Object) { foreach (JProperty child in containerToken.Children<JProperty>()) { FindTokens(child, context); } } else if (containerToken.Type == JTokenType.Array) { foreach (JToken child in containerToken.Children()) { FindTokens(child, context); } } else if (containerToken.Type == JTokenType.Property) { var property = containerToken as JProperty; if (property.Name.Equals("$type", StringComparison.OrdinalIgnoreCase) && property.Value.ToString().Equals("Sitecore.Commerce.Plugin.Customers.Cs.ProfilesSqlPolicy, Sitecore.Commerce.Plugin.Customers.Cs", StringComparison.OrdinalIgnoreCase)) { property.Value = "Plugin.Sample.Customers.CsMigration.ProfilesSqlPolicy, Plugin.Sample.Customers.CsMigration"; } else if (property.Value is JContainer) { foreach (JToken child in (JContainer)property.Value) { FindTokens(child, context); } } } } }
c#
20
0.511408
187
41.927083
96
/// <summary> /// Defines the patch environment Json block /// </summary> /// <seealso> /// <cref> /// Sitecore.Commerce.Core.ConditionalPipelineBlock{Sitecore.Commerce.Core.FindEntityArgument, Sitecore.Commerce.Core.FindEntityArgument, /// Sitecore.Commerce.Core.CommercePipelineExecutionContext} /// </cref> /// </seealso>
class
public class JoystickDrive extends CommandBase { Drivetrain drivetrain; Supplier<Double> throttle; Supplier<Double> wheel; Supplier<Boolean> quickturn; /** * Makes sure that the Drivetrain.java subsystem object exists for use in the * command */ public JoystickDrive(Drivetrain drivetrain, Supplier<Double> throttle, Supplier<Double> wheel, Supplier<Boolean> quickturn) { // Use addRequirements() here to declare subsystem dependencies. addRequirements(drivetrain); this.drivetrain = drivetrain; this.throttle = throttle; this.wheel = wheel; this.quickturn = quickturn; } // Called when the command is initially scheduled. @Override public void initialize() { drivetrain.setState(DrivetrainState.JOYSTICK_DRIVE); } // Called every time the scheduler runs while the command is scheduled. /** * Sends status of buttons and triggers on the trigger to the cheesyIshDrive * method in Drivetrain.java to have a certain drivetrain motor output. */ @Override public void execute() { drivetrain.cheesyIshDrive(throttle.get(), wheel.get(), quickturn.get()); SmartDashboard.putNumber("Throttle", throttle.get()); SmartDashboard.putNumber("Wheel", wheel.get()); SmartDashboard.putBoolean("QuickTurn", quickturn.get()); } // Called once the command ends or is interrupted. /** * When the program ends or is interrupted, the method stops the drivetrain motors * from moving. * @param interrupted whether the command has been interrupted or not */ @Override public void end(boolean interrupted) { drivetrain.stopDrivetrainMotors(); } // Returns true when the command should end (which is not until the robot command is interrupted) @Override public boolean isFinished() { return false; } }
java
10
0.718441
127
30.431034
58
/** * A command that runs throughout the teleoperated period, and constantly * checks the status of the joysticks to determine what output needs be * done by the motors, and sets the motors as such. */
class
func (cfh *CodeframeHelper) extractItemSequence() {
	var testletRefId, itemRefId, sequenceNumber string
	for _, cfBytes := range cfh.data["NAPCodeFrame"] {
		// iterate the nested json structure & extract containers
		gjson.GetBytes(cfBytes, "NAPCodeFrame.TestletList.Testlet").
			ForEach(func(key, value gjson.Result) bool {
				testletRefId = value.Get("NAPTestletRefId").String() // get the testlet refid
				value.Get("TestItemList.TestItem").
					ForEach(func(key, value gjson.Result) bool {
						itemRefId = value.Get("TestItemRefId").String() // get the item refid
						sn := value.Get("SequenceNumber").Int() + 1     // re-baseline
						sequenceNumber = strconv.Itoa(int(sn))          // convert to string
						if _, ok := cfh.itemSequence[itemRefId]; !ok {  // avoid null nodes
							cfh.itemSequence[itemRefId] = make(map[string]string, 0)
						}
						cfh.itemSequence[itemRefId][testletRefId] = sequenceNumber // store the lookup
						return true // keep iterating
					})
				return true // keep iterating
			})
	}
}
go
24
0.657598
82
47.5
22
//
// creates a lookup to return the sequence location of an
// item within a testlet
// NOTE: index needs baselining from 1 to align with testlet
// definitions - codeframe baselines from 0
//
function
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static Vector128<float> CrossProduct(Vector128<float> left, Vector128<float> right)
{
    if (Sse41.IsSupported || AdvSimd.Arm64.IsSupported)
    {
        var result = Multiply(CreateFromYZXW(left), CreateFromZXYW(right));
        result = MultiplyAddNegated(result, CreateFromZXYW(left), CreateFromYZXW(right));
        return BitwiseAnd(result, Vector128.Create(0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000).AsSingle());
    }
    else
    {
        return Vector128.Create(
            (left.GetY() * right.GetZ()) - (left.GetZ() * right.GetY()),
            (left.GetZ() * right.GetX()) - (left.GetX() * right.GetZ()),
            (left.GetX() * right.GetY()) - (left.GetY() * right.GetX()),
            0
        );
    }
}
c#
16
0.574913
115
44.368421
19
/// <summary>Computes the cross product of two vectors.</summary> /// <param name="left">The vector to multiply by <paramref name="right" />.</param> /// <param name="right">The vector which is used to multiply <paramref name="left" />.</param> /// <returns>The cross product of <paramref name="left" /> and <paramref name="right" />.</returns>
function
public class NetOfficeException : Exception { public NetOfficeException(string message) : base(message) { AppDomainId = AppDomain.CurrentDomain.Id; AppDomainFriendlyName = AppDomain.CurrentDomain.FriendlyName; AppDomainIsDefault = AppDomain.CurrentDomain.IsDefaultAppDomain(); } public NetOfficeException( Exception innerException) : base(null != innerException? innerException.Message : "<NoErrorMessage>", innerException) { AppDomainId = AppDomain.CurrentDomain.Id; AppDomainFriendlyName = AppDomain.CurrentDomain.FriendlyName; AppDomainIsDefault = AppDomain.CurrentDomain.IsDefaultAppDomain(); } public NetOfficeException(string message, Exception innerException) : base(message, innerException) { AppDomainId = AppDomain.CurrentDomain.Id; AppDomainFriendlyName = AppDomain.CurrentDomain.FriendlyName; AppDomainIsDefault = AppDomain.CurrentDomain.IsDefaultAppDomain(); } public bool AppDomainIsDefault { get; private set; } public int AppDomainId { get; private set; } public string AppDomainFriendlyName { get; private set; } }
c#
10
0.686243
152
50.833333
24
/// <summary>
/// Indicates an exception occurred in NetOffice.dll, not in a corresponding NetOffice dependency assembly
/// </summary>
class
func (a UDPPort) LayerType() gopacket.LayerType {
	switch a {
	case 53:
		return LayerTypeDNS
	case 4789:
		return LayerTypeVXLAN
	case 6343:
		return LayerTypeSFlow
	default:
		return gopacket.LayerTypePayload
	}
}
go
8
0.753488
49
17
12
// LayerType returns a LayerType that would be able to decode the
// application payload. It uses well-known ports such as 53 for DNS.
//
// Returns gopacket.LayerTypePayload for unknown/unsupported port numbers.
function
Page::Page(string entityName, int pageIndex, int entityType)
{
    if (entityType == 0)
    {
        this->pageType = entityType;
        tablesPage(entityName, pageIndex);
    }
    else if (entityType == 1)
    {
        this->pageType = entityType;
        matrixPage(entityName, pageIndex);
    }
}
c++
10
0.599338
59
22.307692
13
/** * @brief Construct a new Page:: Page object given the table name and page * index. When tables are loaded they are broken up into blocks of BLOCK_SIZE * and each block is stored in a different file named * "<tablename>_Page<pageindex>". For example, If the Page being loaded is of * table "R" and the pageIndex is 2 then the file name is "R_Page2". The page * loads the rows (or tuples) into a vector of rows (where each row is a vector * of integers). * * @param tableName * @param pageIndex */
function
public class RCC_CustomizerExample : MonoBehaviour { [Header("Current Car")] public RCC_CarControllerV3 car; [Header("UI Menus")] public GameObject wheelsMenu; public GameObject configurationMenu; public GameObject steeringAssistancesMenu; public GameObject colorsMenu; [Header("UI Sliders")] public Slider frontCamber; public Slider rearCamber; public Slider frontSuspensionDistances; public Slider rearSuspensionDistances; public Slider frontSuspensionDampers; public Slider rearSuspensionDampers; public Slider frontSuspensionSprings; public Slider rearSuspensionSprings; public Slider gearShiftingThreshold; public Slider clutchThreshold; [Header("UI Toggles")] public Toggle TCS; public Toggle ABS; public Toggle ESP; public Toggle SH; public Toggle counterSteering; public Toggle steeringSensitivity; public Toggle NOS; public Toggle turbo; public Toggle exhaustFlame; public Toggle revLimiter; public Toggle clutchMargin; public Toggle transmission; [Header("UI InputFields")] public InputField maxSpeed; public InputField maxBrake; public InputField maxTorque; [Header("UI Dropdown Menus")] public Dropdown drivetrainMode; void Start(){ CheckUIs (); } public void CheckUIs (){ if (!car) return; frontCamber.value = car.FrontLeftWheelCollider.camber; rearCamber.value = car.RearLeftWheelCollider.camber; frontSuspensionDistances.value = car.FrontLeftWheelCollider.wheelCollider.suspensionDistance; rearSuspensionDistances.value = car.RearLeftWheelCollider.wheelCollider.suspensionDistance; frontSuspensionDampers.value = car.FrontLeftWheelCollider.wheelCollider.suspensionSpring.damper; rearSuspensionDampers.value = car.RearLeftWheelCollider.wheelCollider.suspensionSpring.damper; frontSuspensionSprings.value = car.FrontLeftWheelCollider.wheelCollider.suspensionSpring.spring; rearSuspensionSprings.value = car.RearLeftWheelCollider.wheelCollider.suspensionSpring.spring; gearShiftingThreshold.value = car.gearShiftingThreshold; clutchThreshold.value = car.clutchInertia; TCS.isOn = car.TCS; ABS.isOn = car.ABS; ESP.isOn = car.ESP; SH.isOn = car.steeringHelper; steeringSensitivity.isOn = car.steerAngleSensitivityAdjuster; NOS.isOn = car.useNOS; turbo.isOn = car.useTurbo; exhaustFlame.isOn = car.useExhaustFlame; revLimiter.isOn = car.useRevLimiter; clutchMargin.isOn = car.useClutchMarginAtFirstGear; transmission.isOn = RCC_Settings.Instance.useAutomaticGear; maxSpeed.text = car.maxspeed.ToString(); maxBrake.text = car.brakeTorque.ToString(); maxTorque.text = car.engineTorque.ToString(); switch (car._wheelTypeChoise) { case RCC_CarControllerV3.WheelType.FWD: drivetrainMode.value = 0; break; case RCC_CarControllerV3.WheelType.RWD: drivetrainMode.value = 1; break; case RCC_CarControllerV3.WheelType.AWD: drivetrainMode.value = 2; break; case RCC_CarControllerV3.WheelType.BIASED: drivetrainMode.value = 3; break; } } public void OpenMenu(GameObject activeMenu){ if (activeMenu.activeInHierarchy) { activeMenu.SetActive (false); return; } wheelsMenu.SetActive (false); configurationMenu.SetActive (false); steeringAssistancesMenu.SetActive (false); colorsMenu.SetActive (false); activeMenu.SetActive (true); } public void CloseAllMenus(){ wheelsMenu.SetActive (false); configurationMenu.SetActive (false); steeringAssistancesMenu.SetActive (false); colorsMenu.SetActive (false); } public void SetCustomizationMode (bool state) { RCC_Customization.SetCustomizationMode (car, state); if(state) CheckUIs (); } public void SetFrontCambersBySlider (Slider slider) { RCC_Customization.SetFrontCambers (car, slider.value); } public void 
SetRearCambersBySlider (Slider slider) { RCC_Customization.SetRearCambers (car, slider.value); } public void TogglePreviewSmokeByToggle (Toggle toggle){ RCC_Customization.SetSmokeParticle (car, toggle.isOn); } public void TogglePreviewExhaustFlameByToggle (Toggle toggle){ RCC_Customization.SetExhaustFlame (car, toggle.isOn); } public void SetSmokeColorByColorPicker (ColorPickerBySliders color){ RCC_Customization.SetSmokeColor (car, 0, color.color); } public void SetHeadlightColorByColorPicker (ColorPickerBySliders color){ RCC_Customization.SetHeadlightsColor (car, color.color); } public void ChangeWheelsBySlider (Slider slider){ RCC_Customization.ChangeWheels (car, RCC_ChangableWheels.Instance.wheels[(int)slider.value].wheel); } public void SetFrontSuspensionTargetsBySlider (Slider slider){ RCC_Customization.SetFrontSuspensionsTargetPos (car, slider.value); } public void SetRearSuspensionTargetsBySlider (Slider slider){ RCC_Customization.SetRearSuspensionsTargetPos (car, slider.value); } public void SetFrontSuspensionDistancesBySlider (Slider slider){ RCC_Customization.SetFrontSuspensionsDistances (car, slider.value); } public void SetRearSuspensionDistancesBySlider (Slider slider){ RCC_Customization.SetRearSuspensionsDistances (car, slider.value); } public void SetGearShiftingThresholdBySlider (Slider slider){ RCC_Customization.SetGearShiftingThreshold (car, Mathf.Clamp(slider.value, .5f, .95f)); } public void SetClutchThresholdBySlider (Slider slider){ RCC_Customization.SetClutchThreshold (car, Mathf.Clamp(slider.value, .1f, .9f)); } public void SetDriveTrainModeByDropdown (Dropdown dropdown){ switch (dropdown.value) { case 0: RCC_Customization.SetDrivetrainMode (car, RCC_CarControllerV3.WheelType.FWD); break; case 1: RCC_Customization.SetDrivetrainMode (car, RCC_CarControllerV3.WheelType.RWD); break; case 2: RCC_Customization.SetDrivetrainMode (car, RCC_CarControllerV3.WheelType.AWD); break; } } public void SetSteeringSensitivityByToggle (Toggle toggle){ RCC_Customization.SetSteeringSensitivity (car, toggle.isOn); } public void SetNOSByToggle (Toggle toggle){ RCC_Customization.SetNOS (car, toggle.isOn); } public void SetTurboByToggle (Toggle toggle){ RCC_Customization.SetTurbo (car, toggle.isOn); } public void SetExhaustFlameByToggle (Toggle toggle){ RCC_Customization.SetUseExhaustFlame (car, toggle.isOn); } public void SetRevLimiterByToggle (Toggle toggle){ RCC_Customization.SetRevLimiter (car, toggle.isOn); } public void SetClutchMarginByToggle (Toggle toggle){ RCC_Customization.SetClutchMargin (car, toggle.isOn); } public void SetFrontSuspensionsSpringForceBySlider (Slider slider){ RCC_Customization.SetFrontSuspensionsSpringForce (car, Mathf.Clamp(slider.value, 10000f, 100000f)); } public void SetRearSuspensionsSpringForceBySlider (Slider slider){ RCC_Customization.SetRearSuspensionsSpringForce (car, Mathf.Clamp(slider.value, 10000f, 100000f)); } public void SetFrontSuspensionsSpringDamperBySlider (Slider slider){ RCC_Customization.SetFrontSuspensionsSpringDamper (car, Mathf.Clamp(slider.value, 1000f, 10000f)); } public void SetRearSuspensionsSpringDamperBySlider (Slider slider){ RCC_Customization.SetRearSuspensionsSpringDamper (car, Mathf.Clamp(slider.value, 1000f, 10000f)); } public void SetMaximumSpeedByInputField (InputField inputField){ RCC_Customization.SetMaximumSpeed (car, StringToFloat(inputField.text, 200f)); inputField.text = car.maxspeed.ToString (); } public void SetMaximumTorqueByInputField (InputField inputField){ RCC_Customization.SetMaximumTorque (car, 
StringToFloat(inputField.text, 2000f)); inputField.text = car.engineTorque.ToString (); } public void SetMaximumBrakeByInputField (InputField inputField){ RCC_Customization.SetMaximumBrake (car, StringToFloat(inputField.text, 2000f)); inputField.text = car.brakeTorque.ToString (); } public void RepairCar (){ RCC_Customization.RepairCar (car); } public void SetESP(Toggle toggle){ RCC_Customization.SetESP (car, toggle.isOn); } public void SetABS(Toggle toggle){ RCC_Customization.SetABS (car, toggle.isOn); } public void SetTCS(Toggle toggle){ RCC_Customization.SetTCS (car, toggle.isOn); } public void SetSH(Toggle toggle){ RCC_Customization.SetSH (car, toggle.isOn); } public void SetSHStrength(Slider slider){ RCC_Customization.SetSHStrength (car, slider.value); } public void SetTransmission(Toggle toggle){ RCC_Customization.SetTransmission (toggle.isOn); } public void SaveStats(){ RCC_Customization.SaveStats (car); } public void LoadStats(){ RCC_Customization.LoadStats (car); CheckUIs (); } private float StringToFloat(string stringValue, float defaultValue){ float result = defaultValue; float.TryParse(stringValue, out result); return result; } }
c#
15
0.786752
101
35.935622
233
/// <summary> /// A simple customizer example script used for receiving methods from UI elements and send them to RCC_Customization script. Also updates all UI elements for new spawned cars too. /// </summary>
class
class AddCustomBuildScriptGUI: """Class representing a window for editing a currently loaded install config in the GUI. Attributes ---------- root : InstallSynAppsGUI The top TK instance that opened this window master : Toplevel The main container Tk object viewFrame Tk frame that contains all widgets dropdown : OptionMenu dropdown menu for selecting from injector files applyButton : Button button that runs the apply method editPanel : ScrolledText Panel for editing the loaded injector file. """ def __init__(self, root, install_config): """Initializer for the AddCustomBuildScriptGUI class """ self.root = root self.master = Toplevel() self.master.title('Edit Custom Build Scripts') self.master.resizable(False, False) self.smallFont = tkFont.Font(family = "Helvetica", size = 10) self.largeFont = tkFont.Font(family = "Helvetica", size = 14) self.install_config = install_config self.viewFrame = Frame(self.master, relief = GROOVE, padx = 10, pady = 10) self.viewFrame.pack() self.customBuildModuleVar = StringVar() self.customBuildModuleVar.set(self.install_config.get_module_list()[0].name) self.legal_names = [] self.currentModule = self.install_config.get_module_list()[0] for module in self.install_config.get_module_list(): self.legal_names.append(module.name) self.module_dropdown = ttk.Combobox(self.viewFrame, textvariable=self.customBuildModuleVar, values=self.legal_names, width='40') self.module_dropdown.grid(row = 0, column = 0, columnspan = 3, padx=10, pady=10) self.customBuildModuleVar.trace('w', self.loadCustomBuild) Button(self.viewFrame, text='Save Build Script', command = self.applyChanges).grid(row = 0, column = 3, columnspan = 1) Button(self.viewFrame, text='Exit', command = self.exit).grid( row = 0, column = 5, columnspan = 1) Button(self.viewFrame, text='Delete Build Script', command = self.deleteBuildScript).grid( row = 0, column = 4, columnspan = 1) self.editPanel = ScrolledText.ScrolledText(self.viewFrame, height = 37, width = 100) self.editPanel.grid(row = 1, column = 0, columnspan = 6) self.loadCustomBuild() self.master.mainloop() def deleteBuildScript(self): """Function that deletes custom build script associated with module """ if self.currentModule.custom_build_script_path is None: self.root.showErrorMessage('ERROR', 'ERROR - No build script associated with this module.', force_popup=True) else: os.remove(self.currentModule.custom_build_script_path) self.currentModule.custom_build_script_path = None self.root.updateAllRefs(self.install_config) self.reloadPanel(ifsame=True) def loadCustomBuild(self, *args): """Wrapper that reloads the panel based on selection """ self.reloadPanel() def reloadPanel(self, ifsame=False): """Reloads Panel based on selection """ found = False for module in self.install_config.get_module_list(): if module.name == self.customBuildModuleVar.get(): self.currentModule = module found = True if not found and not ifsame: return self.editPanel.delete('1.0', END) self.editPanel.insert(INSERT, '#\n') self.editPanel.insert(INSERT, '# Custom Build script for module {}\n'.format(self.currentModule.name)) if self.currentModule.custom_build_script_path is None: self.editPanel.insert(INSERT, '# Currently, module {} will not apply a custom build script.\n'.format(self.currentModule.name)) else: self.editPanel.insert(INSERT, '# Building {} will use script from {}/{}\n'.format(self.currentModule.name, '$CONFIGURE/customBuildScripts', os.path.basename(self.currentModule.custom_build_script_path))) self.editPanel.insert(INSERT, '#\n') if 
self.currentModule.custom_build_script_path is not None: custom_build = open(self.currentModule.custom_build_script_path, 'r') script = custom_build.readlines() for line in script: self.editPanel.insert(INSERT, line) custom_build.close() def applyChanges(self): """Method that reads the edit panel, and writes a custom build script with $MODULE_NAME as the name, and an OS appropriate extension """ temp = self.editPanel.get('1.0', END).splitlines() if platform == 'win32': script_ending = '.bat' else: script_ending = '.sh' if not self.root.configure_path is None: self.root.showWarningMessage('Error', 'Currently loaded Install configuration has not been saved. Please save it first to be able to save build scripts.') return build_script_folder = os.path.join(self.root.configure_path, 'customBuildScripts') if not os.path.exists(build_script_folder): os.mkdir(build_script_folder) build_script_file = os.path.join(build_script_folder, '{}{}'.format(self.currentModule.name, script_ending)) if os.path.exists(build_script_file): os.remove(build_script_file) for module in self.install_config.get_module_list(): if module.name == self.currentModule.name: module.custom_build_script_path = build_script_file fp = open(build_script_file, 'w') for line in temp: if not line.startswith('#'): fp.write(line + '\n') fp.close() self.reloadPanel(ifsame=True) self.root.updateAllRefs(self.install_config) self.root.updateConfigPanel() self.root.unsaved_changes = True def exit(self): """Applies changes and exits window """ self.master.destroy()
python
17
0.63443
215
40.297297
148
Class representing a window for editing a currently loaded install config in the GUI. Attributes ---------- root : InstallSynAppsGUI The top TK instance that opened this window master : Toplevel The main container Tk object viewFrame Tk frame that contains all widgets dropdown : OptionMenu dropdown menu for selecting from injector files applyButton : Button button that runs the apply method editPanel : ScrolledText Panel for editing the loaded injector file.
class
void MenuItem::SetMarked(bool marked)
{
	if (marked && fMenu != NULL && fMenu->Type() == CHOICE_MENU) {
		fMenu->SetChoiceText(Label());
	}

	if (fIsMarked == marked)
		return;

	if (marked && fMenu != NULL && fMenu->Type() == CHOICE_MENU) {
		MenuItem *markedItem = fMenu->FindMarked();
		if (markedItem != NULL)
			markedItem->SetMarked(false);
	}

	fIsMarked = marked;

	if (fMenu != NULL)
		fMenu->Draw(this);
}
c++
10
0.635036
63
23.235294
17
/** Marks or unmarks a menu item. A marked menu item usually gets a visual * clue like a checkmark that distinguishes it from others. * For menus of type CHOICE_MENU, there can only be one marked item - the * chosen one. */
function
func (m Model) renderRow(rowIndex int, last bool) string { numColumns := len(m.columns) row := m.GetVisibleRows()[rowIndex] highlighted := rowIndex == m.rowCursorIndex columnStrings := []string{} rowStyle := row.Style.Copy() if m.focused && highlighted { rowStyle = rowStyle.Inherit(m.highlightStyle) } stylesInner, stylesLast := m.styleRows() for columnIndex, column := range m.columns { cellStyle := rowStyle.Copy().Inherit(column.style).Inherit(m.baseStyle) var str string if column.key == columnKeySelect { if row.selected { str = m.selectedText } else { str = m.unselectedText } } else if entry, exists := row.Data[column.key]; exists { switch entry := entry.(type) { case StyledCell: str = fmt.Sprintf("%v", entry.Data) cellStyle = entry.Style.Copy().Inherit(cellStyle) default: str = fmt.Sprintf("%v", entry) } } var rowStyles borderStyleRow if !last { rowStyles = stylesInner } else { rowStyles = stylesLast } if columnIndex == 0 { cellStyle = cellStyle.Inherit(rowStyles.left) } else if columnIndex < numColumns-1 { cellStyle = cellStyle.Inherit(rowStyles.inner) } else { cellStyle = cellStyle.Inherit(rowStyles.right) } cellStr := cellStyle.Render(limitStr(str, column.width)) columnStrings = append(columnStrings, cellStr) } return lipgloss.JoinHorizontal(lipgloss.Bottom, columnStrings...) }
go
17
0.690442
73
29.5
46
// This is somewhat complicated but at the moment splitting this out feels like // it would just make things harder to read. May revisit later. // nolint: cyclop
function
def _check_counters_on_profile(self, profile, total_outgoing=0,
                               pending_outgoing=0, direct_incoming=0,
                               total_incoming=0, starred_public=0):
    self.assertEqual(profile.total_outgoing_request_count, total_outgoing)
    self.assertEqual(profile.pending_outgoing_request_count, pending_outgoing)
    self.assertEqual(profile.direct_incoming_request_count, direct_incoming)
    self.assertEqual(profile.total_incoming_request_count, total_incoming)
    self.assertEqual(profile.starred_public_request_count, starred_public)
python
7
0.564626
73
55.615385
13
Check that the counters match the expected values. Args: profile (reviewboard.accounts.models.LocalSiteProfile): The profile object to test counts on. total_outgoing (int): The expected number of total outgoing review requests. pending_outgoing (int): The expected number of pending outgoing review requests. direct_incoming (int): The expected number of review requests assigned directly to the user. total_incoming (int): The expected number of review requests assigned either directly or indirectly to the user. starred_public (int): The expected number of public review requests starred by the user.
function
def probe_location(self):
    logging.info('Determining optimal probe sequences')
    for sample in self.samples:
        for gene in sample.gene:
            for window in gene.windows:
                passed = False
                for sliding in window.sliding:
                    if sliding.datastore and sliding.mean == window.max and sliding.mean >= window.min \
                            and not passed:
                        sample.location = sliding.location
                        passed = True
python
17
0.503636
108
49.090909
11
Find the 'best' probes for each gene by evaluating the percent identity of the probe to the best recorded percent identity for that organism + gene pair. Extract the location of the probe, so that all the alleles in that location can be recovered
function
def delete(evaluator):
    if evaluator.environment["selected_items"]:
        warning_message = f"You are going to delete the {evaluator.args.ogc} " \
                          f"with the following name (if available) and id:\n"
        for x in evaluator.environment["selected_items"]:
            try:
                if "error" in x:
                    pass
                elif "@iot.id" in x:
                    if "name" in x:
                        warning_message += f"id = {str(x['@iot.id'])} - name = {str(x['name'])}\n"
                    else:
                        warning_message += f"id = {str(x['@iot.id'])}\n"
            except AttributeError as attr:
                print("missing " + str(attr))
                pass
        for x in evaluator.environment["non_critical_failures"]:
            if "error" in x:
                print(x["error"]["message"])
        proceed = input(warning_message + "\nProceed?(y/N)")
        if proceed == "y":
            if bool(evaluator.environment["selected_items"]):
                for x in evaluator.environment["selected_items"]:
                    try:
                        if "error" in x:
                            evaluator.environment["non_critical_failures"].append(x["error"])
                        elif "@iot.id" in x:
                            result = ogc_util.delete_item(x.get("@iot.id"), evaluator.args.ogc, evaluator.environment)
                            conditions.add_result(evaluator, result, "results")
                    except AttributeError as attr:
                        print("missing " + str(attr))
                        pass
                evaluator.environment["selected_items"] = []
        else:
            evaluator.environment["selected_items"] = []
            print("Aborted deletion")
    else:
        print("trying to delete but no item defined or found")
python
23
0.481046
118
48.315789
38
Delete the selected items from the GOST db; asks the user for confirmation before deleting.
function
[TestMethod] public async Task Cancelling_SingleEvent() { const int ExpectedCounterValueBeforeRun = 0; const int ExpectedCounterValueAfterRun = 0; const int MinimumOverheadDelayMs = 200; TimeSpan overheadDelay = TimeSpan.FromMilliseconds(MinimumOverheadDelayMs); TimeSpan twoSecondsTimeSpan = TimeSpan.FromSeconds(2); TimeSpan threeSecondsTimeSpan = TimeSpan.FromSeconds(3); var eventFiredCounter = 0; var eventMock = new Mock<Event>(); EventsReactor reactor = SetupReactorWithConsoleLogger(Options.Create(new EventsReactorOptions() { EventRoundByMilliseconds = MinimumOverheadDelayMs })); using var cts = new CancellationTokenSource(); reactor.EventReady += (sender, evt, timeDrift) => { Assert.AreEqual(reactor, sender); Assert.IsNotNull(evt); if (evt == eventMock.Object) { eventFiredCounter++; } }; Task reactorTask = reactor.RunAsync(cts.Token); reactor.Push(eventMock.Object, twoSecondsTimeSpan); await Task.Delay(overheadDelay).ContinueWith(prev => { Assert.AreEqual(ExpectedCounterValueBeforeRun, eventFiredCounter, $"Events counter does not match: Expected {ExpectedCounterValueBeforeRun}, got {eventFiredCounter}."); }); reactor.Cancel(eventMock.Object.Id); await Task.Delay(threeSecondsTimeSpan).ContinueWith(prev => { Assert.AreEqual(ExpectedCounterValueAfterRun, eventFiredCounter, $"Events counter does not match: Expected {ExpectedCounterValueAfterRun}, got {eventFiredCounter}."); }); }
c#
18
0.669822
180
48.735294
34
/// <summary> /// Checks that <see cref="EventsReactor.Cancel(Guid)"/> does what it should. /// </summary> /// <returns>A <see cref="Task"/> representing the asynchronous unit test.</returns>
function
func TestRegisterHandler(t *testing.T) {
	s, err := getServices()
	if err != nil {
		t.Fatal(err)
	}
	data := url.Values{}
	data.Set("name", registerName)
	rr := send(
		t,
		HandlerRegister(s),
		registerDomain,
		"/owid/api/v1/register",
		data)
	v := decompressAsString(t, rr)
	if v == "" || strings.Contains(v, "html") == false {
		t.Error("handler didn't return HTML")
		return
	}
	c, err := s.store.GetCreator(registerDomain)
	if err != nil {
		t.Errorf("get failed with '%s'", err)
		return
	}
	if registerDomain != c.domain {
		t.Errorf("expected domain '%s', found '%s'", registerDomain, c.domain)
		return
	}
	if registerName != c.name {
		t.Errorf("expected name '%s', found '%s'", registerName, c.name)
		return
	}
	if c.privateKey == "" {
		t.Error("no private key")
		return
	}
	if c.publicKey == "" {
		t.Error("no public key")
		return
	}
}
go
9
0.623256
72
20.525
40
// TestRegisterHandler uses the HTTP handler to add a new domain to the OWID // store and verifies that the response is expected and that the store has been // updated to contain the new information.
function
func (z *Writer) Write(buf []byte) (int, error) { if !z.Header.done { if err := z.writeHeader(); err != nil { return 0, err } } if debugFlag { debug("input buffer len=%d index=%d", len(buf), z.idx) } zn := len(z.data) var n int for len(buf) > 0 { if z.idx == 0 && len(buf) >= zn { if err := z.compressBlock(buf[:zn]); err != nil { return n, err } n += zn buf = buf[zn:] continue } m := copy(z.data[z.idx:], buf) n += m z.idx += m buf = buf[m:] if debugFlag { debug("%d bytes copied to buf, current index %d", n, z.idx) } if z.idx < len(z.data) { if debugFlag { debug("need more data for compression") } return n, nil } if err := z.compressBlock(z.data); err != nil { return n, err } z.idx = 0 } return n, nil }
go
13
0.540404
62
18.825
40
// Write compresses data from the supplied buffer into the underlying io.Writer. // Write does not return until the data has been written.
function
func Run(path string, handler func(net.Conn) string, errs func(error)) (func(), error) {
	closed := false
	os.Remove(path)
	listener, err := Listen(path)
	if err != nil {
		return nil, err
	}
	go func() {
		for {
			conn, err := listener.Accept()
			if closed {
				return
			}
			if err != nil {
				errs(err)
				continue
			}
			_, err = conn.Write([]byte(handler(conn)))
			if err != nil {
				errs(err)
			}
			conn.Close()
		}
	}()
	return func() {
		closed = true
		listener.Close()
	}, nil
}
go
16
0.563126
88
16.241379
29
// Run creates and runs a simple server that will call handler for each // connection and write back whatever that function returns to the client. // Content is served over a Named Pipe on Windows or a Unix domain socket on // Linux and Mac. // Returns a function that can be called to stop the server.
function
@Override
public void run() {
    super.run();
    LOG.info("---->>>>---->>>>---- {} of {} connection(s) are still open ---->>>>---->>>>----",
            openConnections.size(), sumOfConnections);
    this.logCallerStacktraces();
    LOG.info("----<<<<----<<<<---- {} of {} connection(s) are still open ----<<<<----<<<<----",
            openConnections.size(), sumOfConnections);
}
java
8
0.50134
99
40.555556
9
/** * This method is called when the ConnectionMonitor is registered as * shutdown hook. * * @see Thread#run() */
function
public void Replace(Type serviceType, object service)
{
    if (service == null)
    {
        throw Error.ArgumentNull("service");
    }

    RemoveAll(serviceType, _ => true);
    Insert(serviceType, 0, service);
}
c#
11
0.487455
53
30.111111
9
/// <summary> /// Replaces all existing services for the given service type with the given /// service instance. /// </summary> /// <param name="serviceType">The service type.</param> /// <param name="service">The service instance.</param>
function
def nodes_iterator(self, yield_fusions=True):
    for step_idx, step in enumerate(self.graph_state.steps):
        node = step['node']
        if yield_fusions:
            if isinstance(node, (FusionBase)) and not isinstance(node, ExpressionFusionParameters):
                for fusion_idx, fnode in enumerate(node.contained_nodes()):
                    yield (step_idx, node, fusion_idx, fnode)
        yield (step_idx, node, None, None)
python
14
0.592751
103
57.75
8
Yields a tuple of length 4 with the step idx and parameters of each node. Optionally when in a fusion yields tuples containing the fusion internal step id and node for each internal node. Args: yield_fusions (bool, optional): Whether to yield fusion nodes. Defaults to True. Yields: [Tuple[int, Parameters, Optional[int], Optional[Parameters]]]: Tuple containing node_idx, node, fusion_idx, fusion_node
function
def _compute_qq_residuals(y_true: np.ndarray, y_pred: np.ndarray) -> pd.Series:
    n = len(y_true)
    df = pd.DataFrame({"res": sorted((y_true - y_pred))})
    m, s = df["res"].mean(), df["res"].std()
    df["z_res"] = df["res"].apply(lambda x: (x-m)/s)
    df["rank"] = df.index+1
    df["percentile"] = df["rank"].apply(lambda x: x/(n+1))
    df["q_theoretical"] = norm.ppf(df["percentile"])
    return pd.Series({
        "quantiles": df["q_theoretical"].values,
        "residuals": df["z_res"].values,
    })
python
13
0.48398
64
44.692308
13
Convenience function to compute various scalar performance measures and return them in a pd.Series. Parameters ---------- y_true : np.ndarray True binary target data labels. y_pred : np.ndarray Target scores of the model. Returns ------- pd.Series Theoretical quantiles and associated actual residuals.
function
public final class _Constants { private _Constants(){} /** * Convenient e.g. for reflective invocation */ public static final Object[] emptyObjects = new Object[0]; /** * Convenient e.g. for reflective invocation */ public static final Class<?>[] emptyClasses = new Class[0]; /** * Convenient e.g. for reflective invocation */ public static final Class<?>[] classesOfObject = new Class[] { Object.class }; /** * Convenient e.g. for toArray conversions */ public static final String[] emptyStringArray = new String[0]; /** * empty array of byte */ public static final byte[] emptyBytes = new byte[0]; /** * empty array of Annotation */ public static final Annotation[] emptyAnnotations = new Annotation[0]; /** * Writer that does nothing */ public static final Writer nopWriter = new Writer() { @Override public void write(char[] cbuf, int off, int len) throws IOException { } @Override public void flush() throws IOException { } @Override public void close() throws IOException { } }; /** * OutputStream that does nothing */ public static final OutputStream nopOutputStream = new OutputStream() { @Override public void write(int b) throws IOException { } }; /** * PrintStream that does nothing */ public static final PrintStream nopPrintStream = new PrintStream(nopOutputStream); @Primary private static final class PrimaryAnnotated {} public static final Primary ANNOTATION_PRIMARY = PrimaryAnnotated.class.getAnnotation(Primary.class); }
java
12
0.632701
105
26.241935
62
/** * <h1>- internal use only -</h1> * <p> * A collection of commonly used constants. * </p> * <p> * <b>WARNING</b>: Do <b>NOT</b> use any of the classes provided by this package! <br/> * These may be changed or removed without notice! * </p> * * @since 2.0 */
class
class Dijkstra { struct edge { int to; long long distance; }; int N; std::vector<std::vector<edge>> adj; public: static const long long inf; Dijkstra(int n = 0) : N(n), adj(n) { } int size() { return N; } void add_edge(int from, int to, long long distance) { throw_if_invalid_index(from); throw_if_invalid_index(to); if (distance < 0) throw; adj[from].push_back({ to, distance }); } void solve(int s, std::vector<long long>& D, std::vector<int>& P) { D.assign(N, inf); P.assign(N, -1); using pii = std::pair<int,int>; std::priority_queue<pii, std::vector<pii>, std::greater<pii>> Q; std::vector<bool> used(N, false); D[s] = 0; Q.push({0, s}); while (!Q.empty()) { int v = Q.top().second; Q.pop(); if (used[v]) continue; used[v] = true; for (auto edge : adj[v]) { int to = edge.to; long long len = edge.distance; if (D[v] + len < D[to]) { D[to] = D[v] + len; P[to] = v; Q.push({D[to], to}); } } } } private: void throw_if_invalid_index(int index) { if (index < 0 || index >= N) throw "index out of range"; } }
c++
17
0.463757
72
29.75
44
// Use Dijkstra(ver: priority_queue<>) in most cases.
// This has better performance than Dijkstra(ver: set<>).
// Memory: O(V + E)
// NOTE: directed, multi-edge, self-loop, no-negative-weight
class
def pca_train(groups, arrayname='norm', xmin=-np.inf, xmax=np.inf):
    xdat, ydat = groups2matrix(groups, arrayname, xmin=xmin, xmax=xmax)
    ret = PCA().fit(ydat)
    labels = [get_label(g) for g in groups]
    return Group(x=xdat, arrayname=arrayname, labels=labels, ydat=ydat,
                 xmin=xmin, xmax=xmax, model=ret, mean=ret.mean_,
                 components=ret.components_,
                 variances=ret.explained_variance_ratio_)
python
9
0.634146
71
55.5
8
use a list of data groups to train a Principal Component Analysis Arguments --------- groups list of groups to use as components arrayname string of array name to be fit (see Note 2) ['norm'] xmin x-value for start of fit range [-inf] xmax x-value for end of fit range [+inf] Returns ------- group with trained PCA or N model, to be used with pca_fit Notes ----- 1. The group members for the components must match each other in data content and array names. 2. arrayname can be one of `norm` or `dmude`
function
def interpolate(sequence):
    t = sequence[:, 3]
    t[t < 0] += 1000
    t[t > 1000] -= 1000
    sequence = sequence[t.argsort()]
    first = sequence[0]
    last = sequence[-1]
    if first[3] > 0:
        value = last.copy()
        value[3] -= 1000
        sequence = np.vstack((value, sequence))
    if last[3] < 1000:
        value = first.copy()
        value[3] += 1000
        sequence = np.vstack((sequence, value))
    p = sequence[:, :3]
    t = sequence[:, 3]
    assert np.array_equiv(np.unique(t), t)
    tck, _ = interpolate.splprep(p.T, u=t, s=0, k=1)
    return partial(interpolate.splev, tck=tck)
python
10
0.486994
56
33.65
20
Create parametric functions given a sequence of points. :param sequence: A numpy sequence of points. :return: A function to interpolate points at times t.
function
public int numSquares(int n) {
    if (Math.ceil(Math.sqrt(n)) == Math.floor(Math.sqrt(n))) return 1;
    while (n % 4 == 0) n = n / 4;
    if (n % 8 == 7) return 4;
    Set<Integer> squares = new HashSet<>();
    for (int i = 1; i <= n / 2; i++) {
        squares.add(i * i);
    }
    for (int i = 1; i <= n / 2; i++) {
        if (squares.contains(i) && squares.contains(n - i)) return 2;
    }
    return 3;
}
java
11
0.438178
74
34.538462
13
/**
 * This solution is based on Lagrange's theorem that any number can be made up by summing 4 square numbers.
 * There is another variation of the same which says that we can form any number using 3 squares
 * if it's not of the form 4^a(8b + 7). So we first check if it's one by checking if the number
 * provided is itself a perfect square.
 * Then we keep dividing n by 4 until it's no longer divisible by 4;
 * then we check if it's of the form (8b + 7) by checking num % 8 == 7; if it is, then return 4.
 * Else, we check for 2, and that's basically checked in a brute-force way.
 * If none of these checks out, we simply return 3 since that is the only option left.
 *
 * @param n
 * @return
 */
function
TEST(TreeSynchronizerTest, syncSimpleTreeThenDestroy) { DebugScopedSetImplThread impl; Vector<int> ccLayerDestructionList; CCLayerTreeSettings settings; OwnPtr<CCLayerTreeHostImpl> hostImpl = CCLayerTreeHostImpl::create(settings, 0); RefPtr<LayerChromium> oldLayerTreeRoot = MockLayerChromium::create(&ccLayerDestructionList); oldLayerTreeRoot->addChild(MockLayerChromium::create(&ccLayerDestructionList)); oldLayerTreeRoot->addChild(MockLayerChromium::create(&ccLayerDestructionList)); int oldTreeRootLayerId = oldLayerTreeRoot->id(); int oldTreeFirstChildLayerId = oldLayerTreeRoot->children()[0]->id(); int oldTreeSecondChildLayerId = oldLayerTreeRoot->children()[1]->id(); OwnPtr<CCLayerImpl> ccLayerTreeRoot = TreeSynchronizer::synchronizeTrees(oldLayerTreeRoot.get(), nullptr, hostImpl.get()); expectTreesAreIdentical(oldLayerTreeRoot.get(), ccLayerTreeRoot.get(), hostImpl.get()); oldLayerTreeRoot->removeAllChildren(); RefPtr<LayerChromium> newLayerTreeRoot = LayerChromium::create(); ccLayerTreeRoot = TreeSynchronizer::synchronizeTrees(newLayerTreeRoot.get(), ccLayerTreeRoot.release(), hostImpl.get()); expectTreesAreIdentical(newLayerTreeRoot.get(), ccLayerTreeRoot.get(), hostImpl.get()); ASSERT_EQ(3u, ccLayerDestructionList.size()); EXPECT_TRUE(ccLayerDestructionList.contains(oldTreeRootLayerId)); EXPECT_TRUE(ccLayerDestructionList.contains(oldTreeFirstChildLayerId)); EXPECT_TRUE(ccLayerDestructionList.contains(oldTreeSecondChildLayerId)); }
c++
10
0.792208
126
66
23
// Constructs a very simple tree, synchronizes it, then synchronizes to a totally new tree. All layers from the old tree should be deleted.
function
def haversine(lat1: float, lon1: float, lat2: float, lon2: float, radius: float = 6378.0) -> float:
    R: float = 2.0
    RADIUS: float = radius
    lon1: float = math.radians(lon1)
    lat1: float = math.radians(lat1)
    lon2: float = math.radians(lon2)
    lat2: float = math.radians(lat2)
    dlon: float = lon2 - lon1
    dlat: float = lat2 - lat1
    arc = math.sin(dlat / 2) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
    distance = R * math.asin(math.sqrt(arc)) * RADIUS
    return distance
python
11
0.619962
99
42.5
12
Calculates the distance in kilometers between two points on the Earth. The points are
    the starting and stopping latitudes and longitudes. This will work with any sphere
    provided a radius is given.

    Args:
        lat1 (float): Starting point latitude in degrees.
        lon1 (float): Starting point longitude in degrees.
        lat2 (float): Stopping point latitude in degrees.
        lon2 (float): Stopping point longitude in degrees.
        radius (float, optional): Radius of the sphere to measure. Default: 6378.0

    Returns:
        float: The distance given in the same units as the radius.
function
def wrap(
    func=None, *, ingress=None, egress=None, caller=None, name=None, dflt_wrap=Wrap
):
    if _should_use_wrapx(func, ingress, egress, caller):
        return Wrapx(func, ingress, egress, caller=caller, name=name)
    else:
        return dflt_wrap(func, ingress, egress, name=name)
python
10
0.671233
83
40.857143
7
Wrap a function, optionally transforming interface, input and output. :param func: The wrapped function :param ingress: The incoming data transformer. It determines the argument properties (name, kind, default and annotation) as well as the actual input of the wrapped function. :param egress: The outgoing data transformer. It also takes precedence over the wrapped function to determine the return annotation of the ``Wrap`` instance :return: A callable instance wrapping ``func`` Consider the following function. >>> def f(w, /, x: float = 1, y=2, *, z: int = 3): ... return w + x * y ** z ... >>> assert f(0) == 8 >>> assert f(1,2) == 17 == 1 + 2 * 2 ** 3 See that ``f`` is restricted to use ``z`` as keyword only argument kind: >>> f(1, 2, 3, 4) Traceback (most recent call last): ... TypeError: f() takes from 1 to 3 positional arguments but 4 were given and ``w`` has position only argument kind: >>> f(w=1, x=2, y=3, z=4) Traceback (most recent call last): ... TypeError: f() got some positional-only arguments passed as keyword arguments: 'w' Say we wanted a version of this function that didn't have the argument kind restrinctions, where the annotation of ``x`` was ``int`` and where the default of ``z`` was ``10`` instead of ``3``, and doesn't have an annotation. We can do so using the following ingress function: >>> def ingress(w, x: int = 1, y: int=2, z = 10): ... return (w,), dict(x=x, y=y, z=z) The ingress function serves two purposes: - Redefining the signature (i.e. the argument names, kinds, defaults, and annotations (not including the return annotation, which is taken care of by the egress argument). - Telling the wrapper how to get from that interface to the interface of the wrapped function. If we also wanted to add a return_annotation, we could do so via an ``egress`` function argument: >>> def egress(wrapped_func_output) -> float: ... return wrapped_func_output # because here we don't want to do anything extra Now we can use these ingress and egress functions to get the version of ``f`` of our dreams: >>> h = wrap(f, ingress=ingress, egress=egress) Let's see what the signature of our new function looks like: >>> from inspect import signature >>> str(signature(h)) '(w, x: int = 1, y: int = 2, z=10) -> float' Now let's see that we can actually use this new function ``h``, without the restrictions of argument kind, getting the same results as the wrapped ``f``, but with default ``z=10``. What we wanted (but couldn't) do with ``f``: >>> h(1, 2, 3, 4) # == 1 + 2 * 3 ** 4 163 >>> h(w=1, x=2, y=3, z=4) 163 >>> assert h(0) == h(0, 1) == h(0, 1, 2) == 0 + 1 * 2 ** 10 == 2 ** 10 == 1024 Note that ``wrap`` can also be used as a decorator "factory", for instance to wrap functions at definition time, and if we change ``caller`` it will automatically use ``Wrapx`` instead of ``Wrap`` to wrap the function. >>> def iterize(func, args, kwargs): ... first_arg_val = next(iter(kwargs.values())) ... return list(map(func, first_arg_val)) >>> >>> @wrap(caller=iterize) ... def func(x, y=2): ... return x + y ... >>> isinstance(func, Wrapx) True >>> func([1, 2, 3, 4]) [3, 4, 5, 6] For more examples, see also the .. seealso:: ``Wrap`` class. ``Wrapx`` class.
function
public static void compileSelection(View view) { StringList results = new StringList(); ICoffeeScriptParser parser = new CoffeeScriptParser(); TextArea textArea = view.getTextArea(); Buffer buffer = view.getBuffer(); DefaultErrorSource errorSource = getErrorSource(view); ParserConfig config = ParserConfig.forCompiling(buffer, errorSource); errorSource.clear(); if (textArea.getSelectionCount() == 0) { results.add(parser.compile(textArea.getText(), config)); } else { for (Selection sel : textArea.getSelection()) { config.line = sel.getStartLine(); config.column = sel.getStart() - buffer.getLineStartOffset(config.line); results.add( parser.compile(textArea.getSelectedText(sel), config)); } } if (errorSource.getErrorCount() == 0) { Buffer outputBuffer = jEdit.newFile(view.getEditPane()); outputBuffer.insert(0, results.join("\n").trim()); outputBuffer.setMode("javascript"); outputBuffer.setDirty(false); } }
java
14
0.582436
80
43.740741
27
/** * Compiles text of selection(s) into a new buffer. * * The text of each selection or or the whole buffer (if no selection * exists) is compiled with CoffeeScript. If successful the result will be * opened in a new buffer. Errors will be forwarded to ErrorList. * * @param view the current View */
function
def _recursivePivotMean(self, segment: MessageSegment):
    if not segment.values:
        segment.analyzer.analyze()
    mymean = segment.mean()
    if segment.length >= 4:
        pivot = segment.length//2
        leftSegment = MessageSegment(segment.analyzer, segment.offset, pivot)
        rightSegment = MessageSegment(segment.analyzer, segment.offset + pivot, segment.length - pivot)
        returnSegments = list()
        if abs(leftSegment.mean() - mymean) > self._meanThreshold:
            returnSegments.extend(self._recursivePivotMean(leftSegment))
        else:
            returnSegments.append(leftSegment)
        if abs(rightSegment.mean() - mymean) > self._meanThreshold:
            returnSegments.extend(self._recursivePivotMean(rightSegment))
        else:
            returnSegments.append(rightSegment)
        # if abs(lsm - rsm) > .1:
        return returnSegments
    else:
        return [segment]
python
12
0.597236
107
47.285714
21
Recursively split the segment in half, calculate the mean for the values of each of the two resulting sub-segments, and compare each of them to the original segments mean. If a sub-segment is sufficiently different from its parent (meanThreshold = .02) further split the sub-segment. :param segment: One message segment that should be segmented. :return: List of segments after the splitting.
function
public void startMining() throws InterruptedException {
    while (true) {
        if (!this.connection.hasTransactions()) {
            Thread.sleep(100);
        } else if (this.connection.hasBlocks()) {
            this.removeAlreadyMinedTransactions();
        } else {
            makeBlockAndMineIt();
        }
    }
}
java
11
0.509589
55
32.272727
11
/** * Start to wait for transactions and mine them when received. * * @throws InterruptedException if the thread has an error when sleeping */
function
public class StrictRedirectUriValidator : IRedirectUriValidator
{
    protected bool StringCollectionContainsString(IEnumerable<string> uris, string requestedUri)
    {
        if (uris == null) return false;
        return uris.Contains(requestedUri, StringComparer.OrdinalIgnoreCase);
    }

    public virtual Task<bool> IsRedirectUriValidAsync(string requestedUri, Client client)
    {
        return Task.FromResult(StringCollectionContainsString(client.RedirectUris, requestedUri));
    }

    public virtual Task<bool> IsPostLogoutRedirectUriValidAsync(string requestedUri, Client client)
    {
        return Task.FromResult(StringCollectionContainsString(client.PostLogoutRedirectUris, requestedUri));
    }
}
c#
13
0.708763
112
47.5625
16
/// <summary> /// Default implementation of redirect URI validator. Validates the URIs against /// the client's configured URIs. /// </summary>
class
pub fn normal_for_face<F>(&self, i: usize, mut f: F) -> Normal
where
    F: FnMut(&T) -> Normal,
{
    let Triangle { x, y, z } = self.polygons[i];
    let x = Vector3::from(f(&self.vertices[x]));
    let y = Vector3::from(f(&self.vertices[y]));
    let z = Vector3::from(f(&self.vertices[z]));
    let a = z - x;
    let b = z - y;
    a.cross(b).normalize().into()
}
rust
11
0.495146
62
33.416667
12
/// Calculate the normal for face. This is a `flat` shading /// /// You must supply a function that can be used to lookup /// The position which is needed to calculate the normal
function
public Location createFirstLocationFromRoute(DirectionsRoute route) {
    List<Point> coordinates = route.routeOptions().coordinates();
    Point origin = coordinates.get(FIRST_COORDINATE);
    Location forcedLocation = new Location(FORCED_LOCATION);
    forcedLocation.setLatitude(origin.latitude());
    forcedLocation.setLongitude(origin.longitude());
    return forcedLocation;
}
java
8
0.771429
69
47.25
8
/** * If navigation begins, a location update is sometimes needed to force a * progress change update as soon as navigation is started. * <p> * This method creates a location update from the first coordinate (origin) that created * the route. * * @param route with list of coordinates * @return {@link Location} from first coordinate * @since 0.10.0 */
function
func TestLogRedirector(t *testing.T) { lines := []string{ "no prefix #1\n", "[TRACE] trace line #1\n", "[TRACE] trace line #2\n", "no prefix #2\n", "[DEBUG] debug line #1\n", "[DEBUG] debug line #2\n", "[INFO] info line #1\n", "no prefix #3\n", "[INFO] info line #2\n", "[WARN] warning line #1\n", "[WARN] warning line #2\n", "[ERROR] error line #1\n", "[ERROR] error line #2\n", "no prefix #4\n", "[TRACE] trace line #3\n", "[DEBUG] debug line #3\n", "[INFO] info line #3\n", "[WARN] warning line #3\n", "[ERROR] error line #3\n", "no prefix #5\n", } var traces []string var debugs []string var infos []string var warnings []string var errors []string ld := &LogRedirector{ enabled: true, writers: map[string]func(string) error{ tfTracePrefix: func(msg string) error { traces = append(traces, msg) return nil }, tfDebugPrefix: func(msg string) error { debugs = append(debugs, msg) return nil }, tfInfoPrefix: func(msg string) error { infos = append(infos, msg) return nil }, tfWarnPrefix: func(msg string) error { warnings = append(warnings, msg) return nil }, tfErrorPrefix: func(msg string) error { errors = append(errors, msg) return nil }, }, } for _, line := range lines { for len(line) > 0 { sz := 16 if sz > len(line) { sz = len(line) } n, err := ld.Write([]byte(line[:sz])) assert.Nil(t, err) assert.Equal(t, n, sz) line = line[sz:] } } assert.Equal(t, 3, len(traces)) assert.Equal(t, 3+5, len(debugs)) assert.Equal(t, 3, len(infos)) assert.Equal(t, 3, len(warnings)) assert.Equal(t, 3, len(errors)) }
go
20
0.591507
42
22.56338
71
// TestLogDirector ensures that logging redirects to the right place.
function
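The LogRedirector implementation itself is not shown in this record; the assertions only pin down that prefixed lines go to their matching sink and that the five unprefixed lines end up in the debug sink (hence len(debugs) == 3 + 5). A Python sketch of just that routing behavior, ignoring the chunked-write buffering the test also exercises:

PREFIXES = {
    "[TRACE] ": "trace",
    "[DEBUG] ": "debug",
    "[INFO] ": "info",
    "[WARN] ": "warn",
    "[ERROR] ": "error",
}

def route(lines):
    sinks = {level: [] for level in PREFIXES.values()}
    for line in lines:
        for prefix, level in PREFIXES.items():
            if line.startswith(prefix):
                sinks[level].append(line[len(prefix):])
                break
        else:
            # Lines without a recognized prefix fall back to the debug sink,
            # matching the test's expectation of 3 + 5 debug entries.
            sinks["debug"].append(line)
    return sinks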
private async Task ReactionAdded(Cacheable<IUserMessage, ulong> before, ISocketMessageChannel after, SocketReaction reaction)
{
    if (reaction.User.Value.IsBot)
    {
        return;
    }
    var chn = reaction.Channel as SocketGuildChannel;
    if (!guildsDefinition.reactRoles.ContainsKey(chn.Guild.Id))
    {
        return;
    }
    bool foundMessage = false;
    ReactRolesDefinition rrDef = null;
    string msgGroup = "";
    foreach (string group in guildsDefinition.reactRoles[chn.Guild.Id].Keys)
    {
        if (guildsDefinition.reactRoles[chn.Guild.Id][group].FirstOrDefault(x => x.messageID == reaction.MessageId && x.emoji == reaction.Emote.ToString()) != null)
        {
            foundMessage = true;
            msgGroup = group;
            rrDef = guildsDefinition.reactRoles[chn.Guild.Id][group].FirstOrDefault(x => x.messageID == reaction.MessageId && x.emoji == reaction.Emote.ToString());
            break;
        }
    }
    if (!foundMessage)
    {
        return;
    }
    if (string.IsNullOrEmpty(msgGroup))
    {
        SocketRole realRole = chn.Guild.Roles.FirstOrDefault(x => x.Name.ToLower() == rrDef.role.ToLower());
        if (realRole != null)
        {
            await AssignRole(realRole, chn.Guild, reaction.UserId);
        }
    }
    else
    {
        for (int i = 0; i < guildsDefinition.reactRoles[chn.Guild.Id][msgGroup].Count; i++)
        {
            if (chn.Guild.GetUser(reaction.UserId).Roles.FirstOrDefault(x => x.Name == guildsDefinition.reactRoles[chn.Guild.Id][msgGroup][i].role) != null)
            {
                var m = await reaction.Channel.SendMessageAsync($"{reaction.User.Value.Mention}, you can only have one role within group {msgGroup}.");
                _ = Clean(m);
                return;
            }
        }
        SocketRole realRole = chn.Guild.Roles.FirstOrDefault(x => x.Name.ToLower() == rrDef.role.ToLower());
        if (realRole != null)
        {
            await AssignRole(realRole, chn.Guild, reaction.UserId);
        }
    }
}
c#
23
0.4908
164
43.660714
56
/// <summary>
/// When a user adds a reaction, check if we have to assign a role for that reaction.
/// </summary>
/// <param name="before"></param>
/// <param name="after"></param>
/// <param name="reaction"></param>
/// <returns></returns>
function
def VideoToMp3(targetDir):
    Files = listdir(str(targetDir))
    print("Processing: " + str(len(Files)))
    try:
        mkdir(str(targetDir) + "Output")
    except:
        print("directory already exists")
    for file in Files:
        try:
            clip = VideoFileClip(str(targetDir) + file)
            clip.audio.write_audiofile(str(targetDir) + "Output/" + str(file.split('.')[0]) + ".mp3")
        except:
            print("damaged or unsupported file")
    print("Process completed!")
python
18
0.583333
102
35.071429
14
Takes a path as an argument, scans it, and converts each video file it finds to an MP3.
P.S. doesn't support filtering; creates an output folder.
** targetDir: the path to scan for files
function
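VideoToMp3 builds paths by plain string concatenation, so the argument needs a trailing path separator. A hedged usage sketch (the directory path is a placeholder; the imports are what the snippet appears to rely on, with VideoFileClip most likely coming from moviepy 1.x):

from os import listdir, mkdir               # names the snippet calls directly
from moviepy.editor import VideoFileClip    # assumed source of VideoFileClip

# Note the trailing slash: the function concatenates targetDir with each file name.
VideoToMp3("/path/to/videos/")
# MP3 files are written to /path/to/videos/Output/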
public static File persist(
    final IncrementalIndex index,
    final Interval dataInterval,
    File outDir,
    IndexSpec indexSpec
) throws IOException
{
  return persist(index, dataInterval, outDir, indexSpec, new BaseProgressIndicator());
}
java
8
0.714829
88
28.333333
9
/**
 * This is *not* thread-safe and havok will ensue if this is called and writes are still occurring
 * on the IncrementalIndex object.
 *
 * @param index        the IncrementalIndex to persist
 * @param dataInterval the Interval that the data represents
 * @param outDir       the directory to persist the data to
 *
 * @return the index output directory
 *
 * @throws java.io.IOException if an IO error occurs persisting the index
 */
function
fn check_for_help_and_version_str(&self, arg: &OsStr) -> ClapResult<()> {
    debugln!("Parser::check_for_help_and_version_str;");
    debug!(
        "Parser::check_for_help_and_version_str: Checking if --{} is help or version...",
        arg.to_str().unwrap()
    );
    if arg == "help" && self.is_set(AS::NeedsLongHelp) {
        sdebugln!("Help");
        return Err(self._help(true));
    }
    if arg == "version" && self.is_set(AS::NeedsLongVersion) {
        sdebugln!("Version");
        return Err(self._version(true));
    }
    sdebugln!("Neither");
    Ok(())
}
rust
13
0.504688
93
36.705882
17
// Retrieves the names of all args the user has supplied thus far, except required ones
// because those will be listed in self.required
function
def create(
    cls,
    device: str,
    source_module: str,
    entry_point: str = None,
    image_uri: str = None,
    job_name: str = None,
    code_location: str = None,
    role_arn: str = None,
    hyperparameters: Dict[str, Any] = None,
    input_data: Union[str, Dict, S3DataSourceConfig] = None,
    output_data_config: OutputDataConfig = None,
    checkpoint_config: CheckpointConfig = None,
    aws_session: AwsSession = None,
) -> LocalQuantumJob:
    create_job_kwargs = prepare_quantum_job(
        device=device,
        source_module=source_module,
        entry_point=entry_point,
        image_uri=image_uri,
        job_name=job_name,
        code_location=code_location,
        role_arn=role_arn,
        hyperparameters=hyperparameters,
        input_data=input_data,
        output_data_config=output_data_config,
        checkpoint_config=checkpoint_config,
        aws_session=aws_session,
    )
    job_name = create_job_kwargs["jobName"]
    if os.path.isdir(job_name):
        raise ValueError(
            f"A local directory called {job_name} already exists. "
            f"Please use a different job name."
        )
    session = aws_session or AwsSession()
    algorithm_specification = create_job_kwargs["algorithmSpecification"]
    if "containerImage" in algorithm_specification:
        image_uri = algorithm_specification["containerImage"]["uri"]
    else:
        image_uri = retrieve_image(Framework.BASE, session.region)
    with _LocalJobContainer(image_uri) as container:
        env_variables = setup_container(container, session, **create_job_kwargs)
        container.run_local_job(env_variables)
        container.copy_from("/opt/ml/model", job_name)
        with open(os.path.join(job_name, "log.txt"), "w") as log_file:
            log_file.write(container.run_log)
        if "checkpointConfig" in create_job_kwargs:
            checkpoint_config = create_job_kwargs["checkpointConfig"]
            if "localPath" in checkpoint_config:
                checkpoint_path = checkpoint_config["localPath"]
                container.copy_from(checkpoint_path, os.path.join(job_name, "checkpoints"))
        run_log = container.run_log
    return LocalQuantumJob(f"local:job/{job_name}", run_log)
python
15
0.590705
95
44.444444
54
Creates and runs job by setting up and running the customer script in a local docker container.

Args:
    device (str): ARN for the AWS device which is primarily accessed for the execution
        of this job.
    source_module (str): Path (absolute, relative or an S3 URI) to a python module to be
        tarred and uploaded. If `source_module` is an S3 URI, it must point to a
        tar.gz file. Otherwise, source_module may be a file or directory.
    entry_point (str): A str that specifies the entry point of the job, relative to
        the source module. The entry point must be in the format
        `importable.module` or `importable.module:callable`. For example,
        `source_module.submodule:start_here` indicates the `start_here` function
        contained in `source_module.submodule`. If source_module is an S3 URI,
        entry point must be given. Default: source_module's name
    image_uri (str): A str that specifies the ECR image to use for executing the job.
        `image_uris.retrieve_image()` function may be used for retrieving the ECR image URIs
        for the containers supported by Braket. Default = `<Braket base image_uri>`.
    job_name (str): A str that specifies the name with which the job is created.
        Default: f'{image_uri_type}-{timestamp}'.
    code_location (str): The S3 prefix URI where custom code will be uploaded.
        Default: f's3://{default_bucket_name}/jobs/{job_name}/script'.
    role_arn (str): This field is currently not used for local jobs. Local jobs will use
        the current role's credentials. This may be subject to change.
    hyperparameters (Dict[str, Any]): Hyperparameters accessible to the job.
        The hyperparameters are made accessible as a Dict[str, str] to the job.
        For convenience, this accepts other types for keys and values, but `str()`
        is called to convert them before being passed on. Default: None.
    input_data (Union[str, S3DataSourceConfig, dict]): Information about the training
        data. Dictionary maps channel names to local paths or S3 URIs. Contents found
        at any local paths will be uploaded to S3 at
        f's3://{default_bucket_name}/jobs/{job_name}/data/{channel_name}. If a local
        path, S3 URI, or S3DataSourceConfig is provided, it will be given a default
        channel name "input". Default: {}.
    output_data_config (OutputDataConfig): Specifies the location for the output of the job.
        Default: OutputDataConfig(s3Path=f's3://{default_bucket_name}/jobs/{job_name}/data',
        kmsKeyId=None).
    checkpoint_config (CheckpointConfig): Configuration that specifies the location where
        checkpoint data is stored.
        Default: CheckpointConfig(localPath='/opt/jobs/checkpoints',
        s3Uri=f's3://{default_bucket_name}/jobs/{job_name}/checkpoints').
    aws_session (AwsSession): AwsSession for connecting to AWS Services.
        Default: AwsSession()

Returns:
    LocalQuantumJob: The representation of a local Braket Job.
function
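A hedged usage sketch of the classmethod above. The device ARN, script path, and hyperparameter are placeholders; the import path and the run_log attribute are assumptions based on this record (the code appears to come from the amazon-braket-sdk), and running it locally requires Docker:

from braket.jobs.local import LocalQuantumJob  # assumed import path

job = LocalQuantumJob.create(
    device="arn:aws:braket:::device/quantum-simulator/amazon/sv1",  # placeholder ARN
    source_module="algorithm_script.py",                            # placeholder script
    hyperparameters={"shots": 100},                                 # placeholder value
)
print(job.run_log)  # run log captured from the local container (assumed attribute)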
def boolean_iter(iterable):
    def inner_check():
        check, items = tee(iterable)
        try:
            next(check)
        except StopIteration:
            yield False
        else:
            yield True
        yield from items
    values = inner_check()
    return next(values), values
python
10
0.534653
36
24.333333
12
Check if an iterable returns at least one value, without consuming it.

The function returns a tuple containing a boolean flag indicating if the original
iterator is empty or not, and the original un-consumed iterator.
function
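boolean_iter relies on itertools.tee, so the snippet assumes `from itertools import tee`. A small usage sketch showing that the emptiness check does not consume any values:

from itertools import tee  # required by boolean_iter

has_items, values = boolean_iter(x * x for x in range(3))
if has_items:
    print(list(values))  # [0, 1, 4] — nothing was lost to the emptiness check
else:
    print("generator was empty")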
public class BasicCassandraPersistentEntity<T> extends BasicPersistentEntity<T, CassandraPersistentProperty>
        implements CassandraPersistentEntity<T>, ApplicationContextAware {

    private static final CassandraPersistentEntityMetadataVerifier DEFAULT_VERIFIER = new CompositeCassandraPersistentEntityMetadataVerifier();

    private Boolean forceQuote;

    private CassandraPersistentEntityMetadataVerifier verifier = DEFAULT_VERIFIER;

    private CqlIdentifier tableName;

    private NamingStrategy namingStrategy = NamingStrategy.INSTANCE;

    private @Nullable StandardEvaluationContext spelContext;

    /**
     * Create a new {@link BasicCassandraPersistentEntity} given {@link TypeInformation}.
     *
     * @param typeInformation must not be {@literal null}.
     */
    public BasicCassandraPersistentEntity(TypeInformation<T> typeInformation) {
        this(typeInformation, DEFAULT_VERIFIER);
    }

    /**
     * Create a new {@link BasicCassandraPersistentEntity} with the given {@link TypeInformation}. Will default the table
     * name to the entity's simple type name.
     *
     * @param typeInformation must not be {@literal null}.
     * @param verifier must not be {@literal null}.
     */
    public BasicCassandraPersistentEntity(TypeInformation<T> typeInformation,
            CassandraPersistentEntityMetadataVerifier verifier) {

        super(typeInformation, CassandraPersistentPropertyComparator.INSTANCE);

        setVerifier(verifier);
    }

    /**
     * Create a new {@link BasicCassandraPersistentEntity} with the given {@link TypeInformation}. Will default the table
     * name to the entity's simple type name.
     *
     * @param typeInformation must not be {@literal null}.
     * @param verifier must not be {@literal null}.
     * @param comparator must not be {@literal null}.
     * @since 2.1
     */
    protected BasicCassandraPersistentEntity(TypeInformation<T> typeInformation,
            CassandraPersistentEntityMetadataVerifier verifier, Comparator<CassandraPersistentProperty> comparator) {

        super(typeInformation, comparator);

        setVerifier(verifier);
    }

    protected CqlIdentifier determineTableName() {

        Table annotation = findAnnotation(Table.class);

        if (annotation != null) {
            return determineName(annotation.value(), annotation.forceQuote());
        }

        return IdentifierFactory.create(getNamingStrategy().getTableName(this), false);
    }

    CqlIdentifier determineName(String value, boolean forceQuote) {

        if (!StringUtils.hasText(value)) {
            return IdentifierFactory.create(getNamingStrategy().getTableName(this), forceQuote);
        }

        String name = Optional.ofNullable(this.spelContext).map(it -> SpelUtils.evaluate(value, it)).orElse(value);

        Assert.state(name != null, () -> String.format("Cannot determine default name for %s", this));

        return IdentifierFactory.create(name, forceQuote);
    }

    /* (non-Javadoc)
     * @see org.springframework.data.mapping.model.BasicPersistentEntity#addAssociation(org.springframework.data.mapping.Association)
     */
    @Override
    public void addAssociation(Association<CassandraPersistentProperty> association) {
        throw new UnsupportedCassandraOperationException("Cassandra does not support associations");
    }

    /* (non-Javadoc)
     * @see org.springframework.data.mapping.model.BasicPersistentEntity#doWithAssociations(org.springframework.data.mapping.AssociationHandler)
     */
    @Override
    public void doWithAssociations(AssociationHandler<CassandraPersistentProperty> handler) {}

    /* (non-Javadoc)
     * @see org.springframework.data.cassandra.core.mapping.CassandraPersistentEntity#isCompositePrimaryKey()
     */
    @Override
    public boolean isCompositePrimaryKey() {
        return isAnnotationPresent(PrimaryKeyClass.class);
    }

    /* (non-Javadoc)
     * @see org.springframework.data.mapping.model.BasicPersistentEntity#verify()
     */
    @Override
    public void verify() throws MappingException {

        super.verify();

        this.verifier.verify(this);

        if (this.tableName == null) {
            setTableName(determineTableName());
        }
    }

    /* (non-Javadoc)
     * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
     */
    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {

        Assert.notNull(context, "ApplicationContext must not be null");

        spelContext = new StandardEvaluationContext();
        spelContext.addPropertyAccessor(new BeanFactoryAccessor());
        spelContext.setBeanResolver(new BeanFactoryResolver(context));
        spelContext.setRootObject(context);
    }

    /* (non-Javadoc)
     * @see org.springframework.data.cassandra.core.mapping.CassandraPersistentEntity#setForceQuote(boolean)
     */
    @Override
    public void setForceQuote(boolean forceQuote) {

        boolean changed = !Boolean.valueOf(forceQuote).equals(this.forceQuote);

        this.forceQuote = forceQuote;

        if (changed) {
            setTableName(IdentifierFactory.create(getTableName().asInternal(), forceQuote));
        }
    }

    /* (non-Javadoc)
     * @see org.springframework.data.cassandra.core.mapping.CassandraPersistentEntity#setTableName(org.springframework.data.cassandra.core.cql.CqlIdentifier)
     */
    @Override
    public void setTableName(CqlIdentifier tableName) {

        Assert.notNull(tableName, "CqlIdentifier must not be null");

        this.tableName = tableName;
    }

    /**
     * Set the {@link NamingStrategy} to use.
     *
     * @param namingStrategy must not be {@literal null}.
     * @since 3.0
     */
    public void setNamingStrategy(NamingStrategy namingStrategy) {

        Assert.notNull(namingStrategy, "NamingStrategy must not be null");

        this.namingStrategy = namingStrategy;
    }

    NamingStrategy getNamingStrategy() {
        return namingStrategy;
    }

    /* (non-Javadoc)
     * @see org.springframework.data.cassandra.core.mapping.CassandraPersistentEntity#getTableName()
     */
    @Override
    public CqlIdentifier getTableName() {
        return Optional.ofNullable(this.tableName).orElseGet(this::determineTableName);
    }

    /**
     * @param verifier The verifier to set.
     */
    public void setVerifier(CassandraPersistentEntityMetadataVerifier verifier) {
        this.verifier = verifier;
    }

    /**
     * @return the verifier.
     */
    @SuppressWarnings("unused")
    public CassandraPersistentEntityMetadataVerifier getVerifier() {
        return this.verifier;
    }

    /* (non-Javadoc)
     * @see org.springframework.data.cassandra.core.mapping.CassandraPersistentEntity#isTupleType()
     */
    @Override
    public boolean isTupleType() {
        return false;
    }

    /* (non-Javadoc)
     * @see org.springframework.data.cassandra.core.mapping.CassandraPersistentEntity#isUserDefinedType()
     */
    @Override
    public boolean isUserDefinedType() {
        return false;
    }
}
java
15
0.770779
154
29.56338
213
/**
 * Cassandra specific {@link BasicPersistentEntity} implementation that adds Cassandra specific metadata.
 *
 * @author Alex Shvid
 * @author Matthew T. Adams
 * @author John Blum
 * @author Mark Paluch
 */
class
public static int[] find(String value, char sep, int start, int end) {
    int count = 0;
    for (int i = start; i < end; i++) {
        if (value.charAt(i) == sep)
            count++;
    }
    int[] positions = new int[count + 1];
    if (count > 0) {
        int j = 0;
        for (int i = start; i < end; i++) {
            if (value.charAt(i) == sep) {
                positions[j++] = i;
                if (j == count)
                    break;
            }
        }
    }
    positions[count] = end;
    return positions;
}
java
13
0.368679
70
29.7
20
/**
 * Find all occurrences of the separator character in the sub-string.
 *
 * @param value entire string
 * @param sep separator character to search for
 * @param start starting index of sub-string to search (inclusive)
 * @param end ending index of sub-string to search (exclusive)
 * @return indexes of all occurrences of <code>sep</code>, in order, plus one extra index equal to <code>end</code>
 * @throws StringIndexOutOfBoundsException if <code>start</code> is less than zero or <code>end</code> is
 *           greater than the length of <code>value</code>
 */
function
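The Java helper above returns every separator index in [start, end) plus a trailing sentinel equal to end, which lets callers slice fields without special-casing the last one. An illustrative Python equivalent of the same contract (not part of the original code):

def find(value, sep, start, end):
    positions = [i for i in range(start, end) if value[i] == sep]
    positions.append(end)  # sentinel, mirroring positions[count] = end
    return positions

s = "a,bb,ccc"
print(find(s, ",", 0, len(s)))  # [1, 4, 8]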
function normalize(opts) {
  opts = opts || {};
  for (var key in exports.normalizers) {
    if (exports.normalizers.hasOwnProperty(key)) {
      exports.normalizers[key](opts);
    }
  }
  if (!opts.listen || !opts.listen.length) {
    throw new Error('Not listening for any events. Preboot not going to do anything.');
  }
  return opts;
}
javascript
11
0.598383
91
30
12
/**
 * Normalize options so user can enter shorthand and it is
 * expanded as appropriate for the client code
 */
function
def pair_issue(issue_list1, issue_list2):
    dict1 = {}
    pairs = []
    for i in issue_list1:
        s_i = "-".join([i[0], str(i[1]), i[2]])
        dict1[s_i] = i
    for j in issue_list2:
        s_j = "-".join([j[0], str(j[1]), j[2]])
        if s_j in dict1:
            pairs.append((dict1[s_j], j))
    return pairs
python
13
0.469325
47
28.727273
11
Associates pairs of issues originating from original and canonical repositories.

:param issue_list1: list of IssueDir
:type issue_list1: array
:param issue_list2: list of IssueDir
:type issue_list2: array
:return: list containing tuples of issue pairs [(issue1, issue2), (...)]
:rtype: list
function
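pair_issue keys each issue by joining its first three fields, so anything indexable with a string id at [0], a number or date at [1], and a string at [2] works. A usage sketch with made-up issue tuples (the field meanings are assumptions for illustration, not taken from the original IssueDir type):

list1 = [("gazette", 1900, "a"), ("gazette", 1901, "a")]
list2 = [("gazette", 1901, "a"), ("herald", 1901, "b")]

print(pair_issue(list1, list2))
# [(('gazette', 1901, 'a'), ('gazette', 1901, 'a'))]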
func (mc *MessageCard) Validate() error {
    if mc.ValidateFunc != nil {
        return mc.ValidateFunc()
    }
    if (mc.Text == "") && (mc.Summary == "") {
        return fmt.Errorf("invalid message card: summary or text field is required")
    }
    return nil
}
go
10
0.65
78
25.777778
9
// Validate validates a MessageCard calling ValidateFunc if defined,
// otherwise, a default validation occurs.
//
// Deprecated: use (messagecard.MessageCard).Validate instead.
function
def read_simulated(simu_file):
    dateparse = lambda y, d: pd.datetime.strptime(''.join([y, d]), '%Y%j')
    temp = pd.read_csv(simu_file, index_col='date', delim_whitespace=True,
                       parse_dates={'date': ['YEAR', 'DAY']},
                       date_parser=dateparse,
                       na_values=[-9999, -999, -99, 'NaN', -999.000])
    return(temp)
python
12
0.48995
74
48.875
8
This function takes a single file name from the simulated data and reads it
as a pandas data frame. Currently, only a single column holds the data (the third one);
the first two hold the date and thus form the index column.

Return value is a pandas dataframe
function
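read_simulated expects a whitespace-delimited file whose first two columns are YEAR and day-of-year (DAY), which it fuses into a datetime index via '%Y%j'. A hedged usage sketch with a throwaway input file (the file name and data column name are placeholders; note that pd.datetime is removed in recent pandas, so the snippet as written targets an older pandas version):

import pandas as pd  # the snippet assumes pandas is imported as pd

with open("simulated.txt", "w") as f:
    f.write("YEAR DAY flow\n")
    f.write("2001 1 3.2\n")
    f.write("2001 2 -9999\n")  # -9999 becomes NaN via na_values

df = read_simulated("simulated.txt")
print(df.index[0])  # 2001-01-01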
public TypeDef AddExistingType(Type existingType, string newNameSpace, string name, ITypeResolver resolver = null)
{
    #region Start
    ExistingType exs = new ExistingType(existingType, newNameSpace, name);
    TypeDef dd = Module.Find(exs.FullName, !exs.FullName.Contains("+"));
    if (dd != null)
    {
        Module.Types.Remove(dd);
    }
    if (resolver == null)
        resolver = new BasicTypeResolver();
    TypeDefUser userT = new TypeDefUser(exs.Namespace, exs.Name);
    #endregion
    #region Resolve
    {
        for (int e = 0; e < exs.Interfaces.Count(); e++)
        {
            InterfaceImpl _i = exs.Interfaces.ToArray()[e];
            ITypeDefOrRef reffed = null;
            TypeDef importedInterface = _i.Interface.ScopeType.ResolveTypeDef();
            ResolveData dat = new ResolveData(importedInterface);
            if (dat.ImportType != null)
                reffed = resolver.ResolveType(this, dat, newNameSpace, resolver);
            InterfaceImpl myInterface = new InterfaceImplUser(reffed);
            if (reffed != default || reffed != null)
            {
                userT.Interfaces.Add(myInterface);
            }
        }
        ResolveData iD = new ResolveData(exs.Inherit);
        ITypeDefOrRef resolvedInherit = null;
        if (iD.ImportType != null)
            resolvedInherit = resolver.ResolveType(this, iD, newNameSpace, resolver);
        if (resolvedInherit != null)
            userT.BaseType = resolvedInherit;
    }
    #endregion
    #region Main
    userT.Attributes = exs.Attributes;
    Module.Types.Add(userT);
    TypeDef created = Module.Find(userT.FullName, !userT.FullName.Contains("+"));
    foreach (FieldDef m in exs.Fields)
    {
        m.DeclaringType = null;
        created.Fields.Add(m);
    }
    Dictionary<string, MethodDef> specMethods = new Dictionary<string, MethodDef>();
    List<MethodDef> __IMPORTANT_CTOR_defs = new List<MethodDef>();
    if (exs.Inherit != null)
    {
        List<MethodDef> meths = exs.Inherit?.ResolveTypeDef()?.Methods.ToList();
        if (meths != null)
            for (int i = 0; i < meths.Count(); i++)
            {
                if (meths[i].IsConstructor)
                {
                    meths[i].DeclaringType = null;
                    __IMPORTANT_CTOR_defs.Add(meths[i]);
                }
            }
    }
    foreach (MethodDef m in exs.Methods)
    {
        if (!m.IsSpecialName || m.IsConstructor)
        {
            m.DeclaringType = null;
            created.Methods.Add(m);
        }
        else if (m.IsSpecialName)
        {
            m.DeclaringType = null;
            specMethods.Add(m.Name, m);
        }
    }
    foreach (MethodDef m in __IMPORTANT_CTOR_defs)
    {
        created.Methods.Add(m);
    }
    foreach (EventDef m in exs.Events)
    {
        m.DeclaringType = null;
        created.Events.Add(m);
    }
    foreach (PropertyDef def in exs.Properties)
    {
        MethodAttributes attrs;
        TypeSig sig = def.PropertySig.RetType;
        if (def.GetMethod is null)
        {
            attrs = MethodAttributes.Public;
        }
        else
            attrs = def.GetMethod.Attributes;
        def.DeclaringType = null;
        PropertyDef newProp = Property<int>.CreateEmptyProperty(this, new TargetType(created), def.Name, attrs);
        newProp.PropertySig.RetType = sig;
        created.Methods.Remove(newProp.GetMethod);
        created.Methods.Remove(newProp.SetMethod);
        if (def.GetMethod != null)
        {
            MethodDef getter = specMethods[def.GetMethod.Name];
            created.Methods.Add(getter);
            newProp.GetMethod = getter;
            newProp.GetMethod.ReturnType = getter.ReturnType;
            newProp.GetMethod.Name = getter.Name;
        }
        else
        {
            newProp.GetMethod = null;
            newProp.SetMethod = null;
        }
        if (def.SetMethod != null)
        {
            MethodDef setter = specMethods[def.SetMethod.Name];
            created.Methods.Add(setter);
            newProp.SetMethod = setter;
            newProp.SetMethod.Name = setter.Name;
        }
        else
            newProp.SetMethod = null;
    }
    AddChange($"User Added The The Type :: {created.FullName}");
    #endregion
    return created;
}
c#
18
0.466251
120
41.32
125
/// <summary>
/// Add an existing <see cref="Type"/> to the <seealso cref="Module"/> with a different namespace and name
/// <para>Does not work with Inheritance</para>
/// </summary>
/// <param name="existingType"></param>
/// <param name="newNameSpace"></param>
/// <param name="name"></param>
/// <param name="resolver">If not supplied, the Writer will supply one for you <see cref="BasicTypeResolver"/></param>
/// <returns></returns>
function